poly_int: pointer_may_wrap_p
[official-gcc.git] / gcc / fold-const.c
blob75bc7627a0719b7f25cd435f3a6f055c51dd62cf
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "params.h"
75 #include "tree-into-ssa.h"
76 #include "md5.h"
77 #include "case-cfn-macros.h"
78 #include "stringpool.h"
79 #include "tree-vrp.h"
80 #include "tree-ssanames.h"
81 #include "selftest.h"
82 #include "stringpool.h"
83 #include "attribs.h"
84 #include "tree-vector-builder.h"
86 /* Nonzero if we are folding constants inside an initializer; zero
87 otherwise. */
88 int folding_initializer = 0;
90 /* The following constants represent a bit based encoding of GCC's
91 comparison operators. This encoding simplifies transformations
92 on relational comparison operators, such as AND and OR. */
93 enum comparison_code {
94 COMPCODE_FALSE = 0,
95 COMPCODE_LT = 1,
96 COMPCODE_EQ = 2,
97 COMPCODE_LE = 3,
98 COMPCODE_GT = 4,
99 COMPCODE_LTGT = 5,
100 COMPCODE_GE = 6,
101 COMPCODE_ORD = 7,
102 COMPCODE_UNORD = 8,
103 COMPCODE_UNLT = 9,
104 COMPCODE_UNEQ = 10,
105 COMPCODE_UNLE = 11,
106 COMPCODE_UNGT = 12,
107 COMPCODE_NE = 13,
108 COMPCODE_UNGE = 14,
109 COMPCODE_TRUE = 15
112 static bool negate_expr_p (tree);
113 static tree negate_expr (tree);
114 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
115 static enum comparison_code comparison_to_compcode (enum tree_code);
116 static enum tree_code compcode_to_comparison (enum comparison_code);
117 static int twoval_comparison_p (tree, tree *, tree *, int *);
118 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
119 static tree optimize_bit_field_compare (location_t, enum tree_code,
120 tree, tree, tree);
121 static int simple_operand_p (const_tree);
122 static bool simple_operand_p_2 (tree);
123 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
124 static tree range_predecessor (tree);
125 static tree range_successor (tree);
126 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
130 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
131 static tree fold_binary_op_with_conditional_arg (location_t,
132 enum tree_code, tree,
133 tree, tree,
134 tree, tree, int);
135 static tree fold_negate_const (tree, tree);
136 static tree fold_not_const (const_tree, tree);
137 static tree fold_relational_const (enum tree_code, tree, tree, tree);
138 static tree fold_convert_const (enum tree_code, tree, tree);
139 static tree fold_view_convert_expr (tree, tree);
140 static tree fold_negate_expr (location_t, tree);
143 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
144 Otherwise, return LOC. */
146 static location_t
147 expr_location_or (tree t, location_t loc)
149 location_t tloc = EXPR_LOCATION (t);
150 return tloc == UNKNOWN_LOCATION ? loc : tloc;
153 /* Similar to protected_set_expr_location, but never modify x in place,
154 if location can and needs to be set, unshare it. */
156 static inline tree
157 protected_set_expr_location_unshare (tree x, location_t loc)
159 if (CAN_HAVE_LOCATION_P (x)
160 && EXPR_LOCATION (x) != loc
161 && !(TREE_CODE (x) == SAVE_EXPR
162 || TREE_CODE (x) == TARGET_EXPR
163 || TREE_CODE (x) == BIND_EXPR))
165 x = copy_node (x);
166 SET_EXPR_LOCATION (x, loc);
168 return x;
171 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
172 division and returns the quotient. Otherwise returns
173 NULL_TREE. */
175 tree
176 div_if_zero_remainder (const_tree arg1, const_tree arg2)
178 widest_int quo;
180 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
181 SIGNED, &quo))
182 return wide_int_to_tree (TREE_TYPE (arg1), quo);
184 return NULL_TREE;
187 /* This is nonzero if we should defer warnings about undefined
188 overflow. This facility exists because these warnings are a
189 special case. The code to estimate loop iterations does not want
190 to issue any warnings, since it works with expressions which do not
191 occur in user code. Various bits of cleanup code call fold(), but
192 only use the result if it has certain characteristics (e.g., is a
193 constant); that code only wants to issue a warning if the result is
194 used. */
196 static int fold_deferring_overflow_warnings;
198 /* If a warning about undefined overflow is deferred, this is the
199 warning. Note that this may cause us to turn two warnings into
200 one, but that is fine since it is sufficient to only give one
201 warning per expression. */
203 static const char* fold_deferred_overflow_warning;
205 /* If a warning about undefined overflow is deferred, this is the
206 level at which the warning should be emitted. */
208 static enum warn_strict_overflow_code fold_deferred_overflow_code;
210 /* Start deferring overflow warnings. We could use a stack here to
211 permit nested calls, but at present it is not necessary. */
213 void
214 fold_defer_overflow_warnings (void)
216 ++fold_deferring_overflow_warnings;
219 /* Stop deferring overflow warnings. If there is a pending warning,
220 and ISSUE is true, then issue the warning if appropriate. STMT is
221 the statement with which the warning should be associated (used for
222 location information); STMT may be NULL. CODE is the level of the
223 warning--a warn_strict_overflow_code value. This function will use
224 the smaller of CODE and the deferred code when deciding whether to
225 issue the warning. CODE may be zero to mean to always use the
226 deferred code. */
228 void
229 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
231 const char *warnmsg;
232 location_t locus;
234 gcc_assert (fold_deferring_overflow_warnings > 0);
235 --fold_deferring_overflow_warnings;
236 if (fold_deferring_overflow_warnings > 0)
238 if (fold_deferred_overflow_warning != NULL
239 && code != 0
240 && code < (int) fold_deferred_overflow_code)
241 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
242 return;
245 warnmsg = fold_deferred_overflow_warning;
246 fold_deferred_overflow_warning = NULL;
248 if (!issue || warnmsg == NULL)
249 return;
251 if (gimple_no_warning_p (stmt))
252 return;
254 /* Use the smallest code level when deciding to issue the
255 warning. */
256 if (code == 0 || code > (int) fold_deferred_overflow_code)
257 code = fold_deferred_overflow_code;
259 if (!issue_strict_overflow_warning (code))
260 return;
262 if (stmt == NULL)
263 locus = input_location;
264 else
265 locus = gimple_location (stmt);
266 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
269 /* Stop deferring overflow warnings, ignoring any deferred
270 warnings. */
272 void
273 fold_undefer_and_ignore_overflow_warnings (void)
275 fold_undefer_overflow_warnings (false, NULL, 0);
278 /* Whether we are deferring overflow warnings. */
280 bool
281 fold_deferring_overflow_warnings_p (void)
283 return fold_deferring_overflow_warnings > 0;
286 /* This is called when we fold something based on the fact that signed
287 overflow is undefined. */
289 void
290 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
292 if (fold_deferring_overflow_warnings > 0)
294 if (fold_deferred_overflow_warning == NULL
295 || wc < fold_deferred_overflow_code)
297 fold_deferred_overflow_warning = gmsgid;
298 fold_deferred_overflow_code = wc;
301 else if (issue_strict_overflow_warning (wc))
302 warning (OPT_Wstrict_overflow, gmsgid);
305 /* Return true if the built-in mathematical function specified by CODE
306 is odd, i.e. -f(x) == f(-x). */
308 bool
309 negate_mathfn_p (combined_fn fn)
311 switch (fn)
313 CASE_CFN_ASIN:
314 CASE_CFN_ASINH:
315 CASE_CFN_ATAN:
316 CASE_CFN_ATANH:
317 CASE_CFN_CASIN:
318 CASE_CFN_CASINH:
319 CASE_CFN_CATAN:
320 CASE_CFN_CATANH:
321 CASE_CFN_CBRT:
322 CASE_CFN_CPROJ:
323 CASE_CFN_CSIN:
324 CASE_CFN_CSINH:
325 CASE_CFN_CTAN:
326 CASE_CFN_CTANH:
327 CASE_CFN_ERF:
328 CASE_CFN_LLROUND:
329 CASE_CFN_LROUND:
330 CASE_CFN_ROUND:
331 CASE_CFN_SIN:
332 CASE_CFN_SINH:
333 CASE_CFN_TAN:
334 CASE_CFN_TANH:
335 CASE_CFN_TRUNC:
336 return true;
338 CASE_CFN_LLRINT:
339 CASE_CFN_LRINT:
340 CASE_CFN_NEARBYINT:
341 CASE_CFN_RINT:
342 return !flag_rounding_math;
344 default:
345 break;
347 return false;
350 /* Check whether we may negate an integer constant T without causing
351 overflow. */
353 bool
354 may_negate_without_overflow_p (const_tree t)
356 tree type;
358 gcc_assert (TREE_CODE (t) == INTEGER_CST);
360 type = TREE_TYPE (t);
361 if (TYPE_UNSIGNED (type))
362 return false;
364 return !wi::only_sign_bit_p (wi::to_wide (t));
367 /* Determine whether an expression T can be cheaply negated using
368 the function negate_expr without introducing undefined overflow. */
370 static bool
371 negate_expr_p (tree t)
373 tree type;
375 if (t == 0)
376 return false;
378 type = TREE_TYPE (t);
380 STRIP_SIGN_NOPS (t);
381 switch (TREE_CODE (t))
383 case INTEGER_CST:
384 if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
385 return true;
387 /* Check that -CST will not overflow type. */
388 return may_negate_without_overflow_p (t);
389 case BIT_NOT_EXPR:
390 return (INTEGRAL_TYPE_P (type)
391 && TYPE_OVERFLOW_WRAPS (type));
393 case FIXED_CST:
394 return true;
396 case NEGATE_EXPR:
397 return !TYPE_OVERFLOW_SANITIZED (type);
399 case REAL_CST:
400 /* We want to canonicalize to positive real constants. Pretend
401 that only negative ones can be easily negated. */
402 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
404 case COMPLEX_CST:
405 return negate_expr_p (TREE_REALPART (t))
406 && negate_expr_p (TREE_IMAGPART (t));
408 case VECTOR_CST:
410 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
411 return true;
413 /* Steps don't prevent negation. */
414 unsigned int count = vector_cst_encoded_nelts (t);
415 for (unsigned int i = 0; i < count; ++i)
416 if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
417 return false;
419 return true;
422 case COMPLEX_EXPR:
423 return negate_expr_p (TREE_OPERAND (t, 0))
424 && negate_expr_p (TREE_OPERAND (t, 1));
426 case CONJ_EXPR:
427 return negate_expr_p (TREE_OPERAND (t, 0));
429 case PLUS_EXPR:
430 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
431 || HONOR_SIGNED_ZEROS (element_mode (type))
432 || (ANY_INTEGRAL_TYPE_P (type)
433 && ! TYPE_OVERFLOW_WRAPS (type)))
434 return false;
435 /* -(A + B) -> (-B) - A. */
436 if (negate_expr_p (TREE_OPERAND (t, 1)))
437 return true;
438 /* -(A + B) -> (-A) - B. */
439 return negate_expr_p (TREE_OPERAND (t, 0));
441 case MINUS_EXPR:
442 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
443 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
444 && !HONOR_SIGNED_ZEROS (element_mode (type))
445 && (! ANY_INTEGRAL_TYPE_P (type)
446 || TYPE_OVERFLOW_WRAPS (type));
448 case MULT_EXPR:
449 if (TYPE_UNSIGNED (type))
450 break;
451 /* INT_MIN/n * n doesn't overflow while negating one operand it does
452 if n is a (negative) power of two. */
453 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
454 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
455 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
456 && (wi::popcount
457 (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
458 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
459 && (wi::popcount
460 (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
461 break;
463 /* Fall through. */
465 case RDIV_EXPR:
466 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
467 return negate_expr_p (TREE_OPERAND (t, 1))
468 || negate_expr_p (TREE_OPERAND (t, 0));
469 break;
471 case TRUNC_DIV_EXPR:
472 case ROUND_DIV_EXPR:
473 case EXACT_DIV_EXPR:
474 if (TYPE_UNSIGNED (type))
475 break;
476 if (negate_expr_p (TREE_OPERAND (t, 0)))
477 return true;
478 /* In general we can't negate B in A / B, because if A is INT_MIN and
479 B is 1, we may turn this into INT_MIN / -1 which is undefined
480 and actually traps on some architectures. */
481 if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
482 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
483 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
484 && ! integer_onep (TREE_OPERAND (t, 1))))
485 return negate_expr_p (TREE_OPERAND (t, 1));
486 break;
488 case NOP_EXPR:
489 /* Negate -((double)float) as (double)(-float). */
490 if (TREE_CODE (type) == REAL_TYPE)
492 tree tem = strip_float_extensions (t);
493 if (tem != t)
494 return negate_expr_p (tem);
496 break;
498 case CALL_EXPR:
499 /* Negate -f(x) as f(-x). */
500 if (negate_mathfn_p (get_call_combined_fn (t)))
501 return negate_expr_p (CALL_EXPR_ARG (t, 0));
502 break;
504 case RSHIFT_EXPR:
505 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
506 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
508 tree op1 = TREE_OPERAND (t, 1);
509 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
510 return true;
512 break;
514 default:
515 break;
517 return false;
520 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
521 simplification is possible.
522 If negate_expr_p would return true for T, NULL_TREE will never be
523 returned. */
525 static tree
526 fold_negate_expr_1 (location_t loc, tree t)
528 tree type = TREE_TYPE (t);
529 tree tem;
531 switch (TREE_CODE (t))
533 /* Convert - (~A) to A + 1. */
534 case BIT_NOT_EXPR:
535 if (INTEGRAL_TYPE_P (type))
536 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
537 build_one_cst (type));
538 break;
540 case INTEGER_CST:
541 tem = fold_negate_const (t, type);
542 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
543 || (ANY_INTEGRAL_TYPE_P (type)
544 && !TYPE_OVERFLOW_TRAPS (type)
545 && TYPE_OVERFLOW_WRAPS (type))
546 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
547 return tem;
548 break;
550 case POLY_INT_CST:
551 case REAL_CST:
552 case FIXED_CST:
553 tem = fold_negate_const (t, type);
554 return tem;
556 case COMPLEX_CST:
558 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
559 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
560 if (rpart && ipart)
561 return build_complex (type, rpart, ipart);
563 break;
565 case VECTOR_CST:
567 tree_vector_builder elts;
568 elts.new_unary_operation (type, t, true);
569 unsigned int count = elts.encoded_nelts ();
570 for (unsigned int i = 0; i < count; ++i)
572 tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
573 if (elt == NULL_TREE)
574 return NULL_TREE;
575 elts.quick_push (elt);
578 return elts.build ();
581 case COMPLEX_EXPR:
582 if (negate_expr_p (t))
583 return fold_build2_loc (loc, COMPLEX_EXPR, type,
584 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
585 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
586 break;
588 case CONJ_EXPR:
589 if (negate_expr_p (t))
590 return fold_build1_loc (loc, CONJ_EXPR, type,
591 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
592 break;
594 case NEGATE_EXPR:
595 if (!TYPE_OVERFLOW_SANITIZED (type))
596 return TREE_OPERAND (t, 0);
597 break;
599 case PLUS_EXPR:
600 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
601 && !HONOR_SIGNED_ZEROS (element_mode (type)))
603 /* -(A + B) -> (-B) - A. */
604 if (negate_expr_p (TREE_OPERAND (t, 1)))
606 tem = negate_expr (TREE_OPERAND (t, 1));
607 return fold_build2_loc (loc, MINUS_EXPR, type,
608 tem, TREE_OPERAND (t, 0));
611 /* -(A + B) -> (-A) - B. */
612 if (negate_expr_p (TREE_OPERAND (t, 0)))
614 tem = negate_expr (TREE_OPERAND (t, 0));
615 return fold_build2_loc (loc, MINUS_EXPR, type,
616 tem, TREE_OPERAND (t, 1));
619 break;
621 case MINUS_EXPR:
622 /* - (A - B) -> B - A */
623 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
624 && !HONOR_SIGNED_ZEROS (element_mode (type)))
625 return fold_build2_loc (loc, MINUS_EXPR, type,
626 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
627 break;
629 case MULT_EXPR:
630 if (TYPE_UNSIGNED (type))
631 break;
633 /* Fall through. */
635 case RDIV_EXPR:
636 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
638 tem = TREE_OPERAND (t, 1);
639 if (negate_expr_p (tem))
640 return fold_build2_loc (loc, TREE_CODE (t), type,
641 TREE_OPERAND (t, 0), negate_expr (tem));
642 tem = TREE_OPERAND (t, 0);
643 if (negate_expr_p (tem))
644 return fold_build2_loc (loc, TREE_CODE (t), type,
645 negate_expr (tem), TREE_OPERAND (t, 1));
647 break;
649 case TRUNC_DIV_EXPR:
650 case ROUND_DIV_EXPR:
651 case EXACT_DIV_EXPR:
652 if (TYPE_UNSIGNED (type))
653 break;
654 if (negate_expr_p (TREE_OPERAND (t, 0)))
655 return fold_build2_loc (loc, TREE_CODE (t), type,
656 negate_expr (TREE_OPERAND (t, 0)),
657 TREE_OPERAND (t, 1));
658 /* In general we can't negate B in A / B, because if A is INT_MIN and
659 B is 1, we may turn this into INT_MIN / -1 which is undefined
660 and actually traps on some architectures. */
661 if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
662 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
663 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
664 && ! integer_onep (TREE_OPERAND (t, 1))))
665 && negate_expr_p (TREE_OPERAND (t, 1)))
666 return fold_build2_loc (loc, TREE_CODE (t), type,
667 TREE_OPERAND (t, 0),
668 negate_expr (TREE_OPERAND (t, 1)));
669 break;
671 case NOP_EXPR:
672 /* Convert -((double)float) into (double)(-float). */
673 if (TREE_CODE (type) == REAL_TYPE)
675 tem = strip_float_extensions (t);
676 if (tem != t && negate_expr_p (tem))
677 return fold_convert_loc (loc, type, negate_expr (tem));
679 break;
681 case CALL_EXPR:
682 /* Negate -f(x) as f(-x). */
683 if (negate_mathfn_p (get_call_combined_fn (t))
684 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
686 tree fndecl, arg;
688 fndecl = get_callee_fndecl (t);
689 arg = negate_expr (CALL_EXPR_ARG (t, 0));
690 return build_call_expr_loc (loc, fndecl, 1, arg);
692 break;
694 case RSHIFT_EXPR:
695 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
696 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
698 tree op1 = TREE_OPERAND (t, 1);
699 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
701 tree ntype = TYPE_UNSIGNED (type)
702 ? signed_type_for (type)
703 : unsigned_type_for (type);
704 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
705 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
706 return fold_convert_loc (loc, type, temp);
709 break;
711 default:
712 break;
715 return NULL_TREE;
718 /* A wrapper for fold_negate_expr_1. */
720 static tree
721 fold_negate_expr (location_t loc, tree t)
723 tree type = TREE_TYPE (t);
724 STRIP_SIGN_NOPS (t);
725 tree tem = fold_negate_expr_1 (loc, t);
726 if (tem == NULL_TREE)
727 return NULL_TREE;
728 return fold_convert_loc (loc, type, tem);
731 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
732 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
733 return NULL_TREE. */
735 static tree
736 negate_expr (tree t)
738 tree type, tem;
739 location_t loc;
741 if (t == NULL_TREE)
742 return NULL_TREE;
744 loc = EXPR_LOCATION (t);
745 type = TREE_TYPE (t);
746 STRIP_SIGN_NOPS (t);
748 tem = fold_negate_expr (loc, t);
749 if (!tem)
750 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
751 return fold_convert_loc (loc, type, tem);
754 /* Split a tree IN into a constant, literal and variable parts that could be
755 combined with CODE to make IN. "constant" means an expression with
756 TREE_CONSTANT but that isn't an actual constant. CODE must be a
757 commutative arithmetic operation. Store the constant part into *CONP,
758 the literal in *LITP and return the variable part. If a part isn't
759 present, set it to null. If the tree does not decompose in this way,
760 return the entire tree as the variable part and the other parts as null.
762 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
763 case, we negate an operand that was subtracted. Except if it is a
764 literal for which we use *MINUS_LITP instead.
766 If NEGATE_P is true, we are negating all of IN, again except a literal
767 for which we use *MINUS_LITP instead. If a variable part is of pointer
768 type, it is negated after converting to TYPE. This prevents us from
769 generating illegal MINUS pointer expression. LOC is the location of
770 the converted variable part.
772 If IN is itself a literal or constant, return it as appropriate.
774 Note that we do not guarantee that any of the three values will be the
775 same type as IN, but they will have the same signedness and mode. */
777 static tree
778 split_tree (tree in, tree type, enum tree_code code,
779 tree *minus_varp, tree *conp, tree *minus_conp,
780 tree *litp, tree *minus_litp, int negate_p)
782 tree var = 0;
783 *minus_varp = 0;
784 *conp = 0;
785 *minus_conp = 0;
786 *litp = 0;
787 *minus_litp = 0;
789 /* Strip any conversions that don't change the machine mode or signedness. */
790 STRIP_SIGN_NOPS (in);
792 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
793 || TREE_CODE (in) == FIXED_CST)
794 *litp = in;
795 else if (TREE_CODE (in) == code
796 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
797 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
798 /* We can associate addition and subtraction together (even
799 though the C standard doesn't say so) for integers because
800 the value is not affected. For reals, the value might be
801 affected, so we can't. */
802 && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
803 || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
804 || (code == MINUS_EXPR
805 && (TREE_CODE (in) == PLUS_EXPR
806 || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
808 tree op0 = TREE_OPERAND (in, 0);
809 tree op1 = TREE_OPERAND (in, 1);
810 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
811 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
813 /* First see if either of the operands is a literal, then a constant. */
814 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
815 || TREE_CODE (op0) == FIXED_CST)
816 *litp = op0, op0 = 0;
817 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
818 || TREE_CODE (op1) == FIXED_CST)
819 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
821 if (op0 != 0 && TREE_CONSTANT (op0))
822 *conp = op0, op0 = 0;
823 else if (op1 != 0 && TREE_CONSTANT (op1))
824 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
826 /* If we haven't dealt with either operand, this is not a case we can
827 decompose. Otherwise, VAR is either of the ones remaining, if any. */
828 if (op0 != 0 && op1 != 0)
829 var = in;
830 else if (op0 != 0)
831 var = op0;
832 else
833 var = op1, neg_var_p = neg1_p;
835 /* Now do any needed negations. */
836 if (neg_litp_p)
837 *minus_litp = *litp, *litp = 0;
838 if (neg_conp_p && *conp)
839 *minus_conp = *conp, *conp = 0;
840 if (neg_var_p && var)
841 *minus_varp = var, var = 0;
843 else if (TREE_CONSTANT (in))
844 *conp = in;
845 else if (TREE_CODE (in) == BIT_NOT_EXPR
846 && code == PLUS_EXPR)
848 /* -1 - X is folded to ~X, undo that here. Do _not_ do this
849 when IN is constant. */
850 *litp = build_minus_one_cst (type);
851 *minus_varp = TREE_OPERAND (in, 0);
853 else
854 var = in;
856 if (negate_p)
858 if (*litp)
859 *minus_litp = *litp, *litp = 0;
860 else if (*minus_litp)
861 *litp = *minus_litp, *minus_litp = 0;
862 if (*conp)
863 *minus_conp = *conp, *conp = 0;
864 else if (*minus_conp)
865 *conp = *minus_conp, *minus_conp = 0;
866 if (var)
867 *minus_varp = var, var = 0;
868 else if (*minus_varp)
869 var = *minus_varp, *minus_varp = 0;
872 if (*litp
873 && TREE_OVERFLOW_P (*litp))
874 *litp = drop_tree_overflow (*litp);
875 if (*minus_litp
876 && TREE_OVERFLOW_P (*minus_litp))
877 *minus_litp = drop_tree_overflow (*minus_litp);
879 return var;
882 /* Re-associate trees split by the above function. T1 and T2 are
883 either expressions to associate or null. Return the new
884 expression, if any. LOC is the location of the new expression. If
885 we build an operation, do it in TYPE and with CODE. */
887 static tree
888 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
890 if (t1 == 0)
892 gcc_assert (t2 == 0 || code != MINUS_EXPR);
893 return t2;
895 else if (t2 == 0)
896 return t1;
898 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
899 try to fold this since we will have infinite recursion. But do
900 deal with any NEGATE_EXPRs. */
901 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
902 || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
903 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
905 if (code == PLUS_EXPR)
907 if (TREE_CODE (t1) == NEGATE_EXPR)
908 return build2_loc (loc, MINUS_EXPR, type,
909 fold_convert_loc (loc, type, t2),
910 fold_convert_loc (loc, type,
911 TREE_OPERAND (t1, 0)));
912 else if (TREE_CODE (t2) == NEGATE_EXPR)
913 return build2_loc (loc, MINUS_EXPR, type,
914 fold_convert_loc (loc, type, t1),
915 fold_convert_loc (loc, type,
916 TREE_OPERAND (t2, 0)));
917 else if (integer_zerop (t2))
918 return fold_convert_loc (loc, type, t1);
920 else if (code == MINUS_EXPR)
922 if (integer_zerop (t2))
923 return fold_convert_loc (loc, type, t1);
926 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
927 fold_convert_loc (loc, type, t2));
930 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
931 fold_convert_loc (loc, type, t2));
934 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
935 for use in int_const_binop, size_binop and size_diffop. */
937 static bool
938 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
940 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
941 return false;
942 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
943 return false;
945 switch (code)
947 case LSHIFT_EXPR:
948 case RSHIFT_EXPR:
949 case LROTATE_EXPR:
950 case RROTATE_EXPR:
951 return true;
953 default:
954 break;
957 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
958 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
959 && TYPE_MODE (type1) == TYPE_MODE (type2);
962 /* Subroutine of int_const_binop_1 that handles two INTEGER_CSTs. */
964 static tree
965 int_const_binop_2 (enum tree_code code, const_tree parg1, const_tree parg2,
966 int overflowable)
968 wide_int res;
969 tree t;
970 tree type = TREE_TYPE (parg1);
971 signop sign = TYPE_SIGN (type);
972 bool overflow = false;
974 wi::tree_to_wide_ref arg1 = wi::to_wide (parg1);
975 wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));
977 switch (code)
979 case BIT_IOR_EXPR:
980 res = wi::bit_or (arg1, arg2);
981 break;
983 case BIT_XOR_EXPR:
984 res = wi::bit_xor (arg1, arg2);
985 break;
987 case BIT_AND_EXPR:
988 res = wi::bit_and (arg1, arg2);
989 break;
991 case RSHIFT_EXPR:
992 case LSHIFT_EXPR:
993 if (wi::neg_p (arg2))
995 arg2 = -arg2;
996 if (code == RSHIFT_EXPR)
997 code = LSHIFT_EXPR;
998 else
999 code = RSHIFT_EXPR;
1002 if (code == RSHIFT_EXPR)
1003 /* It's unclear from the C standard whether shifts can overflow.
1004 The following code ignores overflow; perhaps a C standard
1005 interpretation ruling is needed. */
1006 res = wi::rshift (arg1, arg2, sign);
1007 else
1008 res = wi::lshift (arg1, arg2);
1009 break;
1011 case RROTATE_EXPR:
1012 case LROTATE_EXPR:
1013 if (wi::neg_p (arg2))
1015 arg2 = -arg2;
1016 if (code == RROTATE_EXPR)
1017 code = LROTATE_EXPR;
1018 else
1019 code = RROTATE_EXPR;
1022 if (code == RROTATE_EXPR)
1023 res = wi::rrotate (arg1, arg2);
1024 else
1025 res = wi::lrotate (arg1, arg2);
1026 break;
1028 case PLUS_EXPR:
1029 res = wi::add (arg1, arg2, sign, &overflow);
1030 break;
1032 case MINUS_EXPR:
1033 res = wi::sub (arg1, arg2, sign, &overflow);
1034 break;
1036 case MULT_EXPR:
1037 res = wi::mul (arg1, arg2, sign, &overflow);
1038 break;
1040 case MULT_HIGHPART_EXPR:
1041 res = wi::mul_high (arg1, arg2, sign);
1042 break;
1044 case TRUNC_DIV_EXPR:
1045 case EXACT_DIV_EXPR:
1046 if (arg2 == 0)
1047 return NULL_TREE;
1048 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1049 break;
1051 case FLOOR_DIV_EXPR:
1052 if (arg2 == 0)
1053 return NULL_TREE;
1054 res = wi::div_floor (arg1, arg2, sign, &overflow);
1055 break;
1057 case CEIL_DIV_EXPR:
1058 if (arg2 == 0)
1059 return NULL_TREE;
1060 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1061 break;
1063 case ROUND_DIV_EXPR:
1064 if (arg2 == 0)
1065 return NULL_TREE;
1066 res = wi::div_round (arg1, arg2, sign, &overflow);
1067 break;
1069 case TRUNC_MOD_EXPR:
1070 if (arg2 == 0)
1071 return NULL_TREE;
1072 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1073 break;
1075 case FLOOR_MOD_EXPR:
1076 if (arg2 == 0)
1077 return NULL_TREE;
1078 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1079 break;
1081 case CEIL_MOD_EXPR:
1082 if (arg2 == 0)
1083 return NULL_TREE;
1084 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1085 break;
1087 case ROUND_MOD_EXPR:
1088 if (arg2 == 0)
1089 return NULL_TREE;
1090 res = wi::mod_round (arg1, arg2, sign, &overflow);
1091 break;
1093 case MIN_EXPR:
1094 res = wi::min (arg1, arg2, sign);
1095 break;
1097 case MAX_EXPR:
1098 res = wi::max (arg1, arg2, sign);
1099 break;
1101 default:
1102 return NULL_TREE;
1105 t = force_fit_type (type, res, overflowable,
1106 (((sign == SIGNED || overflowable == -1)
1107 && overflow)
1108 | TREE_OVERFLOW (parg1) | TREE_OVERFLOW (parg2)));
1110 return t;
1113 /* Combine two integer constants PARG1 and PARG2 under operation CODE
1114 to produce a new constant. Return NULL_TREE if we don't know how
1115 to evaluate CODE at compile-time. */
1117 static tree
1118 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
1119 int overflowable)
1121 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1122 return int_const_binop_2 (code, arg1, arg2, overflowable);
1124 gcc_assert (NUM_POLY_INT_COEFFS != 1);
1126 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1128 poly_wide_int res;
1129 bool overflow;
1130 tree type = TREE_TYPE (arg1);
1131 signop sign = TYPE_SIGN (type);
1132 switch (code)
1134 case PLUS_EXPR:
1135 res = wi::add (wi::to_poly_wide (arg1),
1136 wi::to_poly_wide (arg2), sign, &overflow);
1137 break;
1139 case MINUS_EXPR:
1140 res = wi::sub (wi::to_poly_wide (arg1),
1141 wi::to_poly_wide (arg2), sign, &overflow);
1142 break;
1144 case MULT_EXPR:
1145 if (TREE_CODE (arg2) == INTEGER_CST)
1146 res = wi::mul (wi::to_poly_wide (arg1),
1147 wi::to_wide (arg2), sign, &overflow);
1148 else if (TREE_CODE (arg1) == INTEGER_CST)
1149 res = wi::mul (wi::to_poly_wide (arg2),
1150 wi::to_wide (arg1), sign, &overflow);
1151 else
1152 return NULL_TREE;
1153 break;
1155 case LSHIFT_EXPR:
1156 if (TREE_CODE (arg2) == INTEGER_CST)
1157 res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
1158 else
1159 return NULL_TREE;
1160 break;
1162 case BIT_IOR_EXPR:
1163 if (TREE_CODE (arg2) != INTEGER_CST
1164 || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
1165 &res))
1166 return NULL_TREE;
1167 break;
1169 default:
1170 return NULL_TREE;
1172 return force_fit_type (type, res, overflowable,
1173 (((sign == SIGNED || overflowable == -1)
1174 && overflow)
1175 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
1178 return NULL_TREE;
1181 tree
1182 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1184 return int_const_binop_1 (code, arg1, arg2, 1);
1187 /* Return true if binary operation OP distributes over addition in operand
1188 OPNO, with the other operand being held constant. OPNO counts from 1. */
1190 static bool
1191 distributes_over_addition_p (tree_code op, int opno)
1193 switch (op)
1195 case PLUS_EXPR:
1196 case MINUS_EXPR:
1197 case MULT_EXPR:
1198 return true;
1200 case LSHIFT_EXPR:
1201 return opno == 1;
1203 default:
1204 return false;
1208 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1209 constant. We assume ARG1 and ARG2 have the same data type, or at least
1210 are the same kind of constant and the same machine mode. Return zero if
1211 combining the constants is not allowed in the current operating mode. */
1213 static tree
1214 const_binop (enum tree_code code, tree arg1, tree arg2)
1216 /* Sanity check for the recursive cases. */
1217 if (!arg1 || !arg2)
1218 return NULL_TREE;
1220 STRIP_NOPS (arg1);
1221 STRIP_NOPS (arg2);
1223 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1225 if (code == POINTER_PLUS_EXPR)
1226 return int_const_binop (PLUS_EXPR,
1227 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1229 return int_const_binop (code, arg1, arg2);
1232 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1234 machine_mode mode;
1235 REAL_VALUE_TYPE d1;
1236 REAL_VALUE_TYPE d2;
1237 REAL_VALUE_TYPE value;
1238 REAL_VALUE_TYPE result;
1239 bool inexact;
1240 tree t, type;
1242 /* The following codes are handled by real_arithmetic. */
1243 switch (code)
1245 case PLUS_EXPR:
1246 case MINUS_EXPR:
1247 case MULT_EXPR:
1248 case RDIV_EXPR:
1249 case MIN_EXPR:
1250 case MAX_EXPR:
1251 break;
1253 default:
1254 return NULL_TREE;
1257 d1 = TREE_REAL_CST (arg1);
1258 d2 = TREE_REAL_CST (arg2);
1260 type = TREE_TYPE (arg1);
1261 mode = TYPE_MODE (type);
1263 /* Don't perform operation if we honor signaling NaNs and
1264 either operand is a signaling NaN. */
1265 if (HONOR_SNANS (mode)
1266 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1267 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1268 return NULL_TREE;
1270 /* Don't perform operation if it would raise a division
1271 by zero exception. */
1272 if (code == RDIV_EXPR
1273 && real_equal (&d2, &dconst0)
1274 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1275 return NULL_TREE;
1277 /* If either operand is a NaN, just return it. Otherwise, set up
1278 for floating-point trap; we return an overflow. */
1279 if (REAL_VALUE_ISNAN (d1))
1281 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1282 is off. */
1283 d1.signalling = 0;
1284 t = build_real (type, d1);
1285 return t;
1287 else if (REAL_VALUE_ISNAN (d2))
1289 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1290 is off. */
1291 d2.signalling = 0;
1292 t = build_real (type, d2);
1293 return t;
1296 inexact = real_arithmetic (&value, code, &d1, &d2);
1297 real_convert (&result, mode, &value);
1299 /* Don't constant fold this floating point operation if
1300 the result has overflowed and flag_trapping_math. */
1301 if (flag_trapping_math
1302 && MODE_HAS_INFINITIES (mode)
1303 && REAL_VALUE_ISINF (result)
1304 && !REAL_VALUE_ISINF (d1)
1305 && !REAL_VALUE_ISINF (d2))
1306 return NULL_TREE;
1308 /* Don't constant fold this floating point operation if the
1309 result may dependent upon the run-time rounding mode and
1310 flag_rounding_math is set, or if GCC's software emulation
1311 is unable to accurately represent the result. */
1312 if ((flag_rounding_math
1313 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1314 && (inexact || !real_identical (&result, &value)))
1315 return NULL_TREE;
1317 t = build_real (type, result);
1319 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1320 return t;
1323 if (TREE_CODE (arg1) == FIXED_CST)
1325 FIXED_VALUE_TYPE f1;
1326 FIXED_VALUE_TYPE f2;
1327 FIXED_VALUE_TYPE result;
1328 tree t, type;
1329 int sat_p;
1330 bool overflow_p;
1332 /* The following codes are handled by fixed_arithmetic. */
1333 switch (code)
1335 case PLUS_EXPR:
1336 case MINUS_EXPR:
1337 case MULT_EXPR:
1338 case TRUNC_DIV_EXPR:
1339 if (TREE_CODE (arg2) != FIXED_CST)
1340 return NULL_TREE;
1341 f2 = TREE_FIXED_CST (arg2);
1342 break;
1344 case LSHIFT_EXPR:
1345 case RSHIFT_EXPR:
1347 if (TREE_CODE (arg2) != INTEGER_CST)
1348 return NULL_TREE;
1349 wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
1350 f2.data.high = w2.elt (1);
1351 f2.data.low = w2.ulow ();
1352 f2.mode = SImode;
1354 break;
1356 default:
1357 return NULL_TREE;
1360 f1 = TREE_FIXED_CST (arg1);
1361 type = TREE_TYPE (arg1);
1362 sat_p = TYPE_SATURATING (type);
1363 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1364 t = build_fixed (type, result);
1365 /* Propagate overflow flags. */
1366 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1367 TREE_OVERFLOW (t) = 1;
1368 return t;
1371 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1373 tree type = TREE_TYPE (arg1);
1374 tree r1 = TREE_REALPART (arg1);
1375 tree i1 = TREE_IMAGPART (arg1);
1376 tree r2 = TREE_REALPART (arg2);
1377 tree i2 = TREE_IMAGPART (arg2);
1378 tree real, imag;
1380 switch (code)
1382 case PLUS_EXPR:
1383 case MINUS_EXPR:
1384 real = const_binop (code, r1, r2);
1385 imag = const_binop (code, i1, i2);
1386 break;
1388 case MULT_EXPR:
1389 if (COMPLEX_FLOAT_TYPE_P (type))
1390 return do_mpc_arg2 (arg1, arg2, type,
1391 /* do_nonfinite= */ folding_initializer,
1392 mpc_mul);
1394 real = const_binop (MINUS_EXPR,
1395 const_binop (MULT_EXPR, r1, r2),
1396 const_binop (MULT_EXPR, i1, i2));
1397 imag = const_binop (PLUS_EXPR,
1398 const_binop (MULT_EXPR, r1, i2),
1399 const_binop (MULT_EXPR, i1, r2));
1400 break;
1402 case RDIV_EXPR:
1403 if (COMPLEX_FLOAT_TYPE_P (type))
1404 return do_mpc_arg2 (arg1, arg2, type,
1405 /* do_nonfinite= */ folding_initializer,
1406 mpc_div);
1407 /* Fallthru. */
1408 case TRUNC_DIV_EXPR:
1409 case CEIL_DIV_EXPR:
1410 case FLOOR_DIV_EXPR:
1411 case ROUND_DIV_EXPR:
1412 if (flag_complex_method == 0)
1414 /* Keep this algorithm in sync with
1415 tree-complex.c:expand_complex_div_straight().
1417 Expand complex division to scalars, straightforward algorithm.
1418 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1419 t = br*br + bi*bi
1421 tree magsquared
1422 = const_binop (PLUS_EXPR,
1423 const_binop (MULT_EXPR, r2, r2),
1424 const_binop (MULT_EXPR, i2, i2));
1425 tree t1
1426 = const_binop (PLUS_EXPR,
1427 const_binop (MULT_EXPR, r1, r2),
1428 const_binop (MULT_EXPR, i1, i2));
1429 tree t2
1430 = const_binop (MINUS_EXPR,
1431 const_binop (MULT_EXPR, i1, r2),
1432 const_binop (MULT_EXPR, r1, i2));
1434 real = const_binop (code, t1, magsquared);
1435 imag = const_binop (code, t2, magsquared);
1437 else
1439 /* Keep this algorithm in sync with
1440 tree-complex.c:expand_complex_div_wide().
1442 Expand complex division to scalars, modified algorithm to minimize
1443 overflow with wide input ranges. */
1444 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1445 fold_abs_const (r2, TREE_TYPE (type)),
1446 fold_abs_const (i2, TREE_TYPE (type)));
1448 if (integer_nonzerop (compare))
1450 /* In the TRUE branch, we compute
1451 ratio = br/bi;
1452 div = (br * ratio) + bi;
1453 tr = (ar * ratio) + ai;
1454 ti = (ai * ratio) - ar;
1455 tr = tr / div;
1456 ti = ti / div; */
1457 tree ratio = const_binop (code, r2, i2);
1458 tree div = const_binop (PLUS_EXPR, i2,
1459 const_binop (MULT_EXPR, r2, ratio));
1460 real = const_binop (MULT_EXPR, r1, ratio);
1461 real = const_binop (PLUS_EXPR, real, i1);
1462 real = const_binop (code, real, div);
1464 imag = const_binop (MULT_EXPR, i1, ratio);
1465 imag = const_binop (MINUS_EXPR, imag, r1);
1466 imag = const_binop (code, imag, div);
1468 else
1470 /* In the FALSE branch, we compute
1471 ratio = d/c;
1472 divisor = (d * ratio) + c;
1473 tr = (b * ratio) + a;
1474 ti = b - (a * ratio);
1475 tr = tr / div;
1476 ti = ti / div; */
1477 tree ratio = const_binop (code, i2, r2);
1478 tree div = const_binop (PLUS_EXPR, r2,
1479 const_binop (MULT_EXPR, i2, ratio));
1481 real = const_binop (MULT_EXPR, i1, ratio);
1482 real = const_binop (PLUS_EXPR, real, r1);
1483 real = const_binop (code, real, div);
1485 imag = const_binop (MULT_EXPR, r1, ratio);
1486 imag = const_binop (MINUS_EXPR, i1, imag);
1487 imag = const_binop (code, imag, div);
1490 break;
1492 default:
1493 return NULL_TREE;
1496 if (real && imag)
1497 return build_complex (type, real, imag);
1500 if (TREE_CODE (arg1) == VECTOR_CST
1501 && TREE_CODE (arg2) == VECTOR_CST
1502 && (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))
1503 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
1505 tree type = TREE_TYPE (arg1);
1506 bool step_ok_p;
1507 if (VECTOR_CST_STEPPED_P (arg1)
1508 && VECTOR_CST_STEPPED_P (arg2))
1509 /* We can operate directly on the encoding if:
1511 a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1512 implies
1513 (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1515 Addition and subtraction are the supported operators
1516 for which this is true. */
1517 step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
1518 else if (VECTOR_CST_STEPPED_P (arg1))
1519 /* We can operate directly on stepped encodings if:
1521 a3 - a2 == a2 - a1
1522 implies:
1523 (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1525 which is true if (x -> x op c) distributes over addition. */
1526 step_ok_p = distributes_over_addition_p (code, 1);
1527 else
1528 /* Similarly in reverse. */
1529 step_ok_p = distributes_over_addition_p (code, 2);
1530 tree_vector_builder elts;
1531 if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
1532 return NULL_TREE;
1533 unsigned int count = elts.encoded_nelts ();
1534 for (unsigned int i = 0; i < count; ++i)
1536 tree elem1 = VECTOR_CST_ELT (arg1, i);
1537 tree elem2 = VECTOR_CST_ELT (arg2, i);
1539 tree elt = const_binop (code, elem1, elem2);
1541 /* It is possible that const_binop cannot handle the given
1542 code and return NULL_TREE */
1543 if (elt == NULL_TREE)
1544 return NULL_TREE;
1545 elts.quick_push (elt);
1548 return elts.build ();
1551 /* Shifts allow a scalar offset for a vector. */
1552 if (TREE_CODE (arg1) == VECTOR_CST
1553 && TREE_CODE (arg2) == INTEGER_CST)
1555 tree type = TREE_TYPE (arg1);
1556 bool step_ok_p = distributes_over_addition_p (code, 1);
1557 tree_vector_builder elts;
1558 if (!elts.new_unary_operation (type, arg1, step_ok_p))
1559 return NULL_TREE;
1560 unsigned int count = elts.encoded_nelts ();
1561 for (unsigned int i = 0; i < count; ++i)
1563 tree elem1 = VECTOR_CST_ELT (arg1, i);
1565 tree elt = const_binop (code, elem1, arg2);
1567 /* It is possible that const_binop cannot handle the given
1568 code and return NULL_TREE. */
1569 if (elt == NULL_TREE)
1570 return NULL_TREE;
1571 elts.quick_push (elt);
1574 return elts.build ();
1576 return NULL_TREE;
1579 /* Overload that adds a TYPE parameter to be able to dispatch
1580 to fold_relational_const. */
1582 tree
1583 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1585 if (TREE_CODE_CLASS (code) == tcc_comparison)
1586 return fold_relational_const (code, type, arg1, arg2);
1588 /* ??? Until we make the const_binop worker take the type of the
1589 result as argument put those cases that need it here. */
1590 switch (code)
1592 case VEC_SERIES_EXPR:
1593 if (CONSTANT_CLASS_P (arg1)
1594 && CONSTANT_CLASS_P (arg2))
1595 return build_vec_series (type, arg1, arg2);
1596 return NULL_TREE;
1598 case COMPLEX_EXPR:
1599 if ((TREE_CODE (arg1) == REAL_CST
1600 && TREE_CODE (arg2) == REAL_CST)
1601 || (TREE_CODE (arg1) == INTEGER_CST
1602 && TREE_CODE (arg2) == INTEGER_CST))
1603 return build_complex (type, arg1, arg2);
1604 return NULL_TREE;
1606 case POINTER_DIFF_EXPR:
1607 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1609 offset_int res = wi::sub (wi::to_offset (arg1),
1610 wi::to_offset (arg2));
1611 return force_fit_type (type, res, 1,
1612 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1614 return NULL_TREE;
1616 case VEC_PACK_TRUNC_EXPR:
1617 case VEC_PACK_FIX_TRUNC_EXPR:
1619 unsigned int out_nelts, in_nelts, i;
1621 if (TREE_CODE (arg1) != VECTOR_CST
1622 || TREE_CODE (arg2) != VECTOR_CST)
1623 return NULL_TREE;
1625 in_nelts = VECTOR_CST_NELTS (arg1);
1626 out_nelts = in_nelts * 2;
1627 gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
1628 && out_nelts == TYPE_VECTOR_SUBPARTS (type));
1630 tree_vector_builder elts (type, out_nelts, 1);
1631 for (i = 0; i < out_nelts; i++)
1633 tree elt = (i < in_nelts
1634 ? VECTOR_CST_ELT (arg1, i)
1635 : VECTOR_CST_ELT (arg2, i - in_nelts));
1636 elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1637 ? NOP_EXPR : FIX_TRUNC_EXPR,
1638 TREE_TYPE (type), elt);
1639 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1640 return NULL_TREE;
1641 elts.quick_push (elt);
1644 return elts.build ();
1647 case VEC_WIDEN_MULT_LO_EXPR:
1648 case VEC_WIDEN_MULT_HI_EXPR:
1649 case VEC_WIDEN_MULT_EVEN_EXPR:
1650 case VEC_WIDEN_MULT_ODD_EXPR:
1652 unsigned int out_nelts, in_nelts, out, ofs, scale;
1654 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1655 return NULL_TREE;
1657 in_nelts = VECTOR_CST_NELTS (arg1);
1658 out_nelts = in_nelts / 2;
1659 gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
1660 && out_nelts == TYPE_VECTOR_SUBPARTS (type));
1662 if (code == VEC_WIDEN_MULT_LO_EXPR)
1663 scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1664 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1665 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1666 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1667 scale = 1, ofs = 0;
1668 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1669 scale = 1, ofs = 1;
1671 tree_vector_builder elts (type, out_nelts, 1);
1672 for (out = 0; out < out_nelts; out++)
1674 unsigned int in = (out << scale) + ofs;
1675 tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1676 VECTOR_CST_ELT (arg1, in));
1677 tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1678 VECTOR_CST_ELT (arg2, in));
1680 if (t1 == NULL_TREE || t2 == NULL_TREE)
1681 return NULL_TREE;
1682 tree elt = const_binop (MULT_EXPR, t1, t2);
1683 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1684 return NULL_TREE;
1685 elts.quick_push (elt);
1688 return elts.build ();
1691 default:;
1694 if (TREE_CODE_CLASS (code) != tcc_binary)
1695 return NULL_TREE;
1697 /* Make sure type and arg0 have the same saturating flag. */
1698 gcc_checking_assert (TYPE_SATURATING (type)
1699 == TYPE_SATURATING (TREE_TYPE (arg1)));
1701 return const_binop (code, arg1, arg2);
1704 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1705 Return zero if computing the constants is not possible. */
1707 tree
1708 const_unop (enum tree_code code, tree type, tree arg0)
1710 /* Don't perform the operation, other than NEGATE and ABS, if
1711 flag_signaling_nans is on and the operand is a signaling NaN. */
1712 if (TREE_CODE (arg0) == REAL_CST
1713 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1714 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1715 && code != NEGATE_EXPR
1716 && code != ABS_EXPR)
1717 return NULL_TREE;
1719 switch (code)
1721 CASE_CONVERT:
1722 case FLOAT_EXPR:
1723 case FIX_TRUNC_EXPR:
1724 case FIXED_CONVERT_EXPR:
1725 return fold_convert_const (code, type, arg0);
1727 case ADDR_SPACE_CONVERT_EXPR:
1728 /* If the source address is 0, and the source address space
1729 cannot have a valid object at 0, fold to dest type null. */
1730 if (integer_zerop (arg0)
1731 && !(targetm.addr_space.zero_address_valid
1732 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1733 return fold_convert_const (code, type, arg0);
1734 break;
1736 case VIEW_CONVERT_EXPR:
1737 return fold_view_convert_expr (type, arg0);
1739 case NEGATE_EXPR:
1741 /* Can't call fold_negate_const directly here as that doesn't
1742 handle all cases and we might not be able to negate some
1743 constants. */
1744 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1745 if (tem && CONSTANT_CLASS_P (tem))
1746 return tem;
1747 break;
1750 case ABS_EXPR:
1751 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1752 return fold_abs_const (arg0, type);
1753 break;
1755 case CONJ_EXPR:
1756 if (TREE_CODE (arg0) == COMPLEX_CST)
1758 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1759 TREE_TYPE (type));
1760 return build_complex (type, TREE_REALPART (arg0), ipart);
1762 break;
1764 case BIT_NOT_EXPR:
1765 if (TREE_CODE (arg0) == INTEGER_CST)
1766 return fold_not_const (arg0, type);
1767 else if (POLY_INT_CST_P (arg0))
1768 return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1769 /* Perform BIT_NOT_EXPR on each element individually. */
1770 else if (TREE_CODE (arg0) == VECTOR_CST)
1772 tree elem;
1774 /* This can cope with stepped encodings because ~x == -1 - x. */
1775 tree_vector_builder elements;
1776 elements.new_unary_operation (type, arg0, true);
1777 unsigned int i, count = elements.encoded_nelts ();
1778 for (i = 0; i < count; ++i)
1780 elem = VECTOR_CST_ELT (arg0, i);
1781 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1782 if (elem == NULL_TREE)
1783 break;
1784 elements.quick_push (elem);
1786 if (i == count)
1787 return elements.build ();
1789 break;
1791 case TRUTH_NOT_EXPR:
1792 if (TREE_CODE (arg0) == INTEGER_CST)
1793 return constant_boolean_node (integer_zerop (arg0), type);
1794 break;
1796 case REALPART_EXPR:
1797 if (TREE_CODE (arg0) == COMPLEX_CST)
1798 return fold_convert (type, TREE_REALPART (arg0));
1799 break;
1801 case IMAGPART_EXPR:
1802 if (TREE_CODE (arg0) == COMPLEX_CST)
1803 return fold_convert (type, TREE_IMAGPART (arg0));
1804 break;
1806 case VEC_UNPACK_LO_EXPR:
1807 case VEC_UNPACK_HI_EXPR:
1808 case VEC_UNPACK_FLOAT_LO_EXPR:
1809 case VEC_UNPACK_FLOAT_HI_EXPR:
1811 unsigned int out_nelts, in_nelts, i;
1812 enum tree_code subcode;
1814 if (TREE_CODE (arg0) != VECTOR_CST)
1815 return NULL_TREE;
1817 in_nelts = VECTOR_CST_NELTS (arg0);
1818 out_nelts = in_nelts / 2;
1819 gcc_assert (out_nelts == TYPE_VECTOR_SUBPARTS (type));
1821 unsigned int offset = 0;
1822 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1823 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1824 offset = out_nelts;
1826 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1827 subcode = NOP_EXPR;
1828 else
1829 subcode = FLOAT_EXPR;
1831 tree_vector_builder elts (type, out_nelts, 1);
1832 for (i = 0; i < out_nelts; i++)
1834 tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1835 VECTOR_CST_ELT (arg0, i + offset));
1836 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1837 return NULL_TREE;
1838 elts.quick_push (elt);
1841 return elts.build ();
1844 case VEC_DUPLICATE_EXPR:
1845 if (CONSTANT_CLASS_P (arg0))
1846 return build_vector_from_val (type, arg0);
1847 return NULL_TREE;
1849 default:
1850 break;
1853 return NULL_TREE;
1856 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1857 indicates which particular sizetype to create. */
1859 tree
1860 size_int_kind (poly_int64 number, enum size_type_kind kind)
1862 return build_int_cst (sizetype_tab[(int) kind], number);
1865 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1866 is a tree code. The type of the result is taken from the operands.
1867 Both must be equivalent integer types, ala int_binop_types_match_p.
1868 If the operands are constant, so is the result. */
1870 tree
1871 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1873 tree type = TREE_TYPE (arg0);
1875 if (arg0 == error_mark_node || arg1 == error_mark_node)
1876 return error_mark_node;
1878 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1879 TREE_TYPE (arg1)));
1881 /* Handle the special case of two poly_int constants faster. */
1882 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1884 /* And some specific cases even faster than that. */
1885 if (code == PLUS_EXPR)
1887 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1888 return arg1;
1889 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1890 return arg0;
1892 else if (code == MINUS_EXPR)
1894 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1895 return arg0;
1897 else if (code == MULT_EXPR)
1899 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1900 return arg1;
1903 /* Handle general case of two integer constants. For sizetype
1904 constant calculations we always want to know about overflow,
1905 even in the unsigned case. */
1906 tree res = int_const_binop_1 (code, arg0, arg1, -1);
1907 if (res != NULL_TREE)
1908 return res;
1911 return fold_build2_loc (loc, code, type, arg0, arg1);
1914 /* Given two values, either both of sizetype or both of bitsizetype,
1915 compute the difference between the two values. Return the value
1916 in signed type corresponding to the type of the operands. */
1918 tree
1919 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1921 tree type = TREE_TYPE (arg0);
1922 tree ctype;
1924 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1925 TREE_TYPE (arg1)));
1927 /* If the type is already signed, just do the simple thing. */
1928 if (!TYPE_UNSIGNED (type))
1929 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1931 if (type == sizetype)
1932 ctype = ssizetype;
1933 else if (type == bitsizetype)
1934 ctype = sbitsizetype;
1935 else
1936 ctype = signed_type_for (type);
1938 /* If either operand is not a constant, do the conversions to the signed
1939 type and subtract. The hardware will do the right thing with any
1940 overflow in the subtraction. */
1941 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1942 return size_binop_loc (loc, MINUS_EXPR,
1943 fold_convert_loc (loc, ctype, arg0),
1944 fold_convert_loc (loc, ctype, arg1));
1946 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1947 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1948 overflow) and negate (which can't either). Special-case a result
1949 of zero while we're here. */
1950 if (tree_int_cst_equal (arg0, arg1))
1951 return build_int_cst (ctype, 0);
1952 else if (tree_int_cst_lt (arg1, arg0))
1953 return fold_convert_loc (loc, ctype,
1954 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1955 else
1956 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1957 fold_convert_loc (loc, ctype,
1958 size_binop_loc (loc,
1959 MINUS_EXPR,
1960 arg1, arg0)));
1963 /* A subroutine of fold_convert_const handling conversions of an
1964 INTEGER_CST to another integer type. */
1966 static tree
1967 fold_convert_const_int_from_int (tree type, const_tree arg1)
1969 /* Given an integer constant, make new constant with new type,
1970 appropriately sign-extended or truncated. Use widest_int
1971 so that any extension is done according ARG1's type. */
1972 return force_fit_type (type, wi::to_widest (arg1),
1973 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1974 TREE_OVERFLOW (arg1));
1977 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1978 to an integer type. */
1980 static tree
1981 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1983 bool overflow = false;
1984 tree t;
1986 /* The following code implements the floating point to integer
1987 conversion rules required by the Java Language Specification,
1988 that IEEE NaNs are mapped to zero and values that overflow
1989 the target precision saturate, i.e. values greater than
1990 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1991 are mapped to INT_MIN. These semantics are allowed by the
1992 C and C++ standards that simply state that the behavior of
1993 FP-to-integer conversion is unspecified upon overflow. */
1995 wide_int val;
1996 REAL_VALUE_TYPE r;
1997 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1999 switch (code)
2001 case FIX_TRUNC_EXPR:
2002 real_trunc (&r, VOIDmode, &x);
2003 break;
2005 default:
2006 gcc_unreachable ();
2009 /* If R is NaN, return zero and show we have an overflow. */
2010 if (REAL_VALUE_ISNAN (r))
2012 overflow = true;
2013 val = wi::zero (TYPE_PRECISION (type));
2016 /* See if R is less than the lower bound or greater than the
2017 upper bound. */
2019 if (! overflow)
2021 tree lt = TYPE_MIN_VALUE (type);
2022 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2023 if (real_less (&r, &l))
2025 overflow = true;
2026 val = wi::to_wide (lt);
2030 if (! overflow)
2032 tree ut = TYPE_MAX_VALUE (type);
2033 if (ut)
2035 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2036 if (real_less (&u, &r))
2038 overflow = true;
2039 val = wi::to_wide (ut);
2044 if (! overflow)
2045 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2047 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2048 return t;
2051 /* A subroutine of fold_convert_const handling conversions of a
2052 FIXED_CST to an integer type. */
2054 static tree
2055 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2057 tree t;
2058 double_int temp, temp_trunc;
2059 scalar_mode mode;
2061 /* Right shift FIXED_CST to temp by fbit. */
2062 temp = TREE_FIXED_CST (arg1).data;
2063 mode = TREE_FIXED_CST (arg1).mode;
2064 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2066 temp = temp.rshift (GET_MODE_FBIT (mode),
2067 HOST_BITS_PER_DOUBLE_INT,
2068 SIGNED_FIXED_POINT_MODE_P (mode));
2070 /* Left shift temp to temp_trunc by fbit. */
2071 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2072 HOST_BITS_PER_DOUBLE_INT,
2073 SIGNED_FIXED_POINT_MODE_P (mode));
2075 else
2077 temp = double_int_zero;
2078 temp_trunc = double_int_zero;
2081 /* If FIXED_CST is negative, we need to round the value toward 0.
2082 By checking if the fractional bits are not zero to add 1 to temp. */
2083 if (SIGNED_FIXED_POINT_MODE_P (mode)
2084 && temp_trunc.is_negative ()
2085 && TREE_FIXED_CST (arg1).data != temp_trunc)
2086 temp += double_int_one;
2088 /* Given a fixed-point constant, make new constant with new type,
2089 appropriately sign-extended or truncated. */
2090 t = force_fit_type (type, temp, -1,
2091 (temp.is_negative ()
2092 && (TYPE_UNSIGNED (type)
2093 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2094 | TREE_OVERFLOW (arg1));
2096 return t;
2099 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2100 to another floating point type. */
2102 static tree
2103 fold_convert_const_real_from_real (tree type, const_tree arg1)
2105 REAL_VALUE_TYPE value;
2106 tree t;
2108 /* Don't perform the operation if flag_signaling_nans is on
2109 and the operand is a signaling NaN. */
2110 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2111 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2112 return NULL_TREE;
2114 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2115 t = build_real (type, value);
2117 /* If converting an infinity or NAN to a representation that doesn't
2118 have one, set the overflow bit so that we can produce some kind of
2119 error message at the appropriate point if necessary. It's not the
2120 most user-friendly message, but it's better than nothing. */
2121 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2122 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2123 TREE_OVERFLOW (t) = 1;
2124 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2125 && !MODE_HAS_NANS (TYPE_MODE (type)))
2126 TREE_OVERFLOW (t) = 1;
2127 /* Regular overflow, conversion produced an infinity in a mode that
2128 can't represent them. */
2129 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2130 && REAL_VALUE_ISINF (value)
2131 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2132 TREE_OVERFLOW (t) = 1;
2133 else
2134 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2135 return t;
2138 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2139 to a floating point type. */
2141 static tree
2142 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2144 REAL_VALUE_TYPE value;
2145 tree t;
2147 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2148 &TREE_FIXED_CST (arg1));
2149 t = build_real (type, value);
2151 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2152 return t;
2155 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2156 to another fixed-point type. */
2158 static tree
2159 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2161 FIXED_VALUE_TYPE value;
2162 tree t;
2163 bool overflow_p;
2165 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2166 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2167 t = build_fixed (type, value);
2169 /* Propagate overflow flags. */
2170 if (overflow_p | TREE_OVERFLOW (arg1))
2171 TREE_OVERFLOW (t) = 1;
2172 return t;
2175 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2176 to a fixed-point type. */
2178 static tree
2179 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2181 FIXED_VALUE_TYPE value;
2182 tree t;
2183 bool overflow_p;
2184 double_int di;
2186 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2188 di.low = TREE_INT_CST_ELT (arg1, 0);
2189 if (TREE_INT_CST_NUNITS (arg1) == 1)
2190 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2191 else
2192 di.high = TREE_INT_CST_ELT (arg1, 1);
2194 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2195 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2196 TYPE_SATURATING (type));
2197 t = build_fixed (type, value);
2199 /* Propagate overflow flags. */
2200 if (overflow_p | TREE_OVERFLOW (arg1))
2201 TREE_OVERFLOW (t) = 1;
2202 return t;
2205 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2206 to a fixed-point type. */
2208 static tree
2209 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2211 FIXED_VALUE_TYPE value;
2212 tree t;
2213 bool overflow_p;
2215 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2216 &TREE_REAL_CST (arg1),
2217 TYPE_SATURATING (type));
2218 t = build_fixed (type, value);
2220 /* Propagate overflow flags. */
2221 if (overflow_p | TREE_OVERFLOW (arg1))
2222 TREE_OVERFLOW (t) = 1;
2223 return t;
2226 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2227 type TYPE. If no simplification can be done return NULL_TREE. */
2229 static tree
2230 fold_convert_const (enum tree_code code, tree type, tree arg1)
2232 tree arg_type = TREE_TYPE (arg1);
2233 if (arg_type == type)
2234 return arg1;
2236 /* We can't widen types, since the runtime value could overflow the
2237 original type before being extended to the new type. */
2238 if (POLY_INT_CST_P (arg1)
2239 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2240 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2241 return build_poly_int_cst (type,
2242 poly_wide_int::from (poly_int_cst_value (arg1),
2243 TYPE_PRECISION (type),
2244 TYPE_SIGN (arg_type)));
2246 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2247 || TREE_CODE (type) == OFFSET_TYPE)
2249 if (TREE_CODE (arg1) == INTEGER_CST)
2250 return fold_convert_const_int_from_int (type, arg1);
2251 else if (TREE_CODE (arg1) == REAL_CST)
2252 return fold_convert_const_int_from_real (code, type, arg1);
2253 else if (TREE_CODE (arg1) == FIXED_CST)
2254 return fold_convert_const_int_from_fixed (type, arg1);
2256 else if (TREE_CODE (type) == REAL_TYPE)
2258 if (TREE_CODE (arg1) == INTEGER_CST)
2259 return build_real_from_int_cst (type, arg1);
2260 else if (TREE_CODE (arg1) == REAL_CST)
2261 return fold_convert_const_real_from_real (type, arg1);
2262 else if (TREE_CODE (arg1) == FIXED_CST)
2263 return fold_convert_const_real_from_fixed (type, arg1);
2265 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2267 if (TREE_CODE (arg1) == FIXED_CST)
2268 return fold_convert_const_fixed_from_fixed (type, arg1);
2269 else if (TREE_CODE (arg1) == INTEGER_CST)
2270 return fold_convert_const_fixed_from_int (type, arg1);
2271 else if (TREE_CODE (arg1) == REAL_CST)
2272 return fold_convert_const_fixed_from_real (type, arg1);
2274 else if (TREE_CODE (type) == VECTOR_TYPE)
2276 if (TREE_CODE (arg1) == VECTOR_CST
2277 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2279 tree elttype = TREE_TYPE (type);
2280 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2281 /* We can't handle steps directly when extending, since the
2282 values need to wrap at the original precision first. */
2283 bool step_ok_p
2284 = (INTEGRAL_TYPE_P (elttype)
2285 && INTEGRAL_TYPE_P (arg1_elttype)
2286 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2287 tree_vector_builder v;
2288 if (!v.new_unary_operation (type, arg1, step_ok_p))
2289 return NULL_TREE;
2290 unsigned int len = v.encoded_nelts ();
2291 for (unsigned int i = 0; i < len; ++i)
2293 tree elt = VECTOR_CST_ELT (arg1, i);
2294 tree cvt = fold_convert_const (code, elttype, elt);
2295 if (cvt == NULL_TREE)
2296 return NULL_TREE;
2297 v.quick_push (cvt);
2299 return v.build ();
2302 return NULL_TREE;
2305 /* Construct a vector of zero elements of vector type TYPE. */
2307 static tree
2308 build_zero_vector (tree type)
2310 tree t;
2312 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2313 return build_vector_from_val (type, t);
2316 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2318 bool
2319 fold_convertible_p (const_tree type, const_tree arg)
2321 tree orig = TREE_TYPE (arg);
2323 if (type == orig)
2324 return true;
2326 if (TREE_CODE (arg) == ERROR_MARK
2327 || TREE_CODE (type) == ERROR_MARK
2328 || TREE_CODE (orig) == ERROR_MARK)
2329 return false;
2331 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2332 return true;
2334 switch (TREE_CODE (type))
2336 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2337 case POINTER_TYPE: case REFERENCE_TYPE:
2338 case OFFSET_TYPE:
2339 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2340 || TREE_CODE (orig) == OFFSET_TYPE);
2342 case REAL_TYPE:
2343 case FIXED_POINT_TYPE:
2344 case VECTOR_TYPE:
2345 case VOID_TYPE:
2346 return TREE_CODE (type) == TREE_CODE (orig);
2348 default:
2349 return false;
2353 /* Convert expression ARG to type TYPE. Used by the middle-end for
2354 simple conversions in preference to calling the front-end's convert. */
2356 tree
2357 fold_convert_loc (location_t loc, tree type, tree arg)
2359 tree orig = TREE_TYPE (arg);
2360 tree tem;
2362 if (type == orig)
2363 return arg;
2365 if (TREE_CODE (arg) == ERROR_MARK
2366 || TREE_CODE (type) == ERROR_MARK
2367 || TREE_CODE (orig) == ERROR_MARK)
2368 return error_mark_node;
2370 switch (TREE_CODE (type))
2372 case POINTER_TYPE:
2373 case REFERENCE_TYPE:
2374 /* Handle conversions between pointers to different address spaces. */
2375 if (POINTER_TYPE_P (orig)
2376 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2377 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2378 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2379 /* fall through */
2381 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2382 case OFFSET_TYPE:
2383 if (TREE_CODE (arg) == INTEGER_CST)
2385 tem = fold_convert_const (NOP_EXPR, type, arg);
2386 if (tem != NULL_TREE)
2387 return tem;
2389 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2390 || TREE_CODE (orig) == OFFSET_TYPE)
2391 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2392 if (TREE_CODE (orig) == COMPLEX_TYPE)
2393 return fold_convert_loc (loc, type,
2394 fold_build1_loc (loc, REALPART_EXPR,
2395 TREE_TYPE (orig), arg));
2396 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2397 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2398 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2400 case REAL_TYPE:
2401 if (TREE_CODE (arg) == INTEGER_CST)
2403 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2404 if (tem != NULL_TREE)
2405 return tem;
2407 else if (TREE_CODE (arg) == REAL_CST)
2409 tem = fold_convert_const (NOP_EXPR, type, arg);
2410 if (tem != NULL_TREE)
2411 return tem;
2413 else if (TREE_CODE (arg) == FIXED_CST)
2415 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2416 if (tem != NULL_TREE)
2417 return tem;
2420 switch (TREE_CODE (orig))
2422 case INTEGER_TYPE:
2423 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2424 case POINTER_TYPE: case REFERENCE_TYPE:
2425 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2427 case REAL_TYPE:
2428 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2430 case FIXED_POINT_TYPE:
2431 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2433 case COMPLEX_TYPE:
2434 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2435 return fold_convert_loc (loc, type, tem);
2437 default:
2438 gcc_unreachable ();
2441 case FIXED_POINT_TYPE:
2442 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2443 || TREE_CODE (arg) == REAL_CST)
2445 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2446 if (tem != NULL_TREE)
2447 goto fold_convert_exit;
2450 switch (TREE_CODE (orig))
2452 case FIXED_POINT_TYPE:
2453 case INTEGER_TYPE:
2454 case ENUMERAL_TYPE:
2455 case BOOLEAN_TYPE:
2456 case REAL_TYPE:
2457 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2459 case COMPLEX_TYPE:
2460 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2461 return fold_convert_loc (loc, type, tem);
2463 default:
2464 gcc_unreachable ();
2467 case COMPLEX_TYPE:
2468 switch (TREE_CODE (orig))
2470 case INTEGER_TYPE:
2471 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2472 case POINTER_TYPE: case REFERENCE_TYPE:
2473 case REAL_TYPE:
2474 case FIXED_POINT_TYPE:
2475 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2476 fold_convert_loc (loc, TREE_TYPE (type), arg),
2477 fold_convert_loc (loc, TREE_TYPE (type),
2478 integer_zero_node));
2479 case COMPLEX_TYPE:
2481 tree rpart, ipart;
2483 if (TREE_CODE (arg) == COMPLEX_EXPR)
2485 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2486 TREE_OPERAND (arg, 0));
2487 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2488 TREE_OPERAND (arg, 1));
2489 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2492 arg = save_expr (arg);
2493 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2494 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2495 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2496 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2497 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2500 default:
2501 gcc_unreachable ();
2504 case VECTOR_TYPE:
2505 if (integer_zerop (arg))
2506 return build_zero_vector (type);
2507 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2508 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2509 || TREE_CODE (orig) == VECTOR_TYPE);
2510 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2512 case VOID_TYPE:
2513 tem = fold_ignored_result (arg);
2514 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2516 default:
2517 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2518 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2519 gcc_unreachable ();
2521 fold_convert_exit:
2522 protected_set_expr_location_unshare (tem, loc);
2523 return tem;
2526 /* Return false if expr can be assumed not to be an lvalue, true
2527 otherwise. */
2529 static bool
2530 maybe_lvalue_p (const_tree x)
2532 /* We only need to wrap lvalue tree codes. */
2533 switch (TREE_CODE (x))
2535 case VAR_DECL:
2536 case PARM_DECL:
2537 case RESULT_DECL:
2538 case LABEL_DECL:
2539 case FUNCTION_DECL:
2540 case SSA_NAME:
2542 case COMPONENT_REF:
2543 case MEM_REF:
2544 case INDIRECT_REF:
2545 case ARRAY_REF:
2546 case ARRAY_RANGE_REF:
2547 case BIT_FIELD_REF:
2548 case OBJ_TYPE_REF:
2550 case REALPART_EXPR:
2551 case IMAGPART_EXPR:
2552 case PREINCREMENT_EXPR:
2553 case PREDECREMENT_EXPR:
2554 case SAVE_EXPR:
2555 case TRY_CATCH_EXPR:
2556 case WITH_CLEANUP_EXPR:
2557 case COMPOUND_EXPR:
2558 case MODIFY_EXPR:
2559 case TARGET_EXPR:
2560 case COND_EXPR:
2561 case BIND_EXPR:
2562 break;
2564 default:
2565 /* Assume the worst for front-end tree codes. */
2566 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2567 break;
2568 return false;
2571 return true;
2574 /* Return an expr equal to X but certainly not valid as an lvalue. */
2576 tree
2577 non_lvalue_loc (location_t loc, tree x)
2579 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2580 us. */
2581 if (in_gimple_form)
2582 return x;
2584 if (! maybe_lvalue_p (x))
2585 return x;
2586 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2589 /* When pedantic, return an expr equal to X but certainly not valid as a
2590 pedantic lvalue. Otherwise, return X. */
2592 static tree
2593 pedantic_non_lvalue_loc (location_t loc, tree x)
2595 return protected_set_expr_location_unshare (x, loc);
2598 /* Given a tree comparison code, return the code that is the logical inverse.
2599 It is generally not safe to do this for floating-point comparisons, except
2600 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2601 ERROR_MARK in this case. */
2603 enum tree_code
2604 invert_tree_comparison (enum tree_code code, bool honor_nans)
2606 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2607 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2608 return ERROR_MARK;
2610 switch (code)
2612 case EQ_EXPR:
2613 return NE_EXPR;
2614 case NE_EXPR:
2615 return EQ_EXPR;
2616 case GT_EXPR:
2617 return honor_nans ? UNLE_EXPR : LE_EXPR;
2618 case GE_EXPR:
2619 return honor_nans ? UNLT_EXPR : LT_EXPR;
2620 case LT_EXPR:
2621 return honor_nans ? UNGE_EXPR : GE_EXPR;
2622 case LE_EXPR:
2623 return honor_nans ? UNGT_EXPR : GT_EXPR;
2624 case LTGT_EXPR:
2625 return UNEQ_EXPR;
2626 case UNEQ_EXPR:
2627 return LTGT_EXPR;
2628 case UNGT_EXPR:
2629 return LE_EXPR;
2630 case UNGE_EXPR:
2631 return LT_EXPR;
2632 case UNLT_EXPR:
2633 return GE_EXPR;
2634 case UNLE_EXPR:
2635 return GT_EXPR;
2636 case ORDERED_EXPR:
2637 return UNORDERED_EXPR;
2638 case UNORDERED_EXPR:
2639 return ORDERED_EXPR;
2640 default:
2641 gcc_unreachable ();
2645 /* Similar, but return the comparison that results if the operands are
2646 swapped. This is safe for floating-point. */
2648 enum tree_code
2649 swap_tree_comparison (enum tree_code code)
2651 switch (code)
2653 case EQ_EXPR:
2654 case NE_EXPR:
2655 case ORDERED_EXPR:
2656 case UNORDERED_EXPR:
2657 case LTGT_EXPR:
2658 case UNEQ_EXPR:
2659 return code;
2660 case GT_EXPR:
2661 return LT_EXPR;
2662 case GE_EXPR:
2663 return LE_EXPR;
2664 case LT_EXPR:
2665 return GT_EXPR;
2666 case LE_EXPR:
2667 return GE_EXPR;
2668 case UNGT_EXPR:
2669 return UNLT_EXPR;
2670 case UNGE_EXPR:
2671 return UNLE_EXPR;
2672 case UNLT_EXPR:
2673 return UNGT_EXPR;
2674 case UNLE_EXPR:
2675 return UNGE_EXPR;
2676 default:
2677 gcc_unreachable ();
2682 /* Convert a comparison tree code from an enum tree_code representation
2683 into a compcode bit-based encoding. This function is the inverse of
2684 compcode_to_comparison. */
2686 static enum comparison_code
2687 comparison_to_compcode (enum tree_code code)
2689 switch (code)
2691 case LT_EXPR:
2692 return COMPCODE_LT;
2693 case EQ_EXPR:
2694 return COMPCODE_EQ;
2695 case LE_EXPR:
2696 return COMPCODE_LE;
2697 case GT_EXPR:
2698 return COMPCODE_GT;
2699 case NE_EXPR:
2700 return COMPCODE_NE;
2701 case GE_EXPR:
2702 return COMPCODE_GE;
2703 case ORDERED_EXPR:
2704 return COMPCODE_ORD;
2705 case UNORDERED_EXPR:
2706 return COMPCODE_UNORD;
2707 case UNLT_EXPR:
2708 return COMPCODE_UNLT;
2709 case UNEQ_EXPR:
2710 return COMPCODE_UNEQ;
2711 case UNLE_EXPR:
2712 return COMPCODE_UNLE;
2713 case UNGT_EXPR:
2714 return COMPCODE_UNGT;
2715 case LTGT_EXPR:
2716 return COMPCODE_LTGT;
2717 case UNGE_EXPR:
2718 return COMPCODE_UNGE;
2719 default:
2720 gcc_unreachable ();
2724 /* Convert a compcode bit-based encoding of a comparison operator back
2725 to GCC's enum tree_code representation. This function is the
2726 inverse of comparison_to_compcode. */
2728 static enum tree_code
2729 compcode_to_comparison (enum comparison_code code)
2731 switch (code)
2733 case COMPCODE_LT:
2734 return LT_EXPR;
2735 case COMPCODE_EQ:
2736 return EQ_EXPR;
2737 case COMPCODE_LE:
2738 return LE_EXPR;
2739 case COMPCODE_GT:
2740 return GT_EXPR;
2741 case COMPCODE_NE:
2742 return NE_EXPR;
2743 case COMPCODE_GE:
2744 return GE_EXPR;
2745 case COMPCODE_ORD:
2746 return ORDERED_EXPR;
2747 case COMPCODE_UNORD:
2748 return UNORDERED_EXPR;
2749 case COMPCODE_UNLT:
2750 return UNLT_EXPR;
2751 case COMPCODE_UNEQ:
2752 return UNEQ_EXPR;
2753 case COMPCODE_UNLE:
2754 return UNLE_EXPR;
2755 case COMPCODE_UNGT:
2756 return UNGT_EXPR;
2757 case COMPCODE_LTGT:
2758 return LTGT_EXPR;
2759 case COMPCODE_UNGE:
2760 return UNGE_EXPR;
2761 default:
2762 gcc_unreachable ();
2766 /* Return a tree for the comparison which is the combination of
2767 doing the AND or OR (depending on CODE) of the two operations LCODE
2768 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2769 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2770 if this makes the transformation invalid. */
2772 tree
2773 combine_comparisons (location_t loc,
2774 enum tree_code code, enum tree_code lcode,
2775 enum tree_code rcode, tree truth_type,
2776 tree ll_arg, tree lr_arg)
2778 bool honor_nans = HONOR_NANS (ll_arg);
2779 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2780 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2781 int compcode;
2783 switch (code)
2785 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2786 compcode = lcompcode & rcompcode;
2787 break;
2789 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2790 compcode = lcompcode | rcompcode;
2791 break;
2793 default:
2794 return NULL_TREE;
2797 if (!honor_nans)
2799 /* Eliminate unordered comparisons, as well as LTGT and ORD
2800 which are not used unless the mode has NaNs. */
2801 compcode &= ~COMPCODE_UNORD;
2802 if (compcode == COMPCODE_LTGT)
2803 compcode = COMPCODE_NE;
2804 else if (compcode == COMPCODE_ORD)
2805 compcode = COMPCODE_TRUE;
2807 else if (flag_trapping_math)
2809 /* Check that the original operation and the optimized ones will trap
2810 under the same condition. */
2811 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2812 && (lcompcode != COMPCODE_EQ)
2813 && (lcompcode != COMPCODE_ORD);
2814 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2815 && (rcompcode != COMPCODE_EQ)
2816 && (rcompcode != COMPCODE_ORD);
2817 bool trap = (compcode & COMPCODE_UNORD) == 0
2818 && (compcode != COMPCODE_EQ)
2819 && (compcode != COMPCODE_ORD);
2821 /* In a short-circuited boolean expression the LHS might be
2822 such that the RHS, if evaluated, will never trap. For
2823 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2824 if neither x nor y is NaN. (This is a mixed blessing: for
2825 example, the expression above will never trap, hence
2826 optimizing it to x < y would be invalid). */
2827 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2828 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2829 rtrap = false;
2831 /* If the comparison was short-circuited, and only the RHS
2832 trapped, we may now generate a spurious trap. */
2833 if (rtrap && !ltrap
2834 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2835 return NULL_TREE;
2837 /* If we changed the conditions that cause a trap, we lose. */
2838 if ((ltrap || rtrap) != trap)
2839 return NULL_TREE;
2842 if (compcode == COMPCODE_TRUE)
2843 return constant_boolean_node (true, truth_type);
2844 else if (compcode == COMPCODE_FALSE)
2845 return constant_boolean_node (false, truth_type);
2846 else
2848 enum tree_code tcode;
2850 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2851 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2855 /* Return nonzero if two operands (typically of the same tree node)
2856 are necessarily equal. FLAGS modifies behavior as follows:
2858 If OEP_ONLY_CONST is set, only return nonzero for constants.
2859 This function tests whether the operands are indistinguishable;
2860 it does not test whether they are equal using C's == operation.
2861 The distinction is important for IEEE floating point, because
2862 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2863 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2865 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2866 even though it may hold multiple values during a function.
2867 This is because a GCC tree node guarantees that nothing else is
2868 executed between the evaluation of its "operands" (which may often
2869 be evaluated in arbitrary order). Hence if the operands themselves
2870 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2871 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2872 unset means assuming isochronic (or instantaneous) tree equivalence.
2873 Unless comparing arbitrary expression trees, such as from different
2874 statements, this flag can usually be left unset.
2876 If OEP_PURE_SAME is set, then pure functions with identical arguments
2877 are considered the same. It is used when the caller has other ways
2878 to ensure that global memory is unchanged in between.
2880 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2881 not values of expressions.
2883 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2884 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2886 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2887 any operand with side effect. This is unnecesarily conservative in the
2888 case we know that arg0 and arg1 are in disjoint code paths (such as in
2889 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2890 addresses with TREE_CONSTANT flag set so we know that &var == &var
2891 even if var is volatile. */
2894 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2896 /* When checking, verify at the outermost operand_equal_p call that
2897 if operand_equal_p returns non-zero then ARG0 and ARG1 has the same
2898 hash value. */
2899 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2901 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2903 if (arg0 != arg1)
2905 inchash::hash hstate0 (0), hstate1 (0);
2906 inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2907 inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2908 hashval_t h0 = hstate0.end ();
2909 hashval_t h1 = hstate1.end ();
2910 gcc_assert (h0 == h1);
2912 return 1;
2914 else
2915 return 0;
2918 /* If either is ERROR_MARK, they aren't equal. */
2919 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2920 || TREE_TYPE (arg0) == error_mark_node
2921 || TREE_TYPE (arg1) == error_mark_node)
2922 return 0;
2924 /* Similar, if either does not have a type (like a released SSA name),
2925 they aren't equal. */
2926 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2927 return 0;
2929 /* We cannot consider pointers to different address space equal. */
2930 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2931 && POINTER_TYPE_P (TREE_TYPE (arg1))
2932 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2933 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2934 return 0;
2936 /* Check equality of integer constants before bailing out due to
2937 precision differences. */
2938 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2940 /* Address of INTEGER_CST is not defined; check that we did not forget
2941 to drop the OEP_ADDRESS_OF flags. */
2942 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2943 return tree_int_cst_equal (arg0, arg1);
2946 if (!(flags & OEP_ADDRESS_OF))
2948 /* If both types don't have the same signedness, then we can't consider
2949 them equal. We must check this before the STRIP_NOPS calls
2950 because they may change the signedness of the arguments. As pointers
2951 strictly don't have a signedness, require either two pointers or
2952 two non-pointers as well. */
2953 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2954 || POINTER_TYPE_P (TREE_TYPE (arg0))
2955 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2956 return 0;
2958 /* If both types don't have the same precision, then it is not safe
2959 to strip NOPs. */
2960 if (element_precision (TREE_TYPE (arg0))
2961 != element_precision (TREE_TYPE (arg1)))
2962 return 0;
2964 STRIP_NOPS (arg0);
2965 STRIP_NOPS (arg1);
2967 #if 0
2968 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
2969 sanity check once the issue is solved. */
2970 else
2971 /* Addresses of conversions and SSA_NAMEs (and many other things)
2972 are not defined. Check that we did not forget to drop the
2973 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2974 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2975 && TREE_CODE (arg0) != SSA_NAME);
2976 #endif
2978 /* In case both args are comparisons but with different comparison
2979 code, try to swap the comparison operands of one arg to produce
2980 a match and compare that variant. */
2981 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2982 && COMPARISON_CLASS_P (arg0)
2983 && COMPARISON_CLASS_P (arg1))
2985 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2987 if (TREE_CODE (arg0) == swap_code)
2988 return operand_equal_p (TREE_OPERAND (arg0, 0),
2989 TREE_OPERAND (arg1, 1), flags)
2990 && operand_equal_p (TREE_OPERAND (arg0, 1),
2991 TREE_OPERAND (arg1, 0), flags);
2994 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2996 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2997 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2999 else if (flags & OEP_ADDRESS_OF)
3001 /* If we are interested in comparing addresses ignore
3002 MEM_REF wrappings of the base that can appear just for
3003 TBAA reasons. */
3004 if (TREE_CODE (arg0) == MEM_REF
3005 && DECL_P (arg1)
3006 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3007 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3008 && integer_zerop (TREE_OPERAND (arg0, 1)))
3009 return 1;
3010 else if (TREE_CODE (arg1) == MEM_REF
3011 && DECL_P (arg0)
3012 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3013 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3014 && integer_zerop (TREE_OPERAND (arg1, 1)))
3015 return 1;
3016 return 0;
3018 else
3019 return 0;
3022 /* When not checking adddresses, this is needed for conversions and for
3023 COMPONENT_REF. Might as well play it safe and always test this. */
3024 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3025 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3026 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3027 && !(flags & OEP_ADDRESS_OF)))
3028 return 0;
3030 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3031 We don't care about side effects in that case because the SAVE_EXPR
3032 takes care of that for us. In all other cases, two expressions are
3033 equal if they have no side effects. If we have two identical
3034 expressions with side effects that should be treated the same due
3035 to the only side effects being identical SAVE_EXPR's, that will
3036 be detected in the recursive calls below.
3037 If we are taking an invariant address of two identical objects
3038 they are necessarily equal as well. */
3039 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3040 && (TREE_CODE (arg0) == SAVE_EXPR
3041 || (flags & OEP_MATCH_SIDE_EFFECTS)
3042 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3043 return 1;
3045 /* Next handle constant cases, those for which we can return 1 even
3046 if ONLY_CONST is set. */
3047 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3048 switch (TREE_CODE (arg0))
3050 case INTEGER_CST:
3051 return tree_int_cst_equal (arg0, arg1);
3053 case FIXED_CST:
3054 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3055 TREE_FIXED_CST (arg1));
3057 case REAL_CST:
3058 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3059 return 1;
3062 if (!HONOR_SIGNED_ZEROS (arg0))
3064 /* If we do not distinguish between signed and unsigned zero,
3065 consider them equal. */
3066 if (real_zerop (arg0) && real_zerop (arg1))
3067 return 1;
3069 return 0;
3071 case VECTOR_CST:
3073 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3074 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3075 return 0;
3077 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3078 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3079 return 0;
3081 unsigned int count = vector_cst_encoded_nelts (arg0);
3082 for (unsigned int i = 0; i < count; ++i)
3083 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3084 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3085 return 0;
3086 return 1;
3089 case COMPLEX_CST:
3090 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3091 flags)
3092 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3093 flags));
3095 case STRING_CST:
3096 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3097 && ! memcmp (TREE_STRING_POINTER (arg0),
3098 TREE_STRING_POINTER (arg1),
3099 TREE_STRING_LENGTH (arg0)));
3101 case ADDR_EXPR:
3102 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3103 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3104 flags | OEP_ADDRESS_OF
3105 | OEP_MATCH_SIDE_EFFECTS);
3106 case CONSTRUCTOR:
3107 /* In GIMPLE empty constructors are allowed in initializers of
3108 aggregates. */
3109 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3110 default:
3111 break;
3114 if (flags & OEP_ONLY_CONST)
3115 return 0;
3117 /* Define macros to test an operand from arg0 and arg1 for equality and a
3118 variant that allows null and views null as being different from any
3119 non-null value. In the latter case, if either is null, the both
3120 must be; otherwise, do the normal comparison. */
3121 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3122 TREE_OPERAND (arg1, N), flags)
3124 #define OP_SAME_WITH_NULL(N) \
3125 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3126 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3128 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3130 case tcc_unary:
3131 /* Two conversions are equal only if signedness and modes match. */
3132 switch (TREE_CODE (arg0))
3134 CASE_CONVERT:
3135 case FIX_TRUNC_EXPR:
3136 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3137 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3138 return 0;
3139 break;
3140 default:
3141 break;
3144 return OP_SAME (0);
3147 case tcc_comparison:
3148 case tcc_binary:
3149 if (OP_SAME (0) && OP_SAME (1))
3150 return 1;
3152 /* For commutative ops, allow the other order. */
3153 return (commutative_tree_code (TREE_CODE (arg0))
3154 && operand_equal_p (TREE_OPERAND (arg0, 0),
3155 TREE_OPERAND (arg1, 1), flags)
3156 && operand_equal_p (TREE_OPERAND (arg0, 1),
3157 TREE_OPERAND (arg1, 0), flags));
3159 case tcc_reference:
3160 /* If either of the pointer (or reference) expressions we are
3161 dereferencing contain a side effect, these cannot be equal,
3162 but their addresses can be. */
3163 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3164 && (TREE_SIDE_EFFECTS (arg0)
3165 || TREE_SIDE_EFFECTS (arg1)))
3166 return 0;
3168 switch (TREE_CODE (arg0))
3170 case INDIRECT_REF:
3171 if (!(flags & OEP_ADDRESS_OF)
3172 && (TYPE_ALIGN (TREE_TYPE (arg0))
3173 != TYPE_ALIGN (TREE_TYPE (arg1))))
3174 return 0;
3175 flags &= ~OEP_ADDRESS_OF;
3176 return OP_SAME (0);
3178 case IMAGPART_EXPR:
3179 /* Require the same offset. */
3180 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3181 TYPE_SIZE (TREE_TYPE (arg1)),
3182 flags & ~OEP_ADDRESS_OF))
3183 return 0;
3185 /* Fallthru. */
3186 case REALPART_EXPR:
3187 case VIEW_CONVERT_EXPR:
3188 return OP_SAME (0);
3190 case TARGET_MEM_REF:
3191 case MEM_REF:
3192 if (!(flags & OEP_ADDRESS_OF))
3194 /* Require equal access sizes */
3195 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3196 && (!TYPE_SIZE (TREE_TYPE (arg0))
3197 || !TYPE_SIZE (TREE_TYPE (arg1))
3198 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3199 TYPE_SIZE (TREE_TYPE (arg1)),
3200 flags)))
3201 return 0;
3202 /* Verify that access happens in similar types. */
3203 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3204 return 0;
3205 /* Verify that accesses are TBAA compatible. */
3206 if (!alias_ptr_types_compatible_p
3207 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3208 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3209 || (MR_DEPENDENCE_CLIQUE (arg0)
3210 != MR_DEPENDENCE_CLIQUE (arg1))
3211 || (MR_DEPENDENCE_BASE (arg0)
3212 != MR_DEPENDENCE_BASE (arg1)))
3213 return 0;
3214 /* Verify that alignment is compatible. */
3215 if (TYPE_ALIGN (TREE_TYPE (arg0))
3216 != TYPE_ALIGN (TREE_TYPE (arg1)))
3217 return 0;
3219 flags &= ~OEP_ADDRESS_OF;
3220 return (OP_SAME (0) && OP_SAME (1)
3221 /* TARGET_MEM_REF require equal extra operands. */
3222 && (TREE_CODE (arg0) != TARGET_MEM_REF
3223 || (OP_SAME_WITH_NULL (2)
3224 && OP_SAME_WITH_NULL (3)
3225 && OP_SAME_WITH_NULL (4))));
3227 case ARRAY_REF:
3228 case ARRAY_RANGE_REF:
3229 if (!OP_SAME (0))
3230 return 0;
3231 flags &= ~OEP_ADDRESS_OF;
3232 /* Compare the array index by value if it is constant first as we
3233 may have different types but same value here. */
3234 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3235 TREE_OPERAND (arg1, 1))
3236 || OP_SAME (1))
3237 && OP_SAME_WITH_NULL (2)
3238 && OP_SAME_WITH_NULL (3)
3239 /* Compare low bound and element size as with OEP_ADDRESS_OF
3240 we have to account for the offset of the ref. */
3241 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3242 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3243 || (operand_equal_p (array_ref_low_bound
3244 (CONST_CAST_TREE (arg0)),
3245 array_ref_low_bound
3246 (CONST_CAST_TREE (arg1)), flags)
3247 && operand_equal_p (array_ref_element_size
3248 (CONST_CAST_TREE (arg0)),
3249 array_ref_element_size
3250 (CONST_CAST_TREE (arg1)),
3251 flags))));
3253 case COMPONENT_REF:
3254 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3255 may be NULL when we're called to compare MEM_EXPRs. */
3256 if (!OP_SAME_WITH_NULL (0)
3257 || !OP_SAME (1))
3258 return 0;
3259 flags &= ~OEP_ADDRESS_OF;
3260 return OP_SAME_WITH_NULL (2);
3262 case BIT_FIELD_REF:
3263 if (!OP_SAME (0))
3264 return 0;
3265 flags &= ~OEP_ADDRESS_OF;
3266 return OP_SAME (1) && OP_SAME (2);
3268 default:
3269 return 0;
3272 case tcc_expression:
3273 switch (TREE_CODE (arg0))
3275 case ADDR_EXPR:
3276 /* Be sure we pass right ADDRESS_OF flag. */
3277 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3278 return operand_equal_p (TREE_OPERAND (arg0, 0),
3279 TREE_OPERAND (arg1, 0),
3280 flags | OEP_ADDRESS_OF);
3282 case TRUTH_NOT_EXPR:
3283 return OP_SAME (0);
3285 case TRUTH_ANDIF_EXPR:
3286 case TRUTH_ORIF_EXPR:
3287 return OP_SAME (0) && OP_SAME (1);
3289 case FMA_EXPR:
3290 case WIDEN_MULT_PLUS_EXPR:
3291 case WIDEN_MULT_MINUS_EXPR:
3292 if (!OP_SAME (2))
3293 return 0;
3294 /* The multiplcation operands are commutative. */
3295 /* FALLTHRU */
3297 case TRUTH_AND_EXPR:
3298 case TRUTH_OR_EXPR:
3299 case TRUTH_XOR_EXPR:
3300 if (OP_SAME (0) && OP_SAME (1))
3301 return 1;
3303 /* Otherwise take into account this is a commutative operation. */
3304 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3305 TREE_OPERAND (arg1, 1), flags)
3306 && operand_equal_p (TREE_OPERAND (arg0, 1),
3307 TREE_OPERAND (arg1, 0), flags));
3309 case COND_EXPR:
3310 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3311 return 0;
3312 flags &= ~OEP_ADDRESS_OF;
3313 return OP_SAME (0);
3315 case BIT_INSERT_EXPR:
3316 /* BIT_INSERT_EXPR has an implict operand as the type precision
3317 of op1. Need to check to make sure they are the same. */
3318 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3319 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3320 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3321 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3322 return false;
3323 /* FALLTHRU */
3325 case VEC_COND_EXPR:
3326 case DOT_PROD_EXPR:
3327 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3329 case MODIFY_EXPR:
3330 case INIT_EXPR:
3331 case COMPOUND_EXPR:
3332 case PREDECREMENT_EXPR:
3333 case PREINCREMENT_EXPR:
3334 case POSTDECREMENT_EXPR:
3335 case POSTINCREMENT_EXPR:
3336 if (flags & OEP_LEXICOGRAPHIC)
3337 return OP_SAME (0) && OP_SAME (1);
3338 return 0;
3340 case CLEANUP_POINT_EXPR:
3341 case EXPR_STMT:
3342 if (flags & OEP_LEXICOGRAPHIC)
3343 return OP_SAME (0);
3344 return 0;
3346 default:
3347 return 0;
3350 case tcc_vl_exp:
3351 switch (TREE_CODE (arg0))
3353 case CALL_EXPR:
3354 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3355 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3356 /* If not both CALL_EXPRs are either internal or normal function
3357 functions, then they are not equal. */
3358 return 0;
3359 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3361 /* If the CALL_EXPRs call different internal functions, then they
3362 are not equal. */
3363 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3364 return 0;
3366 else
3368 /* If the CALL_EXPRs call different functions, then they are not
3369 equal. */
3370 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3371 flags))
3372 return 0;
3375 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3377 unsigned int cef = call_expr_flags (arg0);
3378 if (flags & OEP_PURE_SAME)
3379 cef &= ECF_CONST | ECF_PURE;
3380 else
3381 cef &= ECF_CONST;
3382 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3383 return 0;
3386 /* Now see if all the arguments are the same. */
3388 const_call_expr_arg_iterator iter0, iter1;
3389 const_tree a0, a1;
3390 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3391 a1 = first_const_call_expr_arg (arg1, &iter1);
3392 a0 && a1;
3393 a0 = next_const_call_expr_arg (&iter0),
3394 a1 = next_const_call_expr_arg (&iter1))
3395 if (! operand_equal_p (a0, a1, flags))
3396 return 0;
3398 /* If we get here and both argument lists are exhausted
3399 then the CALL_EXPRs are equal. */
3400 return ! (a0 || a1);
3402 default:
3403 return 0;
3406 case tcc_declaration:
3407 /* Consider __builtin_sqrt equal to sqrt. */
3408 return (TREE_CODE (arg0) == FUNCTION_DECL
3409 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3410 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3411 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3413 case tcc_exceptional:
3414 if (TREE_CODE (arg0) == CONSTRUCTOR)
3416 /* In GIMPLE constructors are used only to build vectors from
3417 elements. Individual elements in the constructor must be
3418 indexed in increasing order and form an initial sequence.
3420 We make no effort to compare constructors in generic.
3421 (see sem_variable::equals in ipa-icf which can do so for
3422 constants). */
3423 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3424 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3425 return 0;
3427 /* Be sure that vectors constructed have the same representation.
3428 We only tested element precision and modes to match.
3429 Vectors may be BLKmode and thus also check that the number of
3430 parts match. */
3431 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3432 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3433 return 0;
3435 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3436 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3437 unsigned int len = vec_safe_length (v0);
3439 if (len != vec_safe_length (v1))
3440 return 0;
3442 for (unsigned int i = 0; i < len; i++)
3444 constructor_elt *c0 = &(*v0)[i];
3445 constructor_elt *c1 = &(*v1)[i];
3447 if (!operand_equal_p (c0->value, c1->value, flags)
3448 /* In GIMPLE the indexes can be either NULL or matching i.
3449 Double check this so we won't get false
3450 positives for GENERIC. */
3451 || (c0->index
3452 && (TREE_CODE (c0->index) != INTEGER_CST
3453 || !compare_tree_int (c0->index, i)))
3454 || (c1->index
3455 && (TREE_CODE (c1->index) != INTEGER_CST
3456 || !compare_tree_int (c1->index, i))))
3457 return 0;
3459 return 1;
3461 else if (TREE_CODE (arg0) == STATEMENT_LIST
3462 && (flags & OEP_LEXICOGRAPHIC))
3464 /* Compare the STATEMENT_LISTs. */
3465 tree_stmt_iterator tsi1, tsi2;
3466 tree body1 = CONST_CAST_TREE (arg0);
3467 tree body2 = CONST_CAST_TREE (arg1);
3468 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3469 tsi_next (&tsi1), tsi_next (&tsi2))
3471 /* The lists don't have the same number of statements. */
3472 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3473 return 0;
3474 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3475 return 1;
3476 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3477 OEP_LEXICOGRAPHIC))
3478 return 0;
3481 return 0;
3483 case tcc_statement:
3484 switch (TREE_CODE (arg0))
3486 case RETURN_EXPR:
3487 if (flags & OEP_LEXICOGRAPHIC)
3488 return OP_SAME_WITH_NULL (0);
3489 return 0;
3490 default:
3491 return 0;
3494 default:
3495 return 0;
3498 #undef OP_SAME
3499 #undef OP_SAME_WITH_NULL
3502 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3503 with a different signedness or a narrower precision. */
3505 static bool
3506 operand_equal_for_comparison_p (tree arg0, tree arg1)
3508 if (operand_equal_p (arg0, arg1, 0))
3509 return true;
3511 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3512 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3513 return false;
3515 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3516 and see if the inner values are the same. This removes any
3517 signedness comparison, which doesn't matter here. */
3518 tree op0 = arg0;
3519 tree op1 = arg1;
3520 STRIP_NOPS (op0);
3521 STRIP_NOPS (op1);
3522 if (operand_equal_p (op0, op1, 0))
3523 return true;
3525 /* Discard a single widening conversion from ARG1 and see if the inner
3526 value is the same as ARG0. */
3527 if (CONVERT_EXPR_P (arg1)
3528 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3529 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3530 < TYPE_PRECISION (TREE_TYPE (arg1))
3531 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3532 return true;
3534 return false;
3537 /* See if ARG is an expression that is either a comparison or is performing
3538 arithmetic on comparisons. The comparisons must only be comparing
3539 two different values, which will be stored in *CVAL1 and *CVAL2; if
3540 they are nonzero it means that some operands have already been found.
3541 No variables may be used anywhere else in the expression except in the
3542 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3543 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3545 If this is true, return 1. Otherwise, return zero. */
3547 static int
3548 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3550 enum tree_code code = TREE_CODE (arg);
3551 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3553 /* We can handle some of the tcc_expression cases here. */
3554 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3555 tclass = tcc_unary;
3556 else if (tclass == tcc_expression
3557 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3558 || code == COMPOUND_EXPR))
3559 tclass = tcc_binary;
3561 else if (tclass == tcc_expression && code == SAVE_EXPR
3562 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3564 /* If we've already found a CVAL1 or CVAL2, this expression is
3565 two complex to handle. */
3566 if (*cval1 || *cval2)
3567 return 0;
3569 tclass = tcc_unary;
3570 *save_p = 1;
3573 switch (tclass)
3575 case tcc_unary:
3576 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3578 case tcc_binary:
3579 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3580 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3581 cval1, cval2, save_p));
3583 case tcc_constant:
3584 return 1;
3586 case tcc_expression:
3587 if (code == COND_EXPR)
3588 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3589 cval1, cval2, save_p)
3590 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3591 cval1, cval2, save_p)
3592 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3593 cval1, cval2, save_p));
3594 return 0;
3596 case tcc_comparison:
3597 /* First see if we can handle the first operand, then the second. For
3598 the second operand, we know *CVAL1 can't be zero. It must be that
3599 one side of the comparison is each of the values; test for the
3600 case where this isn't true by failing if the two operands
3601 are the same. */
3603 if (operand_equal_p (TREE_OPERAND (arg, 0),
3604 TREE_OPERAND (arg, 1), 0))
3605 return 0;
3607 if (*cval1 == 0)
3608 *cval1 = TREE_OPERAND (arg, 0);
3609 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3611 else if (*cval2 == 0)
3612 *cval2 = TREE_OPERAND (arg, 0);
3613 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3615 else
3616 return 0;
3618 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3620 else if (*cval2 == 0)
3621 *cval2 = TREE_OPERAND (arg, 1);
3622 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3624 else
3625 return 0;
3627 return 1;
3629 default:
3630 return 0;
3634 /* ARG is a tree that is known to contain just arithmetic operations and
3635 comparisons. Evaluate the operations in the tree substituting NEW0 for
3636 any occurrence of OLD0 as an operand of a comparison and likewise for
3637 NEW1 and OLD1. */
3639 static tree
3640 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3641 tree old1, tree new1)
3643 tree type = TREE_TYPE (arg);
3644 enum tree_code code = TREE_CODE (arg);
3645 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3647 /* We can handle some of the tcc_expression cases here. */
3648 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3649 tclass = tcc_unary;
3650 else if (tclass == tcc_expression
3651 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3652 tclass = tcc_binary;
3654 switch (tclass)
3656 case tcc_unary:
3657 return fold_build1_loc (loc, code, type,
3658 eval_subst (loc, TREE_OPERAND (arg, 0),
3659 old0, new0, old1, new1));
3661 case tcc_binary:
3662 return fold_build2_loc (loc, code, type,
3663 eval_subst (loc, TREE_OPERAND (arg, 0),
3664 old0, new0, old1, new1),
3665 eval_subst (loc, TREE_OPERAND (arg, 1),
3666 old0, new0, old1, new1));
3668 case tcc_expression:
3669 switch (code)
3671 case SAVE_EXPR:
3672 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3673 old1, new1);
3675 case COMPOUND_EXPR:
3676 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3677 old1, new1);
3679 case COND_EXPR:
3680 return fold_build3_loc (loc, code, type,
3681 eval_subst (loc, TREE_OPERAND (arg, 0),
3682 old0, new0, old1, new1),
3683 eval_subst (loc, TREE_OPERAND (arg, 1),
3684 old0, new0, old1, new1),
3685 eval_subst (loc, TREE_OPERAND (arg, 2),
3686 old0, new0, old1, new1));
3687 default:
3688 break;
3690 /* Fall through - ??? */
3692 case tcc_comparison:
3694 tree arg0 = TREE_OPERAND (arg, 0);
3695 tree arg1 = TREE_OPERAND (arg, 1);
3697 /* We need to check both for exact equality and tree equality. The
3698 former will be true if the operand has a side-effect. In that
3699 case, we know the operand occurred exactly once. */
3701 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3702 arg0 = new0;
3703 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3704 arg0 = new1;
3706 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3707 arg1 = new0;
3708 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3709 arg1 = new1;
3711 return fold_build2_loc (loc, code, type, arg0, arg1);
3714 default:
3715 return arg;
3719 /* Return a tree for the case when the result of an expression is RESULT
3720 converted to TYPE and OMITTED was previously an operand of the expression
3721 but is now not needed (e.g., we folded OMITTED * 0).
3723 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3724 the conversion of RESULT to TYPE. */
3726 tree
3727 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3729 tree t = fold_convert_loc (loc, type, result);
3731 /* If the resulting operand is an empty statement, just return the omitted
3732 statement casted to void. */
3733 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3734 return build1_loc (loc, NOP_EXPR, void_type_node,
3735 fold_ignored_result (omitted));
3737 if (TREE_SIDE_EFFECTS (omitted))
3738 return build2_loc (loc, COMPOUND_EXPR, type,
3739 fold_ignored_result (omitted), t);
3741 return non_lvalue_loc (loc, t);
3744 /* Return a tree for the case when the result of an expression is RESULT
3745 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3746 of the expression but are now not needed.
3748 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3749 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3750 evaluated before OMITTED2. Otherwise, if neither has side effects,
3751 just do the conversion of RESULT to TYPE. */
3753 tree
3754 omit_two_operands_loc (location_t loc, tree type, tree result,
3755 tree omitted1, tree omitted2)
3757 tree t = fold_convert_loc (loc, type, result);
3759 if (TREE_SIDE_EFFECTS (omitted2))
3760 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3761 if (TREE_SIDE_EFFECTS (omitted1))
3762 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3764 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3768 /* Return a simplified tree node for the truth-negation of ARG. This
3769 never alters ARG itself. We assume that ARG is an operation that
3770 returns a truth value (0 or 1).
3772 FIXME: one would think we would fold the result, but it causes
3773 problems with the dominator optimizer. */
3775 static tree
3776 fold_truth_not_expr (location_t loc, tree arg)
3778 tree type = TREE_TYPE (arg);
3779 enum tree_code code = TREE_CODE (arg);
3780 location_t loc1, loc2;
3782 /* If this is a comparison, we can simply invert it, except for
3783 floating-point non-equality comparisons, in which case we just
3784 enclose a TRUTH_NOT_EXPR around what we have. */
3786 if (TREE_CODE_CLASS (code) == tcc_comparison)
3788 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3789 if (FLOAT_TYPE_P (op_type)
3790 && flag_trapping_math
3791 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3792 && code != NE_EXPR && code != EQ_EXPR)
3793 return NULL_TREE;
3795 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3796 if (code == ERROR_MARK)
3797 return NULL_TREE;
3799 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3800 TREE_OPERAND (arg, 1));
3801 if (TREE_NO_WARNING (arg))
3802 TREE_NO_WARNING (ret) = 1;
3803 return ret;
3806 switch (code)
3808 case INTEGER_CST:
3809 return constant_boolean_node (integer_zerop (arg), type);
3811 case TRUTH_AND_EXPR:
3812 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3813 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3814 return build2_loc (loc, TRUTH_OR_EXPR, type,
3815 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3816 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3818 case TRUTH_OR_EXPR:
3819 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3820 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3821 return build2_loc (loc, TRUTH_AND_EXPR, type,
3822 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3823 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3825 case TRUTH_XOR_EXPR:
3826 /* Here we can invert either operand. We invert the first operand
3827 unless the second operand is a TRUTH_NOT_EXPR in which case our
3828 result is the XOR of the first operand with the inside of the
3829 negation of the second operand. */
3831 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3832 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3833 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3834 else
3835 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3836 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3837 TREE_OPERAND (arg, 1));
3839 case TRUTH_ANDIF_EXPR:
3840 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3841 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3842 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3843 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3844 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3846 case TRUTH_ORIF_EXPR:
3847 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3848 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3849 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3850 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3851 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3853 case TRUTH_NOT_EXPR:
3854 return TREE_OPERAND (arg, 0);
3856 case COND_EXPR:
3858 tree arg1 = TREE_OPERAND (arg, 1);
3859 tree arg2 = TREE_OPERAND (arg, 2);
3861 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3862 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3864 /* A COND_EXPR may have a throw as one operand, which
3865 then has void type. Just leave void operands
3866 as they are. */
3867 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3868 VOID_TYPE_P (TREE_TYPE (arg1))
3869 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3870 VOID_TYPE_P (TREE_TYPE (arg2))
3871 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3874 case COMPOUND_EXPR:
3875 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3876 return build2_loc (loc, COMPOUND_EXPR, type,
3877 TREE_OPERAND (arg, 0),
3878 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3880 case NON_LVALUE_EXPR:
3881 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3882 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3884 CASE_CONVERT:
3885 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3886 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3888 /* fall through */
3890 case FLOAT_EXPR:
3891 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3892 return build1_loc (loc, TREE_CODE (arg), type,
3893 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3895 case BIT_AND_EXPR:
3896 if (!integer_onep (TREE_OPERAND (arg, 1)))
3897 return NULL_TREE;
3898 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3900 case SAVE_EXPR:
3901 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3903 case CLEANUP_POINT_EXPR:
3904 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3905 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3906 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3908 default:
3909 return NULL_TREE;
3913 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3914 assume that ARG is an operation that returns a truth value (0 or 1
3915 for scalars, 0 or -1 for vectors). Return the folded expression if
3916 folding is successful. Otherwise, return NULL_TREE. */
3918 static tree
3919 fold_invert_truthvalue (location_t loc, tree arg)
3921 tree type = TREE_TYPE (arg);
3922 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3923 ? BIT_NOT_EXPR
3924 : TRUTH_NOT_EXPR,
3925 type, arg);
3928 /* Return a simplified tree node for the truth-negation of ARG. This
3929 never alters ARG itself. We assume that ARG is an operation that
3930 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3932 tree
3933 invert_truthvalue_loc (location_t loc, tree arg)
3935 if (TREE_CODE (arg) == ERROR_MARK)
3936 return arg;
3938 tree type = TREE_TYPE (arg);
3939 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3940 ? BIT_NOT_EXPR
3941 : TRUTH_NOT_EXPR,
3942 type, arg);
3945 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3946 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3947 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
3948 is the original memory reference used to preserve the alias set of
3949 the access. */
3951 static tree
3952 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3953 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3954 int unsignedp, int reversep)
3956 tree result, bftype;
3958 /* Attempt not to lose the access path if possible. */
3959 if (TREE_CODE (orig_inner) == COMPONENT_REF)
3961 tree ninner = TREE_OPERAND (orig_inner, 0);
3962 machine_mode nmode;
3963 HOST_WIDE_INT nbitsize, nbitpos;
3964 tree noffset;
3965 int nunsignedp, nreversep, nvolatilep = 0;
3966 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
3967 &noffset, &nmode, &nunsignedp,
3968 &nreversep, &nvolatilep);
3969 if (base == inner
3970 && noffset == NULL_TREE
3971 && nbitsize >= bitsize
3972 && nbitpos <= bitpos
3973 && bitpos + bitsize <= nbitpos + nbitsize
3974 && !reversep
3975 && !nreversep
3976 && !nvolatilep)
3978 inner = ninner;
3979 bitpos -= nbitpos;
3983 alias_set_type iset = get_alias_set (orig_inner);
3984 if (iset == 0 && get_alias_set (inner) != iset)
3985 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
3986 build_fold_addr_expr (inner),
3987 build_int_cst (ptr_type_node, 0));
3989 if (bitpos == 0 && !reversep)
3991 tree size = TYPE_SIZE (TREE_TYPE (inner));
3992 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3993 || POINTER_TYPE_P (TREE_TYPE (inner)))
3994 && tree_fits_shwi_p (size)
3995 && tree_to_shwi (size) == bitsize)
3996 return fold_convert_loc (loc, type, inner);
3999 bftype = type;
4000 if (TYPE_PRECISION (bftype) != bitsize
4001 || TYPE_UNSIGNED (bftype) == !unsignedp)
4002 bftype = build_nonstandard_integer_type (bitsize, 0);
4004 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4005 bitsize_int (bitsize), bitsize_int (bitpos));
4006 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4008 if (bftype != type)
4009 result = fold_convert_loc (loc, type, result);
4011 return result;
4014 /* Optimize a bit-field compare.
4016 There are two cases: First is a compare against a constant and the
4017 second is a comparison of two items where the fields are at the same
4018 bit position relative to the start of a chunk (byte, halfword, word)
4019 large enough to contain it. In these cases we can avoid the shift
4020 implicit in bitfield extractions.
4022 For constants, we emit a compare of the shifted constant with the
4023 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4024 compared. For two fields at the same position, we do the ANDs with the
4025 similar mask and compare the result of the ANDs.
4027 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4028 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4029 are the left and right operands of the comparison, respectively.
4031 If the optimization described above can be done, we return the resulting
4032 tree. Otherwise we return zero. */
4034 static tree
4035 optimize_bit_field_compare (location_t loc, enum tree_code code,
4036 tree compare_type, tree lhs, tree rhs)
4038 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
4039 tree type = TREE_TYPE (lhs);
4040 tree unsigned_type;
4041 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4042 machine_mode lmode, rmode;
4043 scalar_int_mode nmode;
4044 int lunsignedp, runsignedp;
4045 int lreversep, rreversep;
4046 int lvolatilep = 0, rvolatilep = 0;
4047 tree linner, rinner = NULL_TREE;
4048 tree mask;
4049 tree offset;
4051 /* Get all the information about the extractions being done. If the bit size
4052 if the same as the size of the underlying object, we aren't doing an
4053 extraction at all and so can do nothing. We also don't want to
4054 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4055 then will no longer be able to replace it. */
4056 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
4057 &lunsignedp, &lreversep, &lvolatilep);
4058 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
4059 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
4060 return 0;
4062 if (const_p)
4063 rreversep = lreversep;
4064 else
4066 /* If this is not a constant, we can only do something if bit positions,
4067 sizes, signedness and storage order are the same. */
4068 rinner
4069 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4070 &runsignedp, &rreversep, &rvolatilep);
4072 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
4073 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
4074 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
4075 return 0;
4078 /* Honor the C++ memory model and mimic what RTL expansion does. */
4079 unsigned HOST_WIDE_INT bitstart = 0;
4080 unsigned HOST_WIDE_INT bitend = 0;
4081 if (TREE_CODE (lhs) == COMPONENT_REF)
4083 get_bit_range (&bitstart, &bitend, lhs, &lbitpos, &offset);
4084 if (offset != NULL_TREE)
4085 return 0;
4088 /* See if we can find a mode to refer to this field. We should be able to,
4089 but fail if we can't. */
4090 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4091 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4092 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4093 TYPE_ALIGN (TREE_TYPE (rinner))),
4094 BITS_PER_WORD, false, &nmode))
4095 return 0;
4097 /* Set signed and unsigned types of the precision of this mode for the
4098 shifts below. */
4099 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4101 /* Compute the bit position and size for the new reference and our offset
4102 within it. If the new reference is the same size as the original, we
4103 won't optimize anything, so return zero. */
4104 nbitsize = GET_MODE_BITSIZE (nmode);
4105 nbitpos = lbitpos & ~ (nbitsize - 1);
4106 lbitpos -= nbitpos;
4107 if (nbitsize == lbitsize)
4108 return 0;
4110 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4111 lbitpos = nbitsize - lbitsize - lbitpos;
4113 /* Make the mask to be used against the extracted field. */
4114 mask = build_int_cst_type (unsigned_type, -1);
4115 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4116 mask = const_binop (RSHIFT_EXPR, mask,
4117 size_int (nbitsize - lbitsize - lbitpos));
4119 if (! const_p)
4121 if (nbitpos < 0)
4122 return 0;
4124 /* If not comparing with constant, just rework the comparison
4125 and return. */
4126 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4127 nbitsize, nbitpos, 1, lreversep);
4128 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4129 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4130 nbitsize, nbitpos, 1, rreversep);
4131 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4132 return fold_build2_loc (loc, code, compare_type, t1, t2);
4135 /* Otherwise, we are handling the constant case. See if the constant is too
4136 big for the field. Warn and return a tree for 0 (false) if so. We do
4137 this not only for its own sake, but to avoid having to test for this
4138 error case below. If we didn't, we might generate wrong code.
4140 For unsigned fields, the constant shifted right by the field length should
4141 be all zero. For signed fields, the high-order bits should agree with
4142 the sign bit. */
4144 if (lunsignedp)
4146 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4148 warning (0, "comparison is always %d due to width of bit-field",
4149 code == NE_EXPR);
4150 return constant_boolean_node (code == NE_EXPR, compare_type);
4153 else
4155 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4156 if (tem != 0 && tem != -1)
4158 warning (0, "comparison is always %d due to width of bit-field",
4159 code == NE_EXPR);
4160 return constant_boolean_node (code == NE_EXPR, compare_type);
4164 if (nbitpos < 0)
4165 return 0;
4167 /* Single-bit compares should always be against zero. */
4168 if (lbitsize == 1 && ! integer_zerop (rhs))
4170 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4171 rhs = build_int_cst (type, 0);
4174 /* Make a new bitfield reference, shift the constant over the
4175 appropriate number of bits and mask it with the computed mask
4176 (in case this was a signed field). If we changed it, make a new one. */
4177 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4178 nbitsize, nbitpos, 1, lreversep);
4180 rhs = const_binop (BIT_AND_EXPR,
4181 const_binop (LSHIFT_EXPR,
4182 fold_convert_loc (loc, unsigned_type, rhs),
4183 size_int (lbitpos)),
4184 mask);
4186 lhs = build2_loc (loc, code, compare_type,
4187 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4188 return lhs;
4191 /* Subroutine for fold_truth_andor_1: decode a field reference.
4193 If EXP is a comparison reference, we return the innermost reference.
4195 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4196 set to the starting bit number.
4198 If the innermost field can be completely contained in a mode-sized
4199 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4201 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
4202 otherwise it is not changed.
4204 *PUNSIGNEDP is set to the signedness of the field.
4206 *PREVERSEP is set to the storage order of the field.
4208 *PMASK is set to the mask used. This is either contained in a
4209 BIT_AND_EXPR or derived from the width of the field.
4211 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4213 Return 0 if this is not a component reference or is one that we can't
4214 do anything with. */
4216 static tree
4217 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4218 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4219 int *punsignedp, int *preversep, int *pvolatilep,
4220 tree *pmask, tree *pand_mask)
4222 tree exp = *exp_;
4223 tree outer_type = 0;
4224 tree and_mask = 0;
4225 tree mask, inner, offset;
4226 tree unsigned_type;
4227 unsigned int precision;
4229 /* All the optimizations using this function assume integer fields.
4230 There are problems with FP fields since the type_for_size call
4231 below can fail for, e.g., XFmode. */
4232 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4233 return 0;
4235 /* We are interested in the bare arrangement of bits, so strip everything
4236 that doesn't affect the machine mode. However, record the type of the
4237 outermost expression if it may matter below. */
4238 if (CONVERT_EXPR_P (exp)
4239 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4240 outer_type = TREE_TYPE (exp);
4241 STRIP_NOPS (exp);
4243 if (TREE_CODE (exp) == BIT_AND_EXPR)
4245 and_mask = TREE_OPERAND (exp, 1);
4246 exp = TREE_OPERAND (exp, 0);
4247 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4248 if (TREE_CODE (and_mask) != INTEGER_CST)
4249 return 0;
4252 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4253 punsignedp, preversep, pvolatilep);
4254 if ((inner == exp && and_mask == 0)
4255 || *pbitsize < 0 || offset != 0
4256 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4257 /* Reject out-of-bound accesses (PR79731). */
4258 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4259 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4260 *pbitpos + *pbitsize) < 0))
4261 return 0;
4263 *exp_ = exp;
4265 /* If the number of bits in the reference is the same as the bitsize of
4266 the outer type, then the outer type gives the signedness. Otherwise
4267 (in case of a small bitfield) the signedness is unchanged. */
4268 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4269 *punsignedp = TYPE_UNSIGNED (outer_type);
4271 /* Compute the mask to access the bitfield. */
4272 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4273 precision = TYPE_PRECISION (unsigned_type);
4275 mask = build_int_cst_type (unsigned_type, -1);
4277 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4278 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4280 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4281 if (and_mask != 0)
4282 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4283 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4285 *pmask = mask;
4286 *pand_mask = and_mask;
4287 return inner;
4290 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4291 bit positions and MASK is SIGNED. */
4293 static int
4294 all_ones_mask_p (const_tree mask, unsigned int size)
4296 tree type = TREE_TYPE (mask);
4297 unsigned int precision = TYPE_PRECISION (type);
4299 /* If this function returns true when the type of the mask is
4300 UNSIGNED, then there will be errors. In particular see
4301 gcc.c-torture/execute/990326-1.c. There does not appear to be
4302 any documentation paper trail as to why this is so. But the pre
4303 wide-int worked with that restriction and it has been preserved
4304 here. */
4305 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4306 return false;
4308 return wi::mask (size, false, precision) == wi::to_wide (mask);
4311 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4312 represents the sign bit of EXP's type. If EXP represents a sign
4313 or zero extension, also test VAL against the unextended type.
4314 The return value is the (sub)expression whose sign bit is VAL,
4315 or NULL_TREE otherwise. */
4317 tree
4318 sign_bit_p (tree exp, const_tree val)
4320 int width;
4321 tree t;
4323 /* Tree EXP must have an integral type. */
4324 t = TREE_TYPE (exp);
4325 if (! INTEGRAL_TYPE_P (t))
4326 return NULL_TREE;
4328 /* Tree VAL must be an integer constant. */
4329 if (TREE_CODE (val) != INTEGER_CST
4330 || TREE_OVERFLOW (val))
4331 return NULL_TREE;
4333 width = TYPE_PRECISION (t);
4334 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4335 return exp;
4337 /* Handle extension from a narrower type. */
4338 if (TREE_CODE (exp) == NOP_EXPR
4339 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4340 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4342 return NULL_TREE;
4345 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4346 to be evaluated unconditionally. */
4348 static int
4349 simple_operand_p (const_tree exp)
4351 /* Strip any conversions that don't change the machine mode. */
4352 STRIP_NOPS (exp);
4354 return (CONSTANT_CLASS_P (exp)
4355 || TREE_CODE (exp) == SSA_NAME
4356 || (DECL_P (exp)
4357 && ! TREE_ADDRESSABLE (exp)
4358 && ! TREE_THIS_VOLATILE (exp)
4359 && ! DECL_NONLOCAL (exp)
4360 /* Don't regard global variables as simple. They may be
4361 allocated in ways unknown to the compiler (shared memory,
4362 #pragma weak, etc). */
4363 && ! TREE_PUBLIC (exp)
4364 && ! DECL_EXTERNAL (exp)
4365 /* Weakrefs are not safe to be read, since they can be NULL.
4366 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4367 have DECL_WEAK flag set. */
4368 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4369 /* Loading a static variable is unduly expensive, but global
4370 registers aren't expensive. */
4371 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4374 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4375 to be evaluated unconditionally.
4376 I addition to simple_operand_p, we assume that comparisons, conversions,
4377 and logic-not operations are simple, if their operands are simple, too. */
4379 static bool
4380 simple_operand_p_2 (tree exp)
4382 enum tree_code code;
4384 if (TREE_SIDE_EFFECTS (exp)
4385 || tree_could_trap_p (exp))
4386 return false;
4388 while (CONVERT_EXPR_P (exp))
4389 exp = TREE_OPERAND (exp, 0);
4391 code = TREE_CODE (exp);
4393 if (TREE_CODE_CLASS (code) == tcc_comparison)
4394 return (simple_operand_p (TREE_OPERAND (exp, 0))
4395 && simple_operand_p (TREE_OPERAND (exp, 1)));
4397 if (code == TRUTH_NOT_EXPR)
4398 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4400 return simple_operand_p (exp);
4404 /* The following functions are subroutines to fold_range_test and allow it to
4405 try to change a logical combination of comparisons into a range test.
4407 For example, both
4408 X == 2 || X == 3 || X == 4 || X == 5
4410 X >= 2 && X <= 5
4411 are converted to
4412 (unsigned) (X - 2) <= 3
4414 We describe each set of comparisons as being either inside or outside
4415 a range, using a variable named like IN_P, and then describe the
4416 range with a lower and upper bound. If one of the bounds is omitted,
4417 it represents either the highest or lowest value of the type.
4419 In the comments below, we represent a range by two numbers in brackets
4420 preceded by a "+" to designate being inside that range, or a "-" to
4421 designate being outside that range, so the condition can be inverted by
4422 flipping the prefix. An omitted bound is represented by a "-". For
4423 example, "- [-, 10]" means being outside the range starting at the lowest
4424 possible value and ending at 10, in other words, being greater than 10.
4425 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4426 always false.
4428 We set up things so that the missing bounds are handled in a consistent
4429 manner so neither a missing bound nor "true" and "false" need to be
4430 handled using a special case. */
4432 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4433 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4434 and UPPER1_P are nonzero if the respective argument is an upper bound
4435 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4436 must be specified for a comparison. ARG1 will be converted to ARG0's
4437 type if both are specified. */
4439 static tree
4440 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4441 tree arg1, int upper1_p)
4443 tree tem;
4444 int result;
4445 int sgn0, sgn1;
4447 /* If neither arg represents infinity, do the normal operation.
4448 Else, if not a comparison, return infinity. Else handle the special
4449 comparison rules. Note that most of the cases below won't occur, but
4450 are handled for consistency. */
4452 if (arg0 != 0 && arg1 != 0)
4454 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4455 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4456 STRIP_NOPS (tem);
4457 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4460 if (TREE_CODE_CLASS (code) != tcc_comparison)
4461 return 0;
4463 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4464 for neither. In real maths, we cannot assume open ended ranges are
4465 the same. But, this is computer arithmetic, where numbers are finite.
4466 We can therefore make the transformation of any unbounded range with
4467 the value Z, Z being greater than any representable number. This permits
4468 us to treat unbounded ranges as equal. */
4469 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4470 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4471 switch (code)
4473 case EQ_EXPR:
4474 result = sgn0 == sgn1;
4475 break;
4476 case NE_EXPR:
4477 result = sgn0 != sgn1;
4478 break;
4479 case LT_EXPR:
4480 result = sgn0 < sgn1;
4481 break;
4482 case LE_EXPR:
4483 result = sgn0 <= sgn1;
4484 break;
4485 case GT_EXPR:
4486 result = sgn0 > sgn1;
4487 break;
4488 case GE_EXPR:
4489 result = sgn0 >= sgn1;
4490 break;
4491 default:
4492 gcc_unreachable ();
4495 return constant_boolean_node (result, type);
4498 /* Helper routine for make_range. Perform one step for it, return
4499 new expression if the loop should continue or NULL_TREE if it should
4500 stop. */
4502 tree
4503 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4504 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4505 bool *strict_overflow_p)
4507 tree arg0_type = TREE_TYPE (arg0);
4508 tree n_low, n_high, low = *p_low, high = *p_high;
4509 int in_p = *p_in_p, n_in_p;
4511 switch (code)
4513 case TRUTH_NOT_EXPR:
4514 /* We can only do something if the range is testing for zero. */
4515 if (low == NULL_TREE || high == NULL_TREE
4516 || ! integer_zerop (low) || ! integer_zerop (high))
4517 return NULL_TREE;
4518 *p_in_p = ! in_p;
4519 return arg0;
4521 case EQ_EXPR: case NE_EXPR:
4522 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4523 /* We can only do something if the range is testing for zero
4524 and if the second operand is an integer constant. Note that
4525 saying something is "in" the range we make is done by
4526 complementing IN_P since it will set in the initial case of
4527 being not equal to zero; "out" is leaving it alone. */
4528 if (low == NULL_TREE || high == NULL_TREE
4529 || ! integer_zerop (low) || ! integer_zerop (high)
4530 || TREE_CODE (arg1) != INTEGER_CST)
4531 return NULL_TREE;
4533 switch (code)
4535 case NE_EXPR: /* - [c, c] */
4536 low = high = arg1;
4537 break;
4538 case EQ_EXPR: /* + [c, c] */
4539 in_p = ! in_p, low = high = arg1;
4540 break;
4541 case GT_EXPR: /* - [-, c] */
4542 low = 0, high = arg1;
4543 break;
4544 case GE_EXPR: /* + [c, -] */
4545 in_p = ! in_p, low = arg1, high = 0;
4546 break;
4547 case LT_EXPR: /* - [c, -] */
4548 low = arg1, high = 0;
4549 break;
4550 case LE_EXPR: /* + [-, c] */
4551 in_p = ! in_p, low = 0, high = arg1;
4552 break;
4553 default:
4554 gcc_unreachable ();
4557 /* If this is an unsigned comparison, we also know that EXP is
4558 greater than or equal to zero. We base the range tests we make
4559 on that fact, so we record it here so we can parse existing
4560 range tests. We test arg0_type since often the return type
4561 of, e.g. EQ_EXPR, is boolean. */
4562 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4564 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4565 in_p, low, high, 1,
4566 build_int_cst (arg0_type, 0),
4567 NULL_TREE))
4568 return NULL_TREE;
4570 in_p = n_in_p, low = n_low, high = n_high;
4572 /* If the high bound is missing, but we have a nonzero low
4573 bound, reverse the range so it goes from zero to the low bound
4574 minus 1. */
4575 if (high == 0 && low && ! integer_zerop (low))
4577 in_p = ! in_p;
4578 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4579 build_int_cst (TREE_TYPE (low), 1), 0);
4580 low = build_int_cst (arg0_type, 0);
4584 *p_low = low;
4585 *p_high = high;
4586 *p_in_p = in_p;
4587 return arg0;
4589 case NEGATE_EXPR:
4590 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4591 low and high are non-NULL, then normalize will DTRT. */
4592 if (!TYPE_UNSIGNED (arg0_type)
4593 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4595 if (low == NULL_TREE)
4596 low = TYPE_MIN_VALUE (arg0_type);
4597 if (high == NULL_TREE)
4598 high = TYPE_MAX_VALUE (arg0_type);
4601 /* (-x) IN [a,b] -> x in [-b, -a] */
4602 n_low = range_binop (MINUS_EXPR, exp_type,
4603 build_int_cst (exp_type, 0),
4604 0, high, 1);
4605 n_high = range_binop (MINUS_EXPR, exp_type,
4606 build_int_cst (exp_type, 0),
4607 0, low, 0);
4608 if (n_high != 0 && TREE_OVERFLOW (n_high))
4609 return NULL_TREE;
4610 goto normalize;
4612 case BIT_NOT_EXPR:
4613 /* ~ X -> -X - 1 */
4614 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4615 build_int_cst (exp_type, 1));
4617 case PLUS_EXPR:
4618 case MINUS_EXPR:
4619 if (TREE_CODE (arg1) != INTEGER_CST)
4620 return NULL_TREE;
4622 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4623 move a constant to the other side. */
4624 if (!TYPE_UNSIGNED (arg0_type)
4625 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4626 return NULL_TREE;
4628 /* If EXP is signed, any overflow in the computation is undefined,
4629 so we don't worry about it so long as our computations on
4630 the bounds don't overflow. For unsigned, overflow is defined
4631 and this is exactly the right thing. */
4632 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4633 arg0_type, low, 0, arg1, 0);
4634 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4635 arg0_type, high, 1, arg1, 0);
4636 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4637 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4638 return NULL_TREE;
4640 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4641 *strict_overflow_p = true;
4643 normalize:
4644 /* Check for an unsigned range which has wrapped around the maximum
4645 value thus making n_high < n_low, and normalize it. */
4646 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4648 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4649 build_int_cst (TREE_TYPE (n_high), 1), 0);
4650 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4651 build_int_cst (TREE_TYPE (n_low), 1), 0);
4653 /* If the range is of the form +/- [ x+1, x ], we won't
4654 be able to normalize it. But then, it represents the
4655 whole range or the empty set, so make it
4656 +/- [ -, - ]. */
4657 if (tree_int_cst_equal (n_low, low)
4658 && tree_int_cst_equal (n_high, high))
4659 low = high = 0;
4660 else
4661 in_p = ! in_p;
4663 else
4664 low = n_low, high = n_high;
4666 *p_low = low;
4667 *p_high = high;
4668 *p_in_p = in_p;
4669 return arg0;
4671 CASE_CONVERT:
4672 case NON_LVALUE_EXPR:
4673 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4674 return NULL_TREE;
4676 if (! INTEGRAL_TYPE_P (arg0_type)
4677 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4678 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4679 return NULL_TREE;
4681 n_low = low, n_high = high;
4683 if (n_low != 0)
4684 n_low = fold_convert_loc (loc, arg0_type, n_low);
4686 if (n_high != 0)
4687 n_high = fold_convert_loc (loc, arg0_type, n_high);
4689 /* If we're converting arg0 from an unsigned type, to exp,
4690 a signed type, we will be doing the comparison as unsigned.
4691 The tests above have already verified that LOW and HIGH
4692 are both positive.
4694 So we have to ensure that we will handle large unsigned
4695 values the same way that the current signed bounds treat
4696 negative values. */
4698 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4700 tree high_positive;
4701 tree equiv_type;
4702 /* For fixed-point modes, we need to pass the saturating flag
4703 as the 2nd parameter. */
4704 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4705 equiv_type
4706 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4707 TYPE_SATURATING (arg0_type));
4708 else
4709 equiv_type
4710 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4712 /* A range without an upper bound is, naturally, unbounded.
4713 Since convert would have cropped a very large value, use
4714 the max value for the destination type. */
4715 high_positive
4716 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4717 : TYPE_MAX_VALUE (arg0_type);
4719 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4720 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4721 fold_convert_loc (loc, arg0_type,
4722 high_positive),
4723 build_int_cst (arg0_type, 1));
4725 /* If the low bound is specified, "and" the range with the
4726 range for which the original unsigned value will be
4727 positive. */
4728 if (low != 0)
4730 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4731 1, fold_convert_loc (loc, arg0_type,
4732 integer_zero_node),
4733 high_positive))
4734 return NULL_TREE;
4736 in_p = (n_in_p == in_p);
4738 else
4740 /* Otherwise, "or" the range with the range of the input
4741 that will be interpreted as negative. */
4742 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4743 1, fold_convert_loc (loc, arg0_type,
4744 integer_zero_node),
4745 high_positive))
4746 return NULL_TREE;
4748 in_p = (in_p != n_in_p);
4752 *p_low = n_low;
4753 *p_high = n_high;
4754 *p_in_p = in_p;
4755 return arg0;
4757 default:
4758 return NULL_TREE;
4762 /* Given EXP, a logical expression, set the range it is testing into
4763 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4764 actually being tested. *PLOW and *PHIGH will be made of the same
4765 type as the returned expression. If EXP is not a comparison, we
4766 will most likely not be returning a useful value and range. Set
4767 *STRICT_OVERFLOW_P to true if the return value is only valid
4768 because signed overflow is undefined; otherwise, do not change
4769 *STRICT_OVERFLOW_P. */
4771 tree
4772 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4773 bool *strict_overflow_p)
4775 enum tree_code code;
4776 tree arg0, arg1 = NULL_TREE;
4777 tree exp_type, nexp;
4778 int in_p;
4779 tree low, high;
4780 location_t loc = EXPR_LOCATION (exp);
4782 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4783 and see if we can refine the range. Some of the cases below may not
4784 happen, but it doesn't seem worth worrying about this. We "continue"
4785 the outer loop when we've changed something; otherwise we "break"
4786 the switch, which will "break" the while. */
4788 in_p = 0;
4789 low = high = build_int_cst (TREE_TYPE (exp), 0);
4791 while (1)
4793 code = TREE_CODE (exp);
4794 exp_type = TREE_TYPE (exp);
4795 arg0 = NULL_TREE;
4797 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4799 if (TREE_OPERAND_LENGTH (exp) > 0)
4800 arg0 = TREE_OPERAND (exp, 0);
4801 if (TREE_CODE_CLASS (code) == tcc_binary
4802 || TREE_CODE_CLASS (code) == tcc_comparison
4803 || (TREE_CODE_CLASS (code) == tcc_expression
4804 && TREE_OPERAND_LENGTH (exp) > 1))
4805 arg1 = TREE_OPERAND (exp, 1);
4807 if (arg0 == NULL_TREE)
4808 break;
4810 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4811 &high, &in_p, strict_overflow_p);
4812 if (nexp == NULL_TREE)
4813 break;
4814 exp = nexp;
4817 /* If EXP is a constant, we can evaluate whether this is true or false. */
4818 if (TREE_CODE (exp) == INTEGER_CST)
4820 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4821 exp, 0, low, 0))
4822 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4823 exp, 1, high, 1)));
4824 low = high = 0;
4825 exp = 0;
4828 *pin_p = in_p, *plow = low, *phigh = high;
4829 return exp;
4832 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
4833 a bitwise check i.e. when
4834 LOW == 0xXX...X00...0
4835 HIGH == 0xXX...X11...1
4836 Return corresponding mask in MASK and stem in VALUE. */
4838 static bool
4839 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
4840 tree *value)
4842 if (TREE_CODE (low) != INTEGER_CST
4843 || TREE_CODE (high) != INTEGER_CST)
4844 return false;
4846 unsigned prec = TYPE_PRECISION (type);
4847 wide_int lo = wi::to_wide (low, prec);
4848 wide_int hi = wi::to_wide (high, prec);
4850 wide_int end_mask = lo ^ hi;
4851 if ((end_mask & (end_mask + 1)) != 0
4852 || (lo & end_mask) != 0)
4853 return false;
4855 wide_int stem_mask = ~end_mask;
4856 wide_int stem = lo & stem_mask;
4857 if (stem != (hi & stem_mask))
4858 return false;
4860 *mask = wide_int_to_tree (type, stem_mask);
4861 *value = wide_int_to_tree (type, stem);
4863 return true;
4866 /* Helper routine for build_range_check and match.pd. Return the type to
4867 perform the check or NULL if it shouldn't be optimized. */
4869 tree
4870 range_check_type (tree etype)
4872 /* First make sure that arithmetics in this type is valid, then make sure
4873 that it wraps around. */
4874 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4875 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4876 TYPE_UNSIGNED (etype));
4878 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4880 tree utype, minv, maxv;
4882 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4883 for the type in question, as we rely on this here. */
4884 utype = unsigned_type_for (etype);
4885 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4886 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4887 build_int_cst (TREE_TYPE (maxv), 1), 1);
4888 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4890 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4891 minv, 1, maxv, 1)))
4892 etype = utype;
4893 else
4894 return NULL_TREE;
4896 return etype;
4899 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4900 type, TYPE, return an expression to test if EXP is in (or out of, depending
4901 on IN_P) the range. Return 0 if the test couldn't be created. */
4903 tree
4904 build_range_check (location_t loc, tree type, tree exp, int in_p,
4905 tree low, tree high)
4907 tree etype = TREE_TYPE (exp), mask, value;
4909 /* Disable this optimization for function pointer expressions
4910 on targets that require function pointer canonicalization. */
4911 if (targetm.have_canonicalize_funcptr_for_compare ()
4912 && TREE_CODE (etype) == POINTER_TYPE
4913 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4914 return NULL_TREE;
4916 if (! in_p)
4918 value = build_range_check (loc, type, exp, 1, low, high);
4919 if (value != 0)
4920 return invert_truthvalue_loc (loc, value);
4922 return 0;
4925 if (low == 0 && high == 0)
4926 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4928 if (low == 0)
4929 return fold_build2_loc (loc, LE_EXPR, type, exp,
4930 fold_convert_loc (loc, etype, high));
4932 if (high == 0)
4933 return fold_build2_loc (loc, GE_EXPR, type, exp,
4934 fold_convert_loc (loc, etype, low));
4936 if (operand_equal_p (low, high, 0))
4937 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4938 fold_convert_loc (loc, etype, low));
4940 if (TREE_CODE (exp) == BIT_AND_EXPR
4941 && maskable_range_p (low, high, etype, &mask, &value))
4942 return fold_build2_loc (loc, EQ_EXPR, type,
4943 fold_build2_loc (loc, BIT_AND_EXPR, etype,
4944 exp, mask),
4945 value);
4947 if (integer_zerop (low))
4949 if (! TYPE_UNSIGNED (etype))
4951 etype = unsigned_type_for (etype);
4952 high = fold_convert_loc (loc, etype, high);
4953 exp = fold_convert_loc (loc, etype, exp);
4955 return build_range_check (loc, type, exp, 1, 0, high);
4958 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4959 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4961 int prec = TYPE_PRECISION (etype);
4963 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
4965 if (TYPE_UNSIGNED (etype))
4967 tree signed_etype = signed_type_for (etype);
4968 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4969 etype
4970 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4971 else
4972 etype = signed_etype;
4973 exp = fold_convert_loc (loc, etype, exp);
4975 return fold_build2_loc (loc, GT_EXPR, type, exp,
4976 build_int_cst (etype, 0));
4980 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4981 This requires wrap-around arithmetics for the type of the expression. */
4982 etype = range_check_type (etype);
4983 if (etype == NULL_TREE)
4984 return NULL_TREE;
4986 if (POINTER_TYPE_P (etype))
4987 etype = unsigned_type_for (etype);
4989 high = fold_convert_loc (loc, etype, high);
4990 low = fold_convert_loc (loc, etype, low);
4991 exp = fold_convert_loc (loc, etype, exp);
4993 value = const_binop (MINUS_EXPR, high, low);
4995 if (value != 0 && !TREE_OVERFLOW (value))
4996 return build_range_check (loc, type,
4997 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4998 1, build_int_cst (etype, 0), value);
5000 return 0;
5003 /* Return the predecessor of VAL in its type, handling the infinite case. */
5005 static tree
5006 range_predecessor (tree val)
5008 tree type = TREE_TYPE (val);
5010 if (INTEGRAL_TYPE_P (type)
5011 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5012 return 0;
5013 else
5014 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5015 build_int_cst (TREE_TYPE (val), 1), 0);
5018 /* Return the successor of VAL in its type, handling the infinite case. */
5020 static tree
5021 range_successor (tree val)
5023 tree type = TREE_TYPE (val);
5025 if (INTEGRAL_TYPE_P (type)
5026 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5027 return 0;
5028 else
5029 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5030 build_int_cst (TREE_TYPE (val), 1), 0);
5033 /* Given two ranges, see if we can merge them into one. Return 1 if we
5034 can, 0 if we can't. Set the output range into the specified parameters. */
5036 bool
5037 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5038 tree high0, int in1_p, tree low1, tree high1)
5040 int no_overlap;
5041 int subset;
5042 int temp;
5043 tree tem;
5044 int in_p;
5045 tree low, high;
5046 int lowequal = ((low0 == 0 && low1 == 0)
5047 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5048 low0, 0, low1, 0)));
5049 int highequal = ((high0 == 0 && high1 == 0)
5050 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5051 high0, 1, high1, 1)));
5053 /* Make range 0 be the range that starts first, or ends last if they
5054 start at the same value. Swap them if it isn't. */
5055 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5056 low0, 0, low1, 0))
5057 || (lowequal
5058 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5059 high1, 1, high0, 1))))
5061 temp = in0_p, in0_p = in1_p, in1_p = temp;
5062 tem = low0, low0 = low1, low1 = tem;
5063 tem = high0, high0 = high1, high1 = tem;
5066 /* Now flag two cases, whether the ranges are disjoint or whether the
5067 second range is totally subsumed in the first. Note that the tests
5068 below are simplified by the ones above. */
5069 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5070 high0, 1, low1, 0));
5071 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5072 high1, 1, high0, 1));
5074 /* We now have four cases, depending on whether we are including or
5075 excluding the two ranges. */
5076 if (in0_p && in1_p)
5078 /* If they don't overlap, the result is false. If the second range
5079 is a subset it is the result. Otherwise, the range is from the start
5080 of the second to the end of the first. */
5081 if (no_overlap)
5082 in_p = 0, low = high = 0;
5083 else if (subset)
5084 in_p = 1, low = low1, high = high1;
5085 else
5086 in_p = 1, low = low1, high = high0;
5089 else if (in0_p && ! in1_p)
5091 /* If they don't overlap, the result is the first range. If they are
5092 equal, the result is false. If the second range is a subset of the
5093 first, and the ranges begin at the same place, we go from just after
5094 the end of the second range to the end of the first. If the second
5095 range is not a subset of the first, or if it is a subset and both
5096 ranges end at the same place, the range starts at the start of the
5097 first range and ends just before the second range.
5098 Otherwise, we can't describe this as a single range. */
5099 if (no_overlap)
5100 in_p = 1, low = low0, high = high0;
5101 else if (lowequal && highequal)
5102 in_p = 0, low = high = 0;
5103 else if (subset && lowequal)
5105 low = range_successor (high1);
5106 high = high0;
5107 in_p = 1;
5108 if (low == 0)
5110 /* We are in the weird situation where high0 > high1 but
5111 high1 has no successor. Punt. */
5112 return 0;
5115 else if (! subset || highequal)
5117 low = low0;
5118 high = range_predecessor (low1);
5119 in_p = 1;
5120 if (high == 0)
5122 /* low0 < low1 but low1 has no predecessor. Punt. */
5123 return 0;
5126 else
5127 return 0;
5130 else if (! in0_p && in1_p)
5132 /* If they don't overlap, the result is the second range. If the second
5133 is a subset of the first, the result is false. Otherwise,
5134 the range starts just after the first range and ends at the
5135 end of the second. */
5136 if (no_overlap)
5137 in_p = 1, low = low1, high = high1;
5138 else if (subset || highequal)
5139 in_p = 0, low = high = 0;
5140 else
5142 low = range_successor (high0);
5143 high = high1;
5144 in_p = 1;
5145 if (low == 0)
5147 /* high1 > high0 but high0 has no successor. Punt. */
5148 return 0;
5153 else
5155 /* The case where we are excluding both ranges. Here the complex case
5156 is if they don't overlap. In that case, the only time we have a
5157 range is if they are adjacent. If the second is a subset of the
5158 first, the result is the first. Otherwise, the range to exclude
5159 starts at the beginning of the first range and ends at the end of the
5160 second. */
5161 if (no_overlap)
5163 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5164 range_successor (high0),
5165 1, low1, 0)))
5166 in_p = 0, low = low0, high = high1;
5167 else
5169 /* Canonicalize - [min, x] into - [-, x]. */
5170 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5171 switch (TREE_CODE (TREE_TYPE (low0)))
5173 case ENUMERAL_TYPE:
5174 if (TYPE_PRECISION (TREE_TYPE (low0))
5175 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5176 break;
5177 /* FALLTHROUGH */
5178 case INTEGER_TYPE:
5179 if (tree_int_cst_equal (low0,
5180 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5181 low0 = 0;
5182 break;
5183 case POINTER_TYPE:
5184 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5185 && integer_zerop (low0))
5186 low0 = 0;
5187 break;
5188 default:
5189 break;
5192 /* Canonicalize - [x, max] into - [x, -]. */
5193 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5194 switch (TREE_CODE (TREE_TYPE (high1)))
5196 case ENUMERAL_TYPE:
5197 if (TYPE_PRECISION (TREE_TYPE (high1))
5198 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5199 break;
5200 /* FALLTHROUGH */
5201 case INTEGER_TYPE:
5202 if (tree_int_cst_equal (high1,
5203 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5204 high1 = 0;
5205 break;
5206 case POINTER_TYPE:
5207 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5208 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5209 high1, 1,
5210 build_int_cst (TREE_TYPE (high1), 1),
5211 1)))
5212 high1 = 0;
5213 break;
5214 default:
5215 break;
5218 /* The ranges might be also adjacent between the maximum and
5219 minimum values of the given type. For
5220 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5221 return + [x + 1, y - 1]. */
5222 if (low0 == 0 && high1 == 0)
5224 low = range_successor (high0);
5225 high = range_predecessor (low1);
5226 if (low == 0 || high == 0)
5227 return 0;
5229 in_p = 1;
5231 else
5232 return 0;
5235 else if (subset)
5236 in_p = 0, low = low0, high = high0;
5237 else
5238 in_p = 0, low = low0, high = high1;
5241 *pin_p = in_p, *plow = low, *phigh = high;
5242 return 1;
5246 /* Subroutine of fold, looking inside expressions of the form
5247 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5248 of the COND_EXPR. This function is being used also to optimize
5249 A op B ? C : A, by reversing the comparison first.
5251 Return a folded expression whose code is not a COND_EXPR
5252 anymore, or NULL_TREE if no folding opportunity is found. */
5254 static tree
5255 fold_cond_expr_with_comparison (location_t loc, tree type,
5256 tree arg0, tree arg1, tree arg2)
5258 enum tree_code comp_code = TREE_CODE (arg0);
5259 tree arg00 = TREE_OPERAND (arg0, 0);
5260 tree arg01 = TREE_OPERAND (arg0, 1);
5261 tree arg1_type = TREE_TYPE (arg1);
5262 tree tem;
5264 STRIP_NOPS (arg1);
5265 STRIP_NOPS (arg2);
5267 /* If we have A op 0 ? A : -A, consider applying the following
5268 transformations:
5270 A == 0? A : -A same as -A
5271 A != 0? A : -A same as A
5272 A >= 0? A : -A same as abs (A)
5273 A > 0? A : -A same as abs (A)
5274 A <= 0? A : -A same as -abs (A)
5275 A < 0? A : -A same as -abs (A)
5277 None of these transformations work for modes with signed
5278 zeros. If A is +/-0, the first two transformations will
5279 change the sign of the result (from +0 to -0, or vice
5280 versa). The last four will fix the sign of the result,
5281 even though the original expressions could be positive or
5282 negative, depending on the sign of A.
5284 Note that all these transformations are correct if A is
5285 NaN, since the two alternatives (A and -A) are also NaNs. */
5286 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5287 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5288 ? real_zerop (arg01)
5289 : integer_zerop (arg01))
5290 && ((TREE_CODE (arg2) == NEGATE_EXPR
5291 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5292 /* In the case that A is of the form X-Y, '-A' (arg2) may
5293 have already been folded to Y-X, check for that. */
5294 || (TREE_CODE (arg1) == MINUS_EXPR
5295 && TREE_CODE (arg2) == MINUS_EXPR
5296 && operand_equal_p (TREE_OPERAND (arg1, 0),
5297 TREE_OPERAND (arg2, 1), 0)
5298 && operand_equal_p (TREE_OPERAND (arg1, 1),
5299 TREE_OPERAND (arg2, 0), 0))))
5300 switch (comp_code)
5302 case EQ_EXPR:
5303 case UNEQ_EXPR:
5304 tem = fold_convert_loc (loc, arg1_type, arg1);
5305 return fold_convert_loc (loc, type, negate_expr (tem));
5306 case NE_EXPR:
5307 case LTGT_EXPR:
5308 return fold_convert_loc (loc, type, arg1);
5309 case UNGE_EXPR:
5310 case UNGT_EXPR:
5311 if (flag_trapping_math)
5312 break;
5313 /* Fall through. */
5314 case GE_EXPR:
5315 case GT_EXPR:
5316 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5317 break;
5318 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5319 return fold_convert_loc (loc, type, tem);
5320 case UNLE_EXPR:
5321 case UNLT_EXPR:
5322 if (flag_trapping_math)
5323 break;
5324 /* FALLTHRU */
5325 case LE_EXPR:
5326 case LT_EXPR:
5327 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5328 break;
5329 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5330 return negate_expr (fold_convert_loc (loc, type, tem));
5331 default:
5332 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5333 break;
5336 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5337 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5338 both transformations are correct when A is NaN: A != 0
5339 is then true, and A == 0 is false. */
5341 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5342 && integer_zerop (arg01) && integer_zerop (arg2))
5344 if (comp_code == NE_EXPR)
5345 return fold_convert_loc (loc, type, arg1);
5346 else if (comp_code == EQ_EXPR)
5347 return build_zero_cst (type);
5350 /* Try some transformations of A op B ? A : B.
5352 A == B? A : B same as B
5353 A != B? A : B same as A
5354 A >= B? A : B same as max (A, B)
5355 A > B? A : B same as max (B, A)
5356 A <= B? A : B same as min (A, B)
5357 A < B? A : B same as min (B, A)
5359 As above, these transformations don't work in the presence
5360 of signed zeros. For example, if A and B are zeros of
5361 opposite sign, the first two transformations will change
5362 the sign of the result. In the last four, the original
5363 expressions give different results for (A=+0, B=-0) and
5364 (A=-0, B=+0), but the transformed expressions do not.
5366 The first two transformations are correct if either A or B
5367 is a NaN. In the first transformation, the condition will
5368 be false, and B will indeed be chosen. In the case of the
5369 second transformation, the condition A != B will be true,
5370 and A will be chosen.
5372 The conversions to max() and min() are not correct if B is
5373 a number and A is not. The conditions in the original
5374 expressions will be false, so all four give B. The min()
5375 and max() versions would give a NaN instead. */
5376 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5377 && operand_equal_for_comparison_p (arg01, arg2)
5378 /* Avoid these transformations if the COND_EXPR may be used
5379 as an lvalue in the C++ front-end. PR c++/19199. */
5380 && (in_gimple_form
5381 || VECTOR_TYPE_P (type)
5382 || (! lang_GNU_CXX ()
5383 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5384 || ! maybe_lvalue_p (arg1)
5385 || ! maybe_lvalue_p (arg2)))
5387 tree comp_op0 = arg00;
5388 tree comp_op1 = arg01;
5389 tree comp_type = TREE_TYPE (comp_op0);
5391 switch (comp_code)
5393 case EQ_EXPR:
5394 return fold_convert_loc (loc, type, arg2);
5395 case NE_EXPR:
5396 return fold_convert_loc (loc, type, arg1);
5397 case LE_EXPR:
5398 case LT_EXPR:
5399 case UNLE_EXPR:
5400 case UNLT_EXPR:
5401 /* In C++ a ?: expression can be an lvalue, so put the
5402 operand which will be used if they are equal first
5403 so that we can convert this back to the
5404 corresponding COND_EXPR. */
5405 if (!HONOR_NANS (arg1))
5407 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5408 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5409 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5410 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5411 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5412 comp_op1, comp_op0);
5413 return fold_convert_loc (loc, type, tem);
5415 break;
5416 case GE_EXPR:
5417 case GT_EXPR:
5418 case UNGE_EXPR:
5419 case UNGT_EXPR:
5420 if (!HONOR_NANS (arg1))
5422 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5423 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5424 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5425 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5426 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5427 comp_op1, comp_op0);
5428 return fold_convert_loc (loc, type, tem);
5430 break;
5431 case UNEQ_EXPR:
5432 if (!HONOR_NANS (arg1))
5433 return fold_convert_loc (loc, type, arg2);
5434 break;
5435 case LTGT_EXPR:
5436 if (!HONOR_NANS (arg1))
5437 return fold_convert_loc (loc, type, arg1);
5438 break;
5439 default:
5440 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5441 break;
5445 return NULL_TREE;
5450 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5451 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5452 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5453 false) >= 2)
5454 #endif
5456 /* EXP is some logical combination of boolean tests. See if we can
5457 merge it into some range test. Return the new tree if so. */
5459 static tree
5460 fold_range_test (location_t loc, enum tree_code code, tree type,
5461 tree op0, tree op1)
5463 int or_op = (code == TRUTH_ORIF_EXPR
5464 || code == TRUTH_OR_EXPR);
5465 int in0_p, in1_p, in_p;
5466 tree low0, low1, low, high0, high1, high;
5467 bool strict_overflow_p = false;
5468 tree tem, lhs, rhs;
5469 const char * const warnmsg = G_("assuming signed overflow does not occur "
5470 "when simplifying range test");
5472 if (!INTEGRAL_TYPE_P (type))
5473 return 0;
5475 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5476 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5478 /* If this is an OR operation, invert both sides; we will invert
5479 again at the end. */
5480 if (or_op)
5481 in0_p = ! in0_p, in1_p = ! in1_p;
5483 /* If both expressions are the same, if we can merge the ranges, and we
5484 can build the range test, return it or it inverted. If one of the
5485 ranges is always true or always false, consider it to be the same
5486 expression as the other. */
5487 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5488 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5489 in1_p, low1, high1)
5490 && (tem = (build_range_check (loc, type,
5491 lhs != 0 ? lhs
5492 : rhs != 0 ? rhs : integer_zero_node,
5493 in_p, low, high))) != 0)
5495 if (strict_overflow_p)
5496 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5497 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5500 /* On machines where the branch cost is expensive, if this is a
5501 short-circuited branch and the underlying object on both sides
5502 is the same, make a non-short-circuit operation. */
5503 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5504 && !flag_sanitize_coverage
5505 && lhs != 0 && rhs != 0
5506 && (code == TRUTH_ANDIF_EXPR
5507 || code == TRUTH_ORIF_EXPR)
5508 && operand_equal_p (lhs, rhs, 0))
5510 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5511 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5512 which cases we can't do this. */
5513 if (simple_operand_p (lhs))
5514 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5515 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5516 type, op0, op1);
5518 else if (!lang_hooks.decls.global_bindings_p ()
5519 && !CONTAINS_PLACEHOLDER_P (lhs))
5521 tree common = save_expr (lhs);
5523 if ((lhs = build_range_check (loc, type, common,
5524 or_op ? ! in0_p : in0_p,
5525 low0, high0)) != 0
5526 && (rhs = build_range_check (loc, type, common,
5527 or_op ? ! in1_p : in1_p,
5528 low1, high1)) != 0)
5530 if (strict_overflow_p)
5531 fold_overflow_warning (warnmsg,
5532 WARN_STRICT_OVERFLOW_COMPARISON);
5533 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5534 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5535 type, lhs, rhs);
5540 return 0;
5543 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5544 bit value. Arrange things so the extra bits will be set to zero if and
5545 only if C is signed-extended to its full width. If MASK is nonzero,
5546 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5548 static tree
5549 unextend (tree c, int p, int unsignedp, tree mask)
5551 tree type = TREE_TYPE (c);
5552 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
5553 tree temp;
5555 if (p == modesize || unsignedp)
5556 return c;
5558 /* We work by getting just the sign bit into the low-order bit, then
5559 into the high-order bit, then sign-extend. We then XOR that value
5560 with C. */
5561 temp = build_int_cst (TREE_TYPE (c),
5562 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
5564 /* We must use a signed type in order to get an arithmetic right shift.
5565 However, we must also avoid introducing accidental overflows, so that
5566 a subsequent call to integer_zerop will work. Hence we must
5567 do the type conversion here. At this point, the constant is either
5568 zero or one, and the conversion to a signed type can never overflow.
5569 We could get an overflow if this conversion is done anywhere else. */
5570 if (TYPE_UNSIGNED (type))
5571 temp = fold_convert (signed_type_for (type), temp);
5573 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5574 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5575 if (mask != 0)
5576 temp = const_binop (BIT_AND_EXPR, temp,
5577 fold_convert (TREE_TYPE (c), mask));
5578 /* If necessary, convert the type back to match the type of C. */
5579 if (TYPE_UNSIGNED (type))
5580 temp = fold_convert (type, temp);
5582 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5585 /* For an expression that has the form
5586 (A && B) || ~B
5588 (A || B) && ~B,
5589 we can drop one of the inner expressions and simplify to
5590 A || ~B
5592 A && ~B
5593 LOC is the location of the resulting expression. OP is the inner
5594 logical operation; the left-hand side in the examples above, while CMPOP
5595 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5596 removing a condition that guards another, as in
5597 (A != NULL && A->...) || A == NULL
5598 which we must not transform. If RHS_ONLY is true, only eliminate the
5599 right-most operand of the inner logical operation. */
5601 static tree
5602 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5603 bool rhs_only)
5605 tree type = TREE_TYPE (cmpop);
5606 enum tree_code code = TREE_CODE (cmpop);
5607 enum tree_code truthop_code = TREE_CODE (op);
5608 tree lhs = TREE_OPERAND (op, 0);
5609 tree rhs = TREE_OPERAND (op, 1);
5610 tree orig_lhs = lhs, orig_rhs = rhs;
5611 enum tree_code rhs_code = TREE_CODE (rhs);
5612 enum tree_code lhs_code = TREE_CODE (lhs);
5613 enum tree_code inv_code;
5615 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5616 return NULL_TREE;
5618 if (TREE_CODE_CLASS (code) != tcc_comparison)
5619 return NULL_TREE;
5621 if (rhs_code == truthop_code)
5623 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5624 if (newrhs != NULL_TREE)
5626 rhs = newrhs;
5627 rhs_code = TREE_CODE (rhs);
5630 if (lhs_code == truthop_code && !rhs_only)
5632 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5633 if (newlhs != NULL_TREE)
5635 lhs = newlhs;
5636 lhs_code = TREE_CODE (lhs);
5640 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5641 if (inv_code == rhs_code
5642 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5643 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5644 return lhs;
5645 if (!rhs_only && inv_code == lhs_code
5646 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5647 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5648 return rhs;
5649 if (rhs != orig_rhs || lhs != orig_lhs)
5650 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5651 lhs, rhs);
5652 return NULL_TREE;
5655 /* Find ways of folding logical expressions of LHS and RHS:
5656 Try to merge two comparisons to the same innermost item.
5657 Look for range tests like "ch >= '0' && ch <= '9'".
5658 Look for combinations of simple terms on machines with expensive branches
5659 and evaluate the RHS unconditionally.
5661 For example, if we have p->a == 2 && p->b == 4 and we can make an
5662 object large enough to span both A and B, we can do this with a comparison
5663 against the object ANDed with the a mask.
5665 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5666 operations to do this with one comparison.
5668 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5669 function and the one above.
5671 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5672 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5674 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5675 two operands.
5677 We return the simplified tree or 0 if no optimization is possible. */
5679 static tree
5680 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5681 tree lhs, tree rhs)
5683 /* If this is the "or" of two comparisons, we can do something if
5684 the comparisons are NE_EXPR. If this is the "and", we can do something
5685 if the comparisons are EQ_EXPR. I.e.,
5686 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5688 WANTED_CODE is this operation code. For single bit fields, we can
5689 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5690 comparison for one-bit fields. */
5692 enum tree_code wanted_code;
5693 enum tree_code lcode, rcode;
5694 tree ll_arg, lr_arg, rl_arg, rr_arg;
5695 tree ll_inner, lr_inner, rl_inner, rr_inner;
5696 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5697 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5698 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5699 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5700 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5701 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5702 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5703 scalar_int_mode lnmode, rnmode;
5704 tree ll_mask, lr_mask, rl_mask, rr_mask;
5705 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5706 tree l_const, r_const;
5707 tree lntype, rntype, result;
5708 HOST_WIDE_INT first_bit, end_bit;
5709 int volatilep;
5711 /* Start by getting the comparison codes. Fail if anything is volatile.
5712 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5713 it were surrounded with a NE_EXPR. */
5715 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5716 return 0;
5718 lcode = TREE_CODE (lhs);
5719 rcode = TREE_CODE (rhs);
5721 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5723 lhs = build2 (NE_EXPR, truth_type, lhs,
5724 build_int_cst (TREE_TYPE (lhs), 0));
5725 lcode = NE_EXPR;
5728 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5730 rhs = build2 (NE_EXPR, truth_type, rhs,
5731 build_int_cst (TREE_TYPE (rhs), 0));
5732 rcode = NE_EXPR;
5735 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5736 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5737 return 0;
5739 ll_arg = TREE_OPERAND (lhs, 0);
5740 lr_arg = TREE_OPERAND (lhs, 1);
5741 rl_arg = TREE_OPERAND (rhs, 0);
5742 rr_arg = TREE_OPERAND (rhs, 1);
5744 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5745 if (simple_operand_p (ll_arg)
5746 && simple_operand_p (lr_arg))
5748 if (operand_equal_p (ll_arg, rl_arg, 0)
5749 && operand_equal_p (lr_arg, rr_arg, 0))
5751 result = combine_comparisons (loc, code, lcode, rcode,
5752 truth_type, ll_arg, lr_arg);
5753 if (result)
5754 return result;
5756 else if (operand_equal_p (ll_arg, rr_arg, 0)
5757 && operand_equal_p (lr_arg, rl_arg, 0))
5759 result = combine_comparisons (loc, code, lcode,
5760 swap_tree_comparison (rcode),
5761 truth_type, ll_arg, lr_arg);
5762 if (result)
5763 return result;
5767 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5768 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5770 /* If the RHS can be evaluated unconditionally and its operands are
5771 simple, it wins to evaluate the RHS unconditionally on machines
5772 with expensive branches. In this case, this isn't a comparison
5773 that can be merged. */
5775 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5776 false) >= 2
5777 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5778 && simple_operand_p (rl_arg)
5779 && simple_operand_p (rr_arg))
5781 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5782 if (code == TRUTH_OR_EXPR
5783 && lcode == NE_EXPR && integer_zerop (lr_arg)
5784 && rcode == NE_EXPR && integer_zerop (rr_arg)
5785 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5786 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5787 return build2_loc (loc, NE_EXPR, truth_type,
5788 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5789 ll_arg, rl_arg),
5790 build_int_cst (TREE_TYPE (ll_arg), 0));
5792 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5793 if (code == TRUTH_AND_EXPR
5794 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5795 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5796 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5797 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5798 return build2_loc (loc, EQ_EXPR, truth_type,
5799 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5800 ll_arg, rl_arg),
5801 build_int_cst (TREE_TYPE (ll_arg), 0));
5804 /* See if the comparisons can be merged. Then get all the parameters for
5805 each side. */
5807 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5808 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5809 return 0;
5811 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5812 volatilep = 0;
5813 ll_inner = decode_field_reference (loc, &ll_arg,
5814 &ll_bitsize, &ll_bitpos, &ll_mode,
5815 &ll_unsignedp, &ll_reversep, &volatilep,
5816 &ll_mask, &ll_and_mask);
5817 lr_inner = decode_field_reference (loc, &lr_arg,
5818 &lr_bitsize, &lr_bitpos, &lr_mode,
5819 &lr_unsignedp, &lr_reversep, &volatilep,
5820 &lr_mask, &lr_and_mask);
5821 rl_inner = decode_field_reference (loc, &rl_arg,
5822 &rl_bitsize, &rl_bitpos, &rl_mode,
5823 &rl_unsignedp, &rl_reversep, &volatilep,
5824 &rl_mask, &rl_and_mask);
5825 rr_inner = decode_field_reference (loc, &rr_arg,
5826 &rr_bitsize, &rr_bitpos, &rr_mode,
5827 &rr_unsignedp, &rr_reversep, &volatilep,
5828 &rr_mask, &rr_and_mask);
5830 /* It must be true that the inner operation on the lhs of each
5831 comparison must be the same if we are to be able to do anything.
5832 Then see if we have constants. If not, the same must be true for
5833 the rhs's. */
5834 if (volatilep
5835 || ll_reversep != rl_reversep
5836 || ll_inner == 0 || rl_inner == 0
5837 || ! operand_equal_p (ll_inner, rl_inner, 0))
5838 return 0;
5840 if (TREE_CODE (lr_arg) == INTEGER_CST
5841 && TREE_CODE (rr_arg) == INTEGER_CST)
5843 l_const = lr_arg, r_const = rr_arg;
5844 lr_reversep = ll_reversep;
5846 else if (lr_reversep != rr_reversep
5847 || lr_inner == 0 || rr_inner == 0
5848 || ! operand_equal_p (lr_inner, rr_inner, 0))
5849 return 0;
5850 else
5851 l_const = r_const = 0;
5853 /* If either comparison code is not correct for our logical operation,
5854 fail. However, we can convert a one-bit comparison against zero into
5855 the opposite comparison against that bit being set in the field. */
5857 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5858 if (lcode != wanted_code)
5860 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5862 /* Make the left operand unsigned, since we are only interested
5863 in the value of one bit. Otherwise we are doing the wrong
5864 thing below. */
5865 ll_unsignedp = 1;
5866 l_const = ll_mask;
5868 else
5869 return 0;
5872 /* This is analogous to the code for l_const above. */
5873 if (rcode != wanted_code)
5875 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5877 rl_unsignedp = 1;
5878 r_const = rl_mask;
5880 else
5881 return 0;
5884 /* See if we can find a mode that contains both fields being compared on
5885 the left. If we can't, fail. Otherwise, update all constants and masks
5886 to be relative to a field of that size. */
5887 first_bit = MIN (ll_bitpos, rl_bitpos);
5888 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5889 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5890 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
5891 volatilep, &lnmode))
5892 return 0;
5894 lnbitsize = GET_MODE_BITSIZE (lnmode);
5895 lnbitpos = first_bit & ~ (lnbitsize - 1);
5896 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5897 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5899 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5901 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5902 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5905 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5906 size_int (xll_bitpos));
5907 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5908 size_int (xrl_bitpos));
5910 if (l_const)
5912 l_const = fold_convert_loc (loc, lntype, l_const);
5913 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5914 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5915 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5916 fold_build1_loc (loc, BIT_NOT_EXPR,
5917 lntype, ll_mask))))
5919 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5921 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5924 if (r_const)
5926 r_const = fold_convert_loc (loc, lntype, r_const);
5927 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5928 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5929 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5930 fold_build1_loc (loc, BIT_NOT_EXPR,
5931 lntype, rl_mask))))
5933 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5935 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5939 /* If the right sides are not constant, do the same for it. Also,
5940 disallow this optimization if a size or signedness mismatch occurs
5941 between the left and right sides. */
5942 if (l_const == 0)
5944 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5945 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5946 /* Make sure the two fields on the right
5947 correspond to the left without being swapped. */
5948 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5949 return 0;
5951 first_bit = MIN (lr_bitpos, rr_bitpos);
5952 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5953 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5954 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
5955 volatilep, &rnmode))
5956 return 0;
5958 rnbitsize = GET_MODE_BITSIZE (rnmode);
5959 rnbitpos = first_bit & ~ (rnbitsize - 1);
5960 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5961 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5963 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5965 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5966 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5969 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5970 rntype, lr_mask),
5971 size_int (xlr_bitpos));
5972 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5973 rntype, rr_mask),
5974 size_int (xrr_bitpos));
5976 /* Make a mask that corresponds to both fields being compared.
5977 Do this for both items being compared. If the operands are the
5978 same size and the bits being compared are in the same position
5979 then we can do this by masking both and comparing the masked
5980 results. */
5981 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5982 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5983 if (lnbitsize == rnbitsize
5984 && xll_bitpos == xlr_bitpos
5985 && lnbitpos >= 0
5986 && rnbitpos >= 0)
5988 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
5989 lntype, lnbitsize, lnbitpos,
5990 ll_unsignedp || rl_unsignedp, ll_reversep);
5991 if (! all_ones_mask_p (ll_mask, lnbitsize))
5992 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5994 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
5995 rntype, rnbitsize, rnbitpos,
5996 lr_unsignedp || rr_unsignedp, lr_reversep);
5997 if (! all_ones_mask_p (lr_mask, rnbitsize))
5998 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6000 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6003 /* There is still another way we can do something: If both pairs of
6004 fields being compared are adjacent, we may be able to make a wider
6005 field containing them both.
6007 Note that we still must mask the lhs/rhs expressions. Furthermore,
6008 the mask must be shifted to account for the shift done by
6009 make_bit_field_ref. */
6010 if (((ll_bitsize + ll_bitpos == rl_bitpos
6011 && lr_bitsize + lr_bitpos == rr_bitpos)
6012 || (ll_bitpos == rl_bitpos + rl_bitsize
6013 && lr_bitpos == rr_bitpos + rr_bitsize))
6014 && ll_bitpos >= 0
6015 && rl_bitpos >= 0
6016 && lr_bitpos >= 0
6017 && rr_bitpos >= 0)
6019 tree type;
6021 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6022 ll_bitsize + rl_bitsize,
6023 MIN (ll_bitpos, rl_bitpos),
6024 ll_unsignedp, ll_reversep);
6025 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6026 lr_bitsize + rr_bitsize,
6027 MIN (lr_bitpos, rr_bitpos),
6028 lr_unsignedp, lr_reversep);
6030 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6031 size_int (MIN (xll_bitpos, xrl_bitpos)));
6032 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6033 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6035 /* Convert to the smaller type before masking out unwanted bits. */
6036 type = lntype;
6037 if (lntype != rntype)
6039 if (lnbitsize > rnbitsize)
6041 lhs = fold_convert_loc (loc, rntype, lhs);
6042 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6043 type = rntype;
6045 else if (lnbitsize < rnbitsize)
6047 rhs = fold_convert_loc (loc, lntype, rhs);
6048 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6049 type = lntype;
6053 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6054 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6056 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6057 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6059 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6062 return 0;
6065 /* Handle the case of comparisons with constants. If there is something in
6066 common between the masks, those bits of the constants must be the same.
6067 If not, the condition is always false. Test for this to avoid generating
6068 incorrect code below. */
6069 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6070 if (! integer_zerop (result)
6071 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6072 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6074 if (wanted_code == NE_EXPR)
6076 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6077 return constant_boolean_node (true, truth_type);
6079 else
6081 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6082 return constant_boolean_node (false, truth_type);
6086 if (lnbitpos < 0)
6087 return 0;
6089 /* Construct the expression we will return. First get the component
6090 reference we will make. Unless the mask is all ones the width of
6091 that field, perform the mask operation. Then compare with the
6092 merged constant. */
6093 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6094 lntype, lnbitsize, lnbitpos,
6095 ll_unsignedp || rl_unsignedp, ll_reversep);
6097 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6098 if (! all_ones_mask_p (ll_mask, lnbitsize))
6099 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6101 return build2_loc (loc, wanted_code, truth_type, result,
6102 const_binop (BIT_IOR_EXPR, l_const, r_const));
6105 /* T is an integer expression that is being multiplied, divided, or taken a
6106 modulus (CODE says which and what kind of divide or modulus) by a
6107 constant C. See if we can eliminate that operation by folding it with
6108 other operations already in T. WIDE_TYPE, if non-null, is a type that
6109 should be used for the computation if wider than our type.
6111 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6112 (X * 2) + (Y * 4). We must, however, be assured that either the original
6113 expression would not overflow or that overflow is undefined for the type
6114 in the language in question.
6116 If we return a non-null expression, it is an equivalent form of the
6117 original computation, but need not be in the original type.
6119 We set *STRICT_OVERFLOW_P to true if the return values depends on
6120 signed overflow being undefined. Otherwise we do not change
6121 *STRICT_OVERFLOW_P. */
6123 static tree
6124 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6125 bool *strict_overflow_p)
6127 /* To avoid exponential search depth, refuse to allow recursion past
6128 three levels. Beyond that (1) it's highly unlikely that we'll find
6129 something interesting and (2) we've probably processed it before
6130 when we built the inner expression. */
6132 static int depth;
6133 tree ret;
6135 if (depth > 3)
6136 return NULL;
6138 depth++;
6139 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6140 depth--;
6142 return ret;
6145 static tree
6146 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6147 bool *strict_overflow_p)
6149 tree type = TREE_TYPE (t);
6150 enum tree_code tcode = TREE_CODE (t);
6151 tree ctype = (wide_type != 0
6152 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6153 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6154 ? wide_type : type);
6155 tree t1, t2;
6156 int same_p = tcode == code;
6157 tree op0 = NULL_TREE, op1 = NULL_TREE;
6158 bool sub_strict_overflow_p;
6160 /* Don't deal with constants of zero here; they confuse the code below. */
6161 if (integer_zerop (c))
6162 return NULL_TREE;
6164 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6165 op0 = TREE_OPERAND (t, 0);
6167 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6168 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6170 /* Note that we need not handle conditional operations here since fold
6171 already handles those cases. So just do arithmetic here. */
6172 switch (tcode)
6174 case INTEGER_CST:
6175 /* For a constant, we can always simplify if we are a multiply
6176 or (for divide and modulus) if it is a multiple of our constant. */
6177 if (code == MULT_EXPR
6178 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6179 TYPE_SIGN (type)))
6181 tree tem = const_binop (code, fold_convert (ctype, t),
6182 fold_convert (ctype, c));
6183 /* If the multiplication overflowed, we lost information on it.
6184 See PR68142 and PR69845. */
6185 if (TREE_OVERFLOW (tem))
6186 return NULL_TREE;
6187 return tem;
6189 break;
6191 CASE_CONVERT: case NON_LVALUE_EXPR:
6192 /* If op0 is an expression ... */
6193 if ((COMPARISON_CLASS_P (op0)
6194 || UNARY_CLASS_P (op0)
6195 || BINARY_CLASS_P (op0)
6196 || VL_EXP_CLASS_P (op0)
6197 || EXPRESSION_CLASS_P (op0))
6198 /* ... and has wrapping overflow, and its type is smaller
6199 than ctype, then we cannot pass through as widening. */
6200 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6201 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6202 && (TYPE_PRECISION (ctype)
6203 > TYPE_PRECISION (TREE_TYPE (op0))))
6204 /* ... or this is a truncation (t is narrower than op0),
6205 then we cannot pass through this narrowing. */
6206 || (TYPE_PRECISION (type)
6207 < TYPE_PRECISION (TREE_TYPE (op0)))
6208 /* ... or signedness changes for division or modulus,
6209 then we cannot pass through this conversion. */
6210 || (code != MULT_EXPR
6211 && (TYPE_UNSIGNED (ctype)
6212 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6213 /* ... or has undefined overflow while the converted to
6214 type has not, we cannot do the operation in the inner type
6215 as that would introduce undefined overflow. */
6216 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6217 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6218 && !TYPE_OVERFLOW_UNDEFINED (type))))
6219 break;
6221 /* Pass the constant down and see if we can make a simplification. If
6222 we can, replace this expression with the inner simplification for
6223 possible later conversion to our or some other type. */
6224 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6225 && TREE_CODE (t2) == INTEGER_CST
6226 && !TREE_OVERFLOW (t2)
6227 && (t1 = extract_muldiv (op0, t2, code,
6228 code == MULT_EXPR ? ctype : NULL_TREE,
6229 strict_overflow_p)) != 0)
6230 return t1;
6231 break;
6233 case ABS_EXPR:
6234 /* If widening the type changes it from signed to unsigned, then we
6235 must avoid building ABS_EXPR itself as unsigned. */
6236 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6238 tree cstype = (*signed_type_for) (ctype);
6239 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6240 != 0)
6242 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6243 return fold_convert (ctype, t1);
6245 break;
6247 /* If the constant is negative, we cannot simplify this. */
6248 if (tree_int_cst_sgn (c) == -1)
6249 break;
6250 /* FALLTHROUGH */
6251 case NEGATE_EXPR:
6252 /* For division and modulus, type can't be unsigned, as e.g.
6253 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6254 For signed types, even with wrapping overflow, this is fine. */
6255 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6256 break;
6257 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6258 != 0)
6259 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6260 break;
6262 case MIN_EXPR: case MAX_EXPR:
6263 /* If widening the type changes the signedness, then we can't perform
6264 this optimization as that changes the result. */
6265 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6266 break;
6268 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6269 sub_strict_overflow_p = false;
6270 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6271 &sub_strict_overflow_p)) != 0
6272 && (t2 = extract_muldiv (op1, c, code, wide_type,
6273 &sub_strict_overflow_p)) != 0)
6275 if (tree_int_cst_sgn (c) < 0)
6276 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6277 if (sub_strict_overflow_p)
6278 *strict_overflow_p = true;
6279 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6280 fold_convert (ctype, t2));
6282 break;
6284 case LSHIFT_EXPR: case RSHIFT_EXPR:
6285 /* If the second operand is constant, this is a multiplication
6286 or floor division, by a power of two, so we can treat it that
6287 way unless the multiplier or divisor overflows. Signed
6288 left-shift overflow is implementation-defined rather than
6289 undefined in C90, so do not convert signed left shift into
6290 multiplication. */
6291 if (TREE_CODE (op1) == INTEGER_CST
6292 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6293 /* const_binop may not detect overflow correctly,
6294 so check for it explicitly here. */
6295 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6296 wi::to_wide (op1))
6297 && (t1 = fold_convert (ctype,
6298 const_binop (LSHIFT_EXPR, size_one_node,
6299 op1))) != 0
6300 && !TREE_OVERFLOW (t1))
6301 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6302 ? MULT_EXPR : FLOOR_DIV_EXPR,
6303 ctype,
6304 fold_convert (ctype, op0),
6305 t1),
6306 c, code, wide_type, strict_overflow_p);
6307 break;
6309 case PLUS_EXPR: case MINUS_EXPR:
6310 /* See if we can eliminate the operation on both sides. If we can, we
6311 can return a new PLUS or MINUS. If we can't, the only remaining
6312 cases where we can do anything are if the second operand is a
6313 constant. */
6314 sub_strict_overflow_p = false;
6315 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6316 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6317 if (t1 != 0 && t2 != 0
6318 && TYPE_OVERFLOW_WRAPS (ctype)
6319 && (code == MULT_EXPR
6320 /* If not multiplication, we can only do this if both operands
6321 are divisible by c. */
6322 || (multiple_of_p (ctype, op0, c)
6323 && multiple_of_p (ctype, op1, c))))
6325 if (sub_strict_overflow_p)
6326 *strict_overflow_p = true;
6327 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6328 fold_convert (ctype, t2));
6331 /* If this was a subtraction, negate OP1 and set it to be an addition.
6332 This simplifies the logic below. */
6333 if (tcode == MINUS_EXPR)
6335 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6336 /* If OP1 was not easily negatable, the constant may be OP0. */
6337 if (TREE_CODE (op0) == INTEGER_CST)
6339 std::swap (op0, op1);
6340 std::swap (t1, t2);
6344 if (TREE_CODE (op1) != INTEGER_CST)
6345 break;
6347 /* If either OP1 or C are negative, this optimization is not safe for
6348 some of the division and remainder types while for others we need
6349 to change the code. */
6350 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6352 if (code == CEIL_DIV_EXPR)
6353 code = FLOOR_DIV_EXPR;
6354 else if (code == FLOOR_DIV_EXPR)
6355 code = CEIL_DIV_EXPR;
6356 else if (code != MULT_EXPR
6357 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6358 break;
6361 /* If it's a multiply or a division/modulus operation of a multiple
6362 of our constant, do the operation and verify it doesn't overflow. */
6363 if (code == MULT_EXPR
6364 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6365 TYPE_SIGN (type)))
6367 op1 = const_binop (code, fold_convert (ctype, op1),
6368 fold_convert (ctype, c));
6369 /* We allow the constant to overflow with wrapping semantics. */
6370 if (op1 == 0
6371 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6372 break;
6374 else
6375 break;
6377 /* If we have an unsigned type, we cannot widen the operation since it
6378 will change the result if the original computation overflowed. */
6379 if (TYPE_UNSIGNED (ctype) && ctype != type)
6380 break;
6382 /* The last case is if we are a multiply. In that case, we can
6383 apply the distributive law to commute the multiply and addition
6384 if the multiplication of the constants doesn't overflow
6385 and overflow is defined. With undefined overflow
6386 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6387 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6388 return fold_build2 (tcode, ctype,
6389 fold_build2 (code, ctype,
6390 fold_convert (ctype, op0),
6391 fold_convert (ctype, c)),
6392 op1);
6394 break;
6396 case MULT_EXPR:
6397 /* We have a special case here if we are doing something like
6398 (C * 8) % 4 since we know that's zero. */
6399 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6400 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6401 /* If the multiplication can overflow we cannot optimize this. */
6402 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6403 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6404 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6405 TYPE_SIGN (type)))
6407 *strict_overflow_p = true;
6408 return omit_one_operand (type, integer_zero_node, op0);
6411 /* ... fall through ... */
6413 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6414 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6415 /* If we can extract our operation from the LHS, do so and return a
6416 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6417 do something only if the second operand is a constant. */
6418 if (same_p
6419 && TYPE_OVERFLOW_WRAPS (ctype)
6420 && (t1 = extract_muldiv (op0, c, code, wide_type,
6421 strict_overflow_p)) != 0)
6422 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6423 fold_convert (ctype, op1));
6424 else if (tcode == MULT_EXPR && code == MULT_EXPR
6425 && TYPE_OVERFLOW_WRAPS (ctype)
6426 && (t1 = extract_muldiv (op1, c, code, wide_type,
6427 strict_overflow_p)) != 0)
6428 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6429 fold_convert (ctype, t1));
6430 else if (TREE_CODE (op1) != INTEGER_CST)
6431 return 0;
6433 /* If these are the same operation types, we can associate them
6434 assuming no overflow. */
6435 if (tcode == code)
6437 bool overflow_p = false;
6438 bool overflow_mul_p;
6439 signop sign = TYPE_SIGN (ctype);
6440 unsigned prec = TYPE_PRECISION (ctype);
6441 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6442 wi::to_wide (c, prec),
6443 sign, &overflow_mul_p);
6444 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6445 if (overflow_mul_p
6446 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6447 overflow_p = true;
6448 if (!overflow_p)
6449 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6450 wide_int_to_tree (ctype, mul));
6453 /* If these operations "cancel" each other, we have the main
6454 optimizations of this pass, which occur when either constant is a
6455 multiple of the other, in which case we replace this with either an
6456 operation or CODE or TCODE.
6458 If we have an unsigned type, we cannot do this since it will change
6459 the result if the original computation overflowed. */
6460 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6461 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6462 || (tcode == MULT_EXPR
6463 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6464 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6465 && code != MULT_EXPR)))
6467 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6468 TYPE_SIGN (type)))
6470 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6471 *strict_overflow_p = true;
6472 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6473 fold_convert (ctype,
6474 const_binop (TRUNC_DIV_EXPR,
6475 op1, c)));
6477 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6478 TYPE_SIGN (type)))
6480 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6481 *strict_overflow_p = true;
6482 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6483 fold_convert (ctype,
6484 const_binop (TRUNC_DIV_EXPR,
6485 c, op1)));
6488 break;
6490 default:
6491 break;
6494 return 0;
6497 /* Return a node which has the indicated constant VALUE (either 0 or
6498 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6499 and is of the indicated TYPE. */
6501 tree
6502 constant_boolean_node (bool value, tree type)
6504 if (type == integer_type_node)
6505 return value ? integer_one_node : integer_zero_node;
6506 else if (type == boolean_type_node)
6507 return value ? boolean_true_node : boolean_false_node;
6508 else if (TREE_CODE (type) == VECTOR_TYPE)
6509 return build_vector_from_val (type,
6510 build_int_cst (TREE_TYPE (type),
6511 value ? -1 : 0));
6512 else
6513 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6517 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6518 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6519 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6520 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6521 COND is the first argument to CODE; otherwise (as in the example
6522 given here), it is the second argument. TYPE is the type of the
6523 original expression. Return NULL_TREE if no simplification is
6524 possible. */
6526 static tree
6527 fold_binary_op_with_conditional_arg (location_t loc,
6528 enum tree_code code,
6529 tree type, tree op0, tree op1,
6530 tree cond, tree arg, int cond_first_p)
6532 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6533 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6534 tree test, true_value, false_value;
6535 tree lhs = NULL_TREE;
6536 tree rhs = NULL_TREE;
6537 enum tree_code cond_code = COND_EXPR;
6539 if (TREE_CODE (cond) == COND_EXPR
6540 || TREE_CODE (cond) == VEC_COND_EXPR)
6542 test = TREE_OPERAND (cond, 0);
6543 true_value = TREE_OPERAND (cond, 1);
6544 false_value = TREE_OPERAND (cond, 2);
6545 /* If this operand throws an expression, then it does not make
6546 sense to try to perform a logical or arithmetic operation
6547 involving it. */
6548 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6549 lhs = true_value;
6550 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6551 rhs = false_value;
6553 else if (!(TREE_CODE (type) != VECTOR_TYPE
6554 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6556 tree testtype = TREE_TYPE (cond);
6557 test = cond;
6558 true_value = constant_boolean_node (true, testtype);
6559 false_value = constant_boolean_node (false, testtype);
6561 else
6562 /* Detect the case of mixing vector and scalar types - bail out. */
6563 return NULL_TREE;
6565 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6566 cond_code = VEC_COND_EXPR;
6568 /* This transformation is only worthwhile if we don't have to wrap ARG
6569 in a SAVE_EXPR and the operation can be simplified without recursing
6570 on at least one of the branches once its pushed inside the COND_EXPR. */
6571 if (!TREE_CONSTANT (arg)
6572 && (TREE_SIDE_EFFECTS (arg)
6573 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6574 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6575 return NULL_TREE;
6577 arg = fold_convert_loc (loc, arg_type, arg);
6578 if (lhs == 0)
6580 true_value = fold_convert_loc (loc, cond_type, true_value);
6581 if (cond_first_p)
6582 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6583 else
6584 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6586 if (rhs == 0)
6588 false_value = fold_convert_loc (loc, cond_type, false_value);
6589 if (cond_first_p)
6590 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6591 else
6592 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6595 /* Check that we have simplified at least one of the branches. */
6596 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6597 return NULL_TREE;
6599 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6603 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6605 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6606 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6607 ADDEND is the same as X.
6609 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6610 and finite. The problematic cases are when X is zero, and its mode
6611 has signed zeros. In the case of rounding towards -infinity,
6612 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6613 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6615 bool
6616 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6618 if (!real_zerop (addend))
6619 return false;
6621 /* Don't allow the fold with -fsignaling-nans. */
6622 if (HONOR_SNANS (element_mode (type)))
6623 return false;
6625 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6626 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6627 return true;
6629 /* In a vector or complex, we would need to check the sign of all zeros. */
6630 if (TREE_CODE (addend) != REAL_CST)
6631 return false;
6633 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6634 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6635 negate = !negate;
6637 /* The mode has signed zeros, and we have to honor their sign.
6638 In this situation, there is only one case we can return true for.
6639 X - 0 is the same as X unless rounding towards -infinity is
6640 supported. */
6641 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6644 /* Subroutine of match.pd that optimizes comparisons of a division by
6645 a nonzero integer constant against an integer constant, i.e.
6646 X/C1 op C2.
6648 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6649 GE_EXPR or LE_EXPR. ARG01 and ARG1 must be a INTEGER_CST. */
6651 enum tree_code
6652 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
6653 tree *hi, bool *neg_overflow)
6655 tree prod, tmp, type = TREE_TYPE (c1);
6656 signop sign = TYPE_SIGN (type);
6657 bool overflow;
6659 /* We have to do this the hard way to detect unsigned overflow.
6660 prod = int_const_binop (MULT_EXPR, c1, c2); */
6661 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
6662 prod = force_fit_type (type, val, -1, overflow);
6663 *neg_overflow = false;
6665 if (sign == UNSIGNED)
6667 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6668 *lo = prod;
6670 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6671 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
6672 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
6674 else if (tree_int_cst_sgn (c1) >= 0)
6676 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6677 switch (tree_int_cst_sgn (c2))
6679 case -1:
6680 *neg_overflow = true;
6681 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
6682 *hi = prod;
6683 break;
6685 case 0:
6686 *lo = fold_negate_const (tmp, type);
6687 *hi = tmp;
6688 break;
6690 case 1:
6691 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
6692 *lo = prod;
6693 break;
6695 default:
6696 gcc_unreachable ();
6699 else
6701 /* A negative divisor reverses the relational operators. */
6702 code = swap_tree_comparison (code);
6704 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
6705 switch (tree_int_cst_sgn (c2))
6707 case -1:
6708 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
6709 *lo = prod;
6710 break;
6712 case 0:
6713 *hi = fold_negate_const (tmp, type);
6714 *lo = tmp;
6715 break;
6717 case 1:
6718 *neg_overflow = true;
6719 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
6720 *hi = prod;
6721 break;
6723 default:
6724 gcc_unreachable ();
6728 if (code != EQ_EXPR && code != NE_EXPR)
6729 return code;
6731 if (TREE_OVERFLOW (*lo)
6732 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
6733 *lo = NULL_TREE;
6734 if (TREE_OVERFLOW (*hi)
6735 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
6736 *hi = NULL_TREE;
6738 return code;
6742 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6743 equality/inequality test, then return a simplified form of the test
6744 using a sign testing. Otherwise return NULL. TYPE is the desired
6745 result type. */
6747 static tree
6748 fold_single_bit_test_into_sign_test (location_t loc,
6749 enum tree_code code, tree arg0, tree arg1,
6750 tree result_type)
6752 /* If this is testing a single bit, we can optimize the test. */
6753 if ((code == NE_EXPR || code == EQ_EXPR)
6754 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6755 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6757 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6758 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6759 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6761 if (arg00 != NULL_TREE
6762 /* This is only a win if casting to a signed type is cheap,
6763 i.e. when arg00's type is not a partial mode. */
6764 && type_has_mode_precision_p (TREE_TYPE (arg00)))
6766 tree stype = signed_type_for (TREE_TYPE (arg00));
6767 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6768 result_type,
6769 fold_convert_loc (loc, stype, arg00),
6770 build_int_cst (stype, 0));
6774 return NULL_TREE;
6777 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6778 equality/inequality test, then return a simplified form of
6779 the test using shifts and logical operations. Otherwise return
6780 NULL. TYPE is the desired result type. */
6782 tree
6783 fold_single_bit_test (location_t loc, enum tree_code code,
6784 tree arg0, tree arg1, tree result_type)
6786 /* If this is testing a single bit, we can optimize the test. */
6787 if ((code == NE_EXPR || code == EQ_EXPR)
6788 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6789 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6791 tree inner = TREE_OPERAND (arg0, 0);
6792 tree type = TREE_TYPE (arg0);
6793 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6794 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
6795 int ops_unsigned;
6796 tree signed_type, unsigned_type, intermediate_type;
6797 tree tem, one;
6799 /* First, see if we can fold the single bit test into a sign-bit
6800 test. */
6801 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6802 result_type);
6803 if (tem)
6804 return tem;
6806 /* Otherwise we have (A & C) != 0 where C is a single bit,
6807 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6808 Similarly for (A & C) == 0. */
6810 /* If INNER is a right shift of a constant and it plus BITNUM does
6811 not overflow, adjust BITNUM and INNER. */
6812 if (TREE_CODE (inner) == RSHIFT_EXPR
6813 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6814 && bitnum < TYPE_PRECISION (type)
6815 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
6816 TYPE_PRECISION (type) - bitnum))
6818 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6819 inner = TREE_OPERAND (inner, 0);
6822 /* If we are going to be able to omit the AND below, we must do our
6823 operations as unsigned. If we must use the AND, we have a choice.
6824 Normally unsigned is faster, but for some machines signed is. */
6825 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
6826 && !flag_syntax_only) ? 0 : 1;
6828 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6829 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6830 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6831 inner = fold_convert_loc (loc, intermediate_type, inner);
6833 if (bitnum != 0)
6834 inner = build2 (RSHIFT_EXPR, intermediate_type,
6835 inner, size_int (bitnum));
6837 one = build_int_cst (intermediate_type, 1);
6839 if (code == EQ_EXPR)
6840 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6842 /* Put the AND last so it can combine with more things. */
6843 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6845 /* Make sure to return the proper type. */
6846 inner = fold_convert_loc (loc, result_type, inner);
6848 return inner;
6850 return NULL_TREE;
6853 /* Test whether it is preferable two swap two operands, ARG0 and
6854 ARG1, for example because ARG0 is an integer constant and ARG1
6855 isn't. */
6857 bool
6858 tree_swap_operands_p (const_tree arg0, const_tree arg1)
6860 if (CONSTANT_CLASS_P (arg1))
6861 return 0;
6862 if (CONSTANT_CLASS_P (arg0))
6863 return 1;
6865 STRIP_NOPS (arg0);
6866 STRIP_NOPS (arg1);
6868 if (TREE_CONSTANT (arg1))
6869 return 0;
6870 if (TREE_CONSTANT (arg0))
6871 return 1;
6873 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6874 for commutative and comparison operators. Ensuring a canonical
6875 form allows the optimizers to find additional redundancies without
6876 having to explicitly check for both orderings. */
6877 if (TREE_CODE (arg0) == SSA_NAME
6878 && TREE_CODE (arg1) == SSA_NAME
6879 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6880 return 1;
6882 /* Put SSA_NAMEs last. */
6883 if (TREE_CODE (arg1) == SSA_NAME)
6884 return 0;
6885 if (TREE_CODE (arg0) == SSA_NAME)
6886 return 1;
6888 /* Put variables last. */
6889 if (DECL_P (arg1))
6890 return 0;
6891 if (DECL_P (arg0))
6892 return 1;
6894 return 0;
6898 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6899 means A >= Y && A != MAX, but in this case we know that
6900 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6902 static tree
6903 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6905 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6907 if (TREE_CODE (bound) == LT_EXPR)
6908 a = TREE_OPERAND (bound, 0);
6909 else if (TREE_CODE (bound) == GT_EXPR)
6910 a = TREE_OPERAND (bound, 1);
6911 else
6912 return NULL_TREE;
6914 typea = TREE_TYPE (a);
6915 if (!INTEGRAL_TYPE_P (typea)
6916 && !POINTER_TYPE_P (typea))
6917 return NULL_TREE;
6919 if (TREE_CODE (ineq) == LT_EXPR)
6921 a1 = TREE_OPERAND (ineq, 1);
6922 y = TREE_OPERAND (ineq, 0);
6924 else if (TREE_CODE (ineq) == GT_EXPR)
6926 a1 = TREE_OPERAND (ineq, 0);
6927 y = TREE_OPERAND (ineq, 1);
6929 else
6930 return NULL_TREE;
6932 if (TREE_TYPE (a1) != typea)
6933 return NULL_TREE;
6935 if (POINTER_TYPE_P (typea))
6937 /* Convert the pointer types into integer before taking the difference. */
6938 tree ta = fold_convert_loc (loc, ssizetype, a);
6939 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6940 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6942 else
6943 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6945 if (!diff || !integer_onep (diff))
6946 return NULL_TREE;
6948 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6951 /* Fold a sum or difference of at least one multiplication.
6952 Returns the folded tree or NULL if no simplification could be made. */
6954 static tree
6955 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6956 tree arg0, tree arg1)
6958 tree arg00, arg01, arg10, arg11;
6959 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6961 /* (A * C) +- (B * C) -> (A+-B) * C.
6962 (A * C) +- A -> A * (C+-1).
6963 We are most concerned about the case where C is a constant,
6964 but other combinations show up during loop reduction. Since
6965 it is not difficult, try all four possibilities. */
6967 if (TREE_CODE (arg0) == MULT_EXPR)
6969 arg00 = TREE_OPERAND (arg0, 0);
6970 arg01 = TREE_OPERAND (arg0, 1);
6972 else if (TREE_CODE (arg0) == INTEGER_CST)
6974 arg00 = build_one_cst (type);
6975 arg01 = arg0;
6977 else
6979 /* We cannot generate constant 1 for fract. */
6980 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6981 return NULL_TREE;
6982 arg00 = arg0;
6983 arg01 = build_one_cst (type);
6985 if (TREE_CODE (arg1) == MULT_EXPR)
6987 arg10 = TREE_OPERAND (arg1, 0);
6988 arg11 = TREE_OPERAND (arg1, 1);
6990 else if (TREE_CODE (arg1) == INTEGER_CST)
6992 arg10 = build_one_cst (type);
6993 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
6994 the purpose of this canonicalization. */
6995 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
6996 && negate_expr_p (arg1)
6997 && code == PLUS_EXPR)
6999 arg11 = negate_expr (arg1);
7000 code = MINUS_EXPR;
7002 else
7003 arg11 = arg1;
7005 else
7007 /* We cannot generate constant 1 for fract. */
7008 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7009 return NULL_TREE;
7010 arg10 = arg1;
7011 arg11 = build_one_cst (type);
7013 same = NULL_TREE;
7015 /* Prefer factoring a common non-constant. */
7016 if (operand_equal_p (arg00, arg10, 0))
7017 same = arg00, alt0 = arg01, alt1 = arg11;
7018 else if (operand_equal_p (arg01, arg11, 0))
7019 same = arg01, alt0 = arg00, alt1 = arg10;
7020 else if (operand_equal_p (arg00, arg11, 0))
7021 same = arg00, alt0 = arg01, alt1 = arg10;
7022 else if (operand_equal_p (arg01, arg10, 0))
7023 same = arg01, alt0 = arg00, alt1 = arg11;
7025 /* No identical multiplicands; see if we can find a common
7026 power-of-two factor in non-power-of-two multiplies. This
7027 can help in multi-dimensional array access. */
7028 else if (tree_fits_shwi_p (arg01)
7029 && tree_fits_shwi_p (arg11))
7031 HOST_WIDE_INT int01, int11, tmp;
7032 bool swap = false;
7033 tree maybe_same;
7034 int01 = tree_to_shwi (arg01);
7035 int11 = tree_to_shwi (arg11);
7037 /* Move min of absolute values to int11. */
7038 if (absu_hwi (int01) < absu_hwi (int11))
7040 tmp = int01, int01 = int11, int11 = tmp;
7041 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7042 maybe_same = arg01;
7043 swap = true;
7045 else
7046 maybe_same = arg11;
7048 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7049 /* The remainder should not be a constant, otherwise we
7050 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7051 increased the number of multiplications necessary. */
7052 && TREE_CODE (arg10) != INTEGER_CST)
7054 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7055 build_int_cst (TREE_TYPE (arg00),
7056 int01 / int11));
7057 alt1 = arg10;
7058 same = maybe_same;
7059 if (swap)
7060 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7064 if (!same)
7065 return NULL_TREE;
7067 if (! INTEGRAL_TYPE_P (type)
7068 || TYPE_OVERFLOW_WRAPS (type)
7069 /* We are neither factoring zero nor minus one. */
7070 || TREE_CODE (same) == INTEGER_CST)
7071 return fold_build2_loc (loc, MULT_EXPR, type,
7072 fold_build2_loc (loc, code, type,
7073 fold_convert_loc (loc, type, alt0),
7074 fold_convert_loc (loc, type, alt1)),
7075 fold_convert_loc (loc, type, same));
7077 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7078 same may be minus one and thus the multiplication may overflow. Perform
7079 the operations in an unsigned type. */
7080 tree utype = unsigned_type_for (type);
7081 tree tem = fold_build2_loc (loc, code, utype,
7082 fold_convert_loc (loc, utype, alt0),
7083 fold_convert_loc (loc, utype, alt1));
7084 /* If the sum evaluated to a constant that is not -INF the multiplication
7085 cannot overflow. */
7086 if (TREE_CODE (tem) == INTEGER_CST
7087 && (wi::to_wide (tem)
7088 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7089 return fold_build2_loc (loc, MULT_EXPR, type,
7090 fold_convert (type, tem), same);
7092 return fold_convert_loc (loc, type,
7093 fold_build2_loc (loc, MULT_EXPR, utype, tem,
7094 fold_convert_loc (loc, utype, same)));
7097 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7098 specified by EXPR into the buffer PTR of length LEN bytes.
7099 Return the number of bytes placed in the buffer, or zero
7100 upon failure. */
7102 static int
7103 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7105 tree type = TREE_TYPE (expr);
7106 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7107 int byte, offset, word, words;
7108 unsigned char value;
7110 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7111 return 0;
7112 if (off == -1)
7113 off = 0;
7115 if (ptr == NULL)
7116 /* Dry run. */
7117 return MIN (len, total_bytes - off);
7119 words = total_bytes / UNITS_PER_WORD;
7121 for (byte = 0; byte < total_bytes; byte++)
7123 int bitpos = byte * BITS_PER_UNIT;
7124 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7125 number of bytes. */
7126 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7128 if (total_bytes > UNITS_PER_WORD)
7130 word = byte / UNITS_PER_WORD;
7131 if (WORDS_BIG_ENDIAN)
7132 word = (words - 1) - word;
7133 offset = word * UNITS_PER_WORD;
7134 if (BYTES_BIG_ENDIAN)
7135 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7136 else
7137 offset += byte % UNITS_PER_WORD;
7139 else
7140 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7141 if (offset >= off && offset - off < len)
7142 ptr[offset - off] = value;
7144 return MIN (len, total_bytes - off);
7148 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7149 specified by EXPR into the buffer PTR of length LEN bytes.
7150 Return the number of bytes placed in the buffer, or zero
7151 upon failure. */
7153 static int
7154 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7156 tree type = TREE_TYPE (expr);
7157 scalar_mode mode = SCALAR_TYPE_MODE (type);
7158 int total_bytes = GET_MODE_SIZE (mode);
7159 FIXED_VALUE_TYPE value;
7160 tree i_value, i_type;
7162 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7163 return 0;
7165 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7167 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes)
7168 return 0;
7170 value = TREE_FIXED_CST (expr);
7171 i_value = double_int_to_tree (i_type, value.data);
7173 return native_encode_int (i_value, ptr, len, off);
7177 /* Subroutine of native_encode_expr. Encode the REAL_CST
7178 specified by EXPR into the buffer PTR of length LEN bytes.
7179 Return the number of bytes placed in the buffer, or zero
7180 upon failure. */
7182 static int
7183 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7185 tree type = TREE_TYPE (expr);
7186 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7187 int byte, offset, word, words, bitpos;
7188 unsigned char value;
7190 /* There are always 32 bits in each long, no matter the size of
7191 the hosts long. We handle floating point representations with
7192 up to 192 bits. */
7193 long tmp[6];
7195 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7196 return 0;
7197 if (off == -1)
7198 off = 0;
7200 if (ptr == NULL)
7201 /* Dry run. */
7202 return MIN (len, total_bytes - off);
7204 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7206 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7208 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7209 bitpos += BITS_PER_UNIT)
7211 byte = (bitpos / BITS_PER_UNIT) & 3;
7212 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7214 if (UNITS_PER_WORD < 4)
7216 word = byte / UNITS_PER_WORD;
7217 if (WORDS_BIG_ENDIAN)
7218 word = (words - 1) - word;
7219 offset = word * UNITS_PER_WORD;
7220 if (BYTES_BIG_ENDIAN)
7221 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7222 else
7223 offset += byte % UNITS_PER_WORD;
7225 else
7227 offset = byte;
7228 if (BYTES_BIG_ENDIAN)
7230 /* Reverse bytes within each long, or within the entire float
7231 if it's smaller than a long (for HFmode). */
7232 offset = MIN (3, total_bytes - 1) - offset;
7233 gcc_assert (offset >= 0);
7236 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7237 if (offset >= off
7238 && offset - off < len)
7239 ptr[offset - off] = value;
7241 return MIN (len, total_bytes - off);
7244 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7245 specified by EXPR into the buffer PTR of length LEN bytes.
7246 Return the number of bytes placed in the buffer, or zero
7247 upon failure. */
7249 static int
7250 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7252 int rsize, isize;
7253 tree part;
7255 part = TREE_REALPART (expr);
7256 rsize = native_encode_expr (part, ptr, len, off);
7257 if (off == -1 && rsize == 0)
7258 return 0;
7259 part = TREE_IMAGPART (expr);
7260 if (off != -1)
7261 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7262 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7263 len - rsize, off);
7264 if (off == -1 && isize != rsize)
7265 return 0;
7266 return rsize + isize;
7270 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7271 specified by EXPR into the buffer PTR of length LEN bytes.
7272 Return the number of bytes placed in the buffer, or zero
7273 upon failure. */
7275 static int
7276 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7278 unsigned i, count;
7279 int size, offset;
7280 tree itype, elem;
7282 offset = 0;
7283 count = VECTOR_CST_NELTS (expr);
7284 itype = TREE_TYPE (TREE_TYPE (expr));
7285 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7286 for (i = 0; i < count; i++)
7288 if (off >= size)
7290 off -= size;
7291 continue;
7293 elem = VECTOR_CST_ELT (expr, i);
7294 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7295 len - offset, off);
7296 if ((off == -1 && res != size) || res == 0)
7297 return 0;
7298 offset += res;
7299 if (offset >= len)
7300 return offset;
7301 if (off != -1)
7302 off = 0;
7304 return offset;
7308 /* Subroutine of native_encode_expr. Encode the STRING_CST
7309 specified by EXPR into the buffer PTR of length LEN bytes.
7310 Return the number of bytes placed in the buffer, or zero
7311 upon failure. */
7313 static int
7314 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7316 tree type = TREE_TYPE (expr);
7318 /* Wide-char strings are encoded in target byte-order so native
7319 encoding them is trivial. */
7320 if (BITS_PER_UNIT != CHAR_BIT
7321 || TREE_CODE (type) != ARRAY_TYPE
7322 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7323 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7324 return 0;
7326 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7327 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7328 return 0;
7329 if (off == -1)
7330 off = 0;
7331 if (ptr == NULL)
7332 /* Dry run. */;
7333 else if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7335 int written = 0;
7336 if (off < TREE_STRING_LENGTH (expr))
7338 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7339 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7341 memset (ptr + written, 0,
7342 MIN (total_bytes - written, len - written));
7344 else
7345 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7346 return MIN (total_bytes - off, len);
7350 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7351 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7352 buffer PTR of length LEN bytes. If PTR is NULL, don't actually store
7353 anything, just do a dry run. If OFF is not -1 then start
7354 the encoding at byte offset OFF and encode at most LEN bytes.
7355 Return the number of bytes placed in the buffer, or zero upon failure. */
7358 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7360 /* We don't support starting at negative offset and -1 is special. */
7361 if (off < -1)
7362 return 0;
7364 switch (TREE_CODE (expr))
7366 case INTEGER_CST:
7367 return native_encode_int (expr, ptr, len, off);
7369 case REAL_CST:
7370 return native_encode_real (expr, ptr, len, off);
7372 case FIXED_CST:
7373 return native_encode_fixed (expr, ptr, len, off);
7375 case COMPLEX_CST:
7376 return native_encode_complex (expr, ptr, len, off);
7378 case VECTOR_CST:
7379 return native_encode_vector (expr, ptr, len, off);
7381 case STRING_CST:
7382 return native_encode_string (expr, ptr, len, off);
7384 default:
7385 return 0;
7390 /* Subroutine of native_interpret_expr. Interpret the contents of
7391 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7392 If the buffer cannot be interpreted, return NULL_TREE. */
7394 static tree
7395 native_interpret_int (tree type, const unsigned char *ptr, int len)
7397 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7399 if (total_bytes > len
7400 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7401 return NULL_TREE;
7403 wide_int result = wi::from_buffer (ptr, total_bytes);
7405 return wide_int_to_tree (type, result);
7409 /* Subroutine of native_interpret_expr. Interpret the contents of
7410 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7411 If the buffer cannot be interpreted, return NULL_TREE. */
7413 static tree
7414 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7416 scalar_mode mode = SCALAR_TYPE_MODE (type);
7417 int total_bytes = GET_MODE_SIZE (mode);
7418 double_int result;
7419 FIXED_VALUE_TYPE fixed_value;
7421 if (total_bytes > len
7422 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7423 return NULL_TREE;
7425 result = double_int::from_buffer (ptr, total_bytes);
7426 fixed_value = fixed_from_double_int (result, mode);
7428 return build_fixed (type, fixed_value);
7432 /* Subroutine of native_interpret_expr. Interpret the contents of
7433 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7434 If the buffer cannot be interpreted, return NULL_TREE. */
7436 static tree
7437 native_interpret_real (tree type, const unsigned char *ptr, int len)
7439 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
7440 int total_bytes = GET_MODE_SIZE (mode);
7441 unsigned char value;
7442 /* There are always 32 bits in each long, no matter the size of
7443 the hosts long. We handle floating point representations with
7444 up to 192 bits. */
7445 REAL_VALUE_TYPE r;
7446 long tmp[6];
7448 if (total_bytes > len || total_bytes > 24)
7449 return NULL_TREE;
7450 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7452 memset (tmp, 0, sizeof (tmp));
7453 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7454 bitpos += BITS_PER_UNIT)
7456 /* Both OFFSET and BYTE index within a long;
7457 bitpos indexes the whole float. */
7458 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7459 if (UNITS_PER_WORD < 4)
7461 int word = byte / UNITS_PER_WORD;
7462 if (WORDS_BIG_ENDIAN)
7463 word = (words - 1) - word;
7464 offset = word * UNITS_PER_WORD;
7465 if (BYTES_BIG_ENDIAN)
7466 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7467 else
7468 offset += byte % UNITS_PER_WORD;
7470 else
7472 offset = byte;
7473 if (BYTES_BIG_ENDIAN)
7475 /* Reverse bytes within each long, or within the entire float
7476 if it's smaller than a long (for HFmode). */
7477 offset = MIN (3, total_bytes - 1) - offset;
7478 gcc_assert (offset >= 0);
7481 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7483 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7486 real_from_target (&r, tmp, mode);
7487 return build_real (type, r);
7491 /* Subroutine of native_interpret_expr. Interpret the contents of
7492 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7493 If the buffer cannot be interpreted, return NULL_TREE. */
7495 static tree
7496 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7498 tree etype, rpart, ipart;
7499 int size;
7501 etype = TREE_TYPE (type);
7502 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7503 if (size * 2 > len)
7504 return NULL_TREE;
7505 rpart = native_interpret_expr (etype, ptr, size);
7506 if (!rpart)
7507 return NULL_TREE;
7508 ipart = native_interpret_expr (etype, ptr+size, size);
7509 if (!ipart)
7510 return NULL_TREE;
7511 return build_complex (type, rpart, ipart);
7515 /* Subroutine of native_interpret_expr. Interpret the contents of
7516 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7517 If the buffer cannot be interpreted, return NULL_TREE. */
7519 static tree
7520 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7522 tree etype, elem;
7523 int i, size, count;
7525 etype = TREE_TYPE (type);
7526 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7527 count = TYPE_VECTOR_SUBPARTS (type);
7528 if (size * count > len)
7529 return NULL_TREE;
7531 tree_vector_builder elements (type, count, 1);
7532 for (i = 0; i < count; ++i)
7534 elem = native_interpret_expr (etype, ptr+(i*size), size);
7535 if (!elem)
7536 return NULL_TREE;
7537 elements.quick_push (elem);
7539 return elements.build ();
7543 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7544 the buffer PTR of length LEN as a constant of type TYPE. For
7545 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7546 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7547 return NULL_TREE. */
7549 tree
7550 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7552 switch (TREE_CODE (type))
7554 case INTEGER_TYPE:
7555 case ENUMERAL_TYPE:
7556 case BOOLEAN_TYPE:
7557 case POINTER_TYPE:
7558 case REFERENCE_TYPE:
7559 return native_interpret_int (type, ptr, len);
7561 case REAL_TYPE:
7562 return native_interpret_real (type, ptr, len);
7564 case FIXED_POINT_TYPE:
7565 return native_interpret_fixed (type, ptr, len);
7567 case COMPLEX_TYPE:
7568 return native_interpret_complex (type, ptr, len);
7570 case VECTOR_TYPE:
7571 return native_interpret_vector (type, ptr, len);
7573 default:
7574 return NULL_TREE;
7578 /* Returns true if we can interpret the contents of a native encoding
7579 as TYPE. */
7581 static bool
7582 can_native_interpret_type_p (tree type)
7584 switch (TREE_CODE (type))
7586 case INTEGER_TYPE:
7587 case ENUMERAL_TYPE:
7588 case BOOLEAN_TYPE:
7589 case POINTER_TYPE:
7590 case REFERENCE_TYPE:
7591 case FIXED_POINT_TYPE:
7592 case REAL_TYPE:
7593 case COMPLEX_TYPE:
7594 case VECTOR_TYPE:
7595 return true;
7596 default:
7597 return false;
7602 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7603 TYPE at compile-time. If we're unable to perform the conversion
7604 return NULL_TREE. */
7606 static tree
7607 fold_view_convert_expr (tree type, tree expr)
7609 /* We support up to 512-bit values (for V8DFmode). */
7610 unsigned char buffer[64];
7611 int len;
7613 /* Check that the host and target are sane. */
7614 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7615 return NULL_TREE;
7617 len = native_encode_expr (expr, buffer, sizeof (buffer));
7618 if (len == 0)
7619 return NULL_TREE;
7621 return native_interpret_expr (type, buffer, len);
7624 /* Build an expression for the address of T. Folds away INDIRECT_REF
7625 to avoid confusing the gimplify process. */
7627 tree
7628 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7630 /* The size of the object is not relevant when talking about its address. */
7631 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7632 t = TREE_OPERAND (t, 0);
7634 if (TREE_CODE (t) == INDIRECT_REF)
7636 t = TREE_OPERAND (t, 0);
7638 if (TREE_TYPE (t) != ptrtype)
7639 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7641 else if (TREE_CODE (t) == MEM_REF
7642 && integer_zerop (TREE_OPERAND (t, 1)))
7643 return TREE_OPERAND (t, 0);
7644 else if (TREE_CODE (t) == MEM_REF
7645 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7646 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7647 TREE_OPERAND (t, 0),
7648 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7649 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7651 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7653 if (TREE_TYPE (t) != ptrtype)
7654 t = fold_convert_loc (loc, ptrtype, t);
7656 else
7657 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7659 return t;
7662 /* Build an expression for the address of T. */
7664 tree
7665 build_fold_addr_expr_loc (location_t loc, tree t)
7667 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7669 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7672 /* Fold a unary expression of code CODE and type TYPE with operand
7673 OP0. Return the folded expression if folding is successful.
7674 Otherwise, return NULL_TREE. */
7676 tree
7677 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7679 tree tem;
7680 tree arg0;
7681 enum tree_code_class kind = TREE_CODE_CLASS (code);
7683 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7684 && TREE_CODE_LENGTH (code) == 1);
7686 arg0 = op0;
7687 if (arg0)
7689 if (CONVERT_EXPR_CODE_P (code)
7690 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7692 /* Don't use STRIP_NOPS, because signedness of argument type
7693 matters. */
7694 STRIP_SIGN_NOPS (arg0);
7696 else
7698 /* Strip any conversions that don't change the mode. This
7699 is safe for every expression, except for a comparison
7700 expression because its signedness is derived from its
7701 operands.
7703 Note that this is done as an internal manipulation within
7704 the constant folder, in order to find the simplest
7705 representation of the arguments so that their form can be
7706 studied. In any cases, the appropriate type conversions
7707 should be put back in the tree that will get out of the
7708 constant folder. */
7709 STRIP_NOPS (arg0);
7712 if (CONSTANT_CLASS_P (arg0))
7714 tree tem = const_unop (code, type, arg0);
7715 if (tem)
7717 if (TREE_TYPE (tem) != type)
7718 tem = fold_convert_loc (loc, type, tem);
7719 return tem;
7724 tem = generic_simplify (loc, code, type, op0);
7725 if (tem)
7726 return tem;
7728 if (TREE_CODE_CLASS (code) == tcc_unary)
7730 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7731 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7732 fold_build1_loc (loc, code, type,
7733 fold_convert_loc (loc, TREE_TYPE (op0),
7734 TREE_OPERAND (arg0, 1))));
7735 else if (TREE_CODE (arg0) == COND_EXPR)
7737 tree arg01 = TREE_OPERAND (arg0, 1);
7738 tree arg02 = TREE_OPERAND (arg0, 2);
7739 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7740 arg01 = fold_build1_loc (loc, code, type,
7741 fold_convert_loc (loc,
7742 TREE_TYPE (op0), arg01));
7743 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7744 arg02 = fold_build1_loc (loc, code, type,
7745 fold_convert_loc (loc,
7746 TREE_TYPE (op0), arg02));
7747 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7748 arg01, arg02);
7750 /* If this was a conversion, and all we did was to move into
7751 inside the COND_EXPR, bring it back out. But leave it if
7752 it is a conversion from integer to integer and the
7753 result precision is no wider than a word since such a
7754 conversion is cheap and may be optimized away by combine,
7755 while it couldn't if it were outside the COND_EXPR. Then return
7756 so we don't get into an infinite recursion loop taking the
7757 conversion out and then back in. */
7759 if ((CONVERT_EXPR_CODE_P (code)
7760 || code == NON_LVALUE_EXPR)
7761 && TREE_CODE (tem) == COND_EXPR
7762 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7763 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7764 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7765 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7766 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7767 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7768 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7769 && (INTEGRAL_TYPE_P
7770 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7771 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7772 || flag_syntax_only))
7773 tem = build1_loc (loc, code, type,
7774 build3 (COND_EXPR,
7775 TREE_TYPE (TREE_OPERAND
7776 (TREE_OPERAND (tem, 1), 0)),
7777 TREE_OPERAND (tem, 0),
7778 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7779 TREE_OPERAND (TREE_OPERAND (tem, 2),
7780 0)));
7781 return tem;
7785 switch (code)
7787 case NON_LVALUE_EXPR:
7788 if (!maybe_lvalue_p (op0))
7789 return fold_convert_loc (loc, type, op0);
7790 return NULL_TREE;
7792 CASE_CONVERT:
7793 case FLOAT_EXPR:
7794 case FIX_TRUNC_EXPR:
7795 if (COMPARISON_CLASS_P (op0))
7797 /* If we have (type) (a CMP b) and type is an integral type, return
7798 new expression involving the new type. Canonicalize
7799 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7800 non-integral type.
7801 Do not fold the result as that would not simplify further, also
7802 folding again results in recursions. */
7803 if (TREE_CODE (type) == BOOLEAN_TYPE)
7804 return build2_loc (loc, TREE_CODE (op0), type,
7805 TREE_OPERAND (op0, 0),
7806 TREE_OPERAND (op0, 1));
7807 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7808 && TREE_CODE (type) != VECTOR_TYPE)
7809 return build3_loc (loc, COND_EXPR, type, op0,
7810 constant_boolean_node (true, type),
7811 constant_boolean_node (false, type));
7814 /* Handle (T *)&A.B.C for A being of type T and B and C
7815 living at offset zero. This occurs frequently in
7816 C++ upcasting and then accessing the base. */
7817 if (TREE_CODE (op0) == ADDR_EXPR
7818 && POINTER_TYPE_P (type)
7819 && handled_component_p (TREE_OPERAND (op0, 0)))
7821 HOST_WIDE_INT bitsize, bitpos;
7822 tree offset;
7823 machine_mode mode;
7824 int unsignedp, reversep, volatilep;
7825 tree base
7826 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7827 &offset, &mode, &unsignedp, &reversep,
7828 &volatilep);
7829 /* If the reference was to a (constant) zero offset, we can use
7830 the address of the base if it has the same base type
7831 as the result type and the pointer type is unqualified. */
7832 if (! offset && bitpos == 0
7833 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7834 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7835 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7836 return fold_convert_loc (loc, type,
7837 build_fold_addr_expr_loc (loc, base));
7840 if (TREE_CODE (op0) == MODIFY_EXPR
7841 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7842 /* Detect assigning a bitfield. */
7843 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7844 && DECL_BIT_FIELD
7845 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7847 /* Don't leave an assignment inside a conversion
7848 unless assigning a bitfield. */
7849 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7850 /* First do the assignment, then return converted constant. */
7851 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7852 TREE_NO_WARNING (tem) = 1;
7853 TREE_USED (tem) = 1;
7854 return tem;
7857 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7858 constants (if x has signed type, the sign bit cannot be set
7859 in c). This folds extension into the BIT_AND_EXPR.
7860 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7861 very likely don't have maximal range for their precision and this
7862 transformation effectively doesn't preserve non-maximal ranges. */
7863 if (TREE_CODE (type) == INTEGER_TYPE
7864 && TREE_CODE (op0) == BIT_AND_EXPR
7865 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7867 tree and_expr = op0;
7868 tree and0 = TREE_OPERAND (and_expr, 0);
7869 tree and1 = TREE_OPERAND (and_expr, 1);
7870 int change = 0;
7872 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7873 || (TYPE_PRECISION (type)
7874 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7875 change = 1;
7876 else if (TYPE_PRECISION (TREE_TYPE (and1))
7877 <= HOST_BITS_PER_WIDE_INT
7878 && tree_fits_uhwi_p (and1))
7880 unsigned HOST_WIDE_INT cst;
7882 cst = tree_to_uhwi (and1);
7883 cst &= HOST_WIDE_INT_M1U
7884 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7885 change = (cst == 0);
7886 if (change
7887 && !flag_syntax_only
7888 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
7889 == ZERO_EXTEND))
7891 tree uns = unsigned_type_for (TREE_TYPE (and0));
7892 and0 = fold_convert_loc (loc, uns, and0);
7893 and1 = fold_convert_loc (loc, uns, and1);
7896 if (change)
7898 tem = force_fit_type (type, wi::to_widest (and1), 0,
7899 TREE_OVERFLOW (and1));
7900 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7901 fold_convert_loc (loc, type, and0), tem);
7905 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7906 cast (T1)X will fold away. We assume that this happens when X itself
7907 is a cast. */
7908 if (POINTER_TYPE_P (type)
7909 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7910 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7912 tree arg00 = TREE_OPERAND (arg0, 0);
7913 tree arg01 = TREE_OPERAND (arg0, 1);
7915 return fold_build_pointer_plus_loc
7916 (loc, fold_convert_loc (loc, type, arg00), arg01);
7919 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7920 of the same precision, and X is an integer type not narrower than
7921 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7922 if (INTEGRAL_TYPE_P (type)
7923 && TREE_CODE (op0) == BIT_NOT_EXPR
7924 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7925 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7926 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7928 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7929 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7930 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7931 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7932 fold_convert_loc (loc, type, tem));
7935 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7936 type of X and Y (integer types only). */
7937 if (INTEGRAL_TYPE_P (type)
7938 && TREE_CODE (op0) == MULT_EXPR
7939 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7940 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7942 /* Be careful not to introduce new overflows. */
7943 tree mult_type;
7944 if (TYPE_OVERFLOW_WRAPS (type))
7945 mult_type = type;
7946 else
7947 mult_type = unsigned_type_for (type);
7949 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7951 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7952 fold_convert_loc (loc, mult_type,
7953 TREE_OPERAND (op0, 0)),
7954 fold_convert_loc (loc, mult_type,
7955 TREE_OPERAND (op0, 1)));
7956 return fold_convert_loc (loc, type, tem);
7960 return NULL_TREE;
7962 case VIEW_CONVERT_EXPR:
7963 if (TREE_CODE (op0) == MEM_REF)
7965 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
7966 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
7967 tem = fold_build2_loc (loc, MEM_REF, type,
7968 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7969 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7970 return tem;
7973 return NULL_TREE;
7975 case NEGATE_EXPR:
7976 tem = fold_negate_expr (loc, arg0);
7977 if (tem)
7978 return fold_convert_loc (loc, type, tem);
7979 return NULL_TREE;
7981 case ABS_EXPR:
7982 /* Convert fabs((double)float) into (double)fabsf(float). */
7983 if (TREE_CODE (arg0) == NOP_EXPR
7984 && TREE_CODE (type) == REAL_TYPE)
7986 tree targ0 = strip_float_extensions (arg0);
7987 if (targ0 != arg0)
7988 return fold_convert_loc (loc, type,
7989 fold_build1_loc (loc, ABS_EXPR,
7990 TREE_TYPE (targ0),
7991 targ0));
7993 return NULL_TREE;
7995 case BIT_NOT_EXPR:
7996 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7997 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7998 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7999 fold_convert_loc (loc, type,
8000 TREE_OPERAND (arg0, 0)))))
8001 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8002 fold_convert_loc (loc, type,
8003 TREE_OPERAND (arg0, 1)));
8004 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8005 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8006 fold_convert_loc (loc, type,
8007 TREE_OPERAND (arg0, 1)))))
8008 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8009 fold_convert_loc (loc, type,
8010 TREE_OPERAND (arg0, 0)), tem);
8012 return NULL_TREE;
8014 case TRUTH_NOT_EXPR:
8015 /* Note that the operand of this must be an int
8016 and its values must be 0 or 1.
8017 ("true" is a fixed value perhaps depending on the language,
8018 but we don't handle values other than 1 correctly yet.) */
8019 tem = fold_truth_not_expr (loc, arg0);
8020 if (!tem)
8021 return NULL_TREE;
8022 return fold_convert_loc (loc, type, tem);
8024 case INDIRECT_REF:
8025 /* Fold *&X to X if X is an lvalue. */
8026 if (TREE_CODE (op0) == ADDR_EXPR)
8028 tree op00 = TREE_OPERAND (op0, 0);
8029 if ((VAR_P (op00)
8030 || TREE_CODE (op00) == PARM_DECL
8031 || TREE_CODE (op00) == RESULT_DECL)
8032 && !TREE_READONLY (op00))
8033 return op00;
8035 return NULL_TREE;
8037 default:
8038 return NULL_TREE;
8039 } /* switch (code) */
8043 /* If the operation was a conversion do _not_ mark a resulting constant
8044 with TREE_OVERFLOW if the original constant was not. These conversions
8045 have implementation defined behavior and retaining the TREE_OVERFLOW
8046 flag here would confuse later passes such as VRP. */
8047 tree
8048 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8049 tree type, tree op0)
8051 tree res = fold_unary_loc (loc, code, type, op0);
8052 if (res
8053 && TREE_CODE (res) == INTEGER_CST
8054 && TREE_CODE (op0) == INTEGER_CST
8055 && CONVERT_EXPR_CODE_P (code))
8056 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8058 return res;
8061 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8062 operands OP0 and OP1. LOC is the location of the resulting expression.
8063 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8064 Return the folded expression if folding is successful. Otherwise,
8065 return NULL_TREE. */
8066 static tree
8067 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8068 tree arg0, tree arg1, tree op0, tree op1)
8070 tree tem;
8072 /* We only do these simplifications if we are optimizing. */
8073 if (!optimize)
8074 return NULL_TREE;
8076 /* Check for things like (A || B) && (A || C). We can convert this
8077 to A || (B && C). Note that either operator can be any of the four
8078 truth and/or operations and the transformation will still be
8079 valid. Also note that we only care about order for the
8080 ANDIF and ORIF operators. If B contains side effects, this
8081 might change the truth-value of A. */
8082 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8083 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8084 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8085 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8086 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8087 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8089 tree a00 = TREE_OPERAND (arg0, 0);
8090 tree a01 = TREE_OPERAND (arg0, 1);
8091 tree a10 = TREE_OPERAND (arg1, 0);
8092 tree a11 = TREE_OPERAND (arg1, 1);
8093 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8094 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8095 && (code == TRUTH_AND_EXPR
8096 || code == TRUTH_OR_EXPR));
8098 if (operand_equal_p (a00, a10, 0))
8099 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8100 fold_build2_loc (loc, code, type, a01, a11));
8101 else if (commutative && operand_equal_p (a00, a11, 0))
8102 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8103 fold_build2_loc (loc, code, type, a01, a10));
8104 else if (commutative && operand_equal_p (a01, a10, 0))
8105 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8106 fold_build2_loc (loc, code, type, a00, a11));
8108 /* This case if tricky because we must either have commutative
8109 operators or else A10 must not have side-effects. */
8111 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8112 && operand_equal_p (a01, a11, 0))
8113 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8114 fold_build2_loc (loc, code, type, a00, a10),
8115 a01);
8118 /* See if we can build a range comparison. */
8119 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
8120 return tem;
8122 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8123 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8125 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8126 if (tem)
8127 return fold_build2_loc (loc, code, type, tem, arg1);
8130 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8131 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8133 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8134 if (tem)
8135 return fold_build2_loc (loc, code, type, arg0, tem);
8138 /* Check for the possibility of merging component references. If our
8139 lhs is another similar operation, try to merge its rhs with our
8140 rhs. Then try to merge our lhs and rhs. */
8141 if (TREE_CODE (arg0) == code
8142 && (tem = fold_truth_andor_1 (loc, code, type,
8143 TREE_OPERAND (arg0, 1), arg1)) != 0)
8144 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8146 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8147 return tem;
8149 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8150 && !flag_sanitize_coverage
8151 && (code == TRUTH_AND_EXPR
8152 || code == TRUTH_ANDIF_EXPR
8153 || code == TRUTH_OR_EXPR
8154 || code == TRUTH_ORIF_EXPR))
8156 enum tree_code ncode, icode;
8158 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8159 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8160 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8162 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8163 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8164 We don't want to pack more than two leafs to a non-IF AND/OR
8165 expression.
8166 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8167 equal to IF-CODE, then we don't want to add right-hand operand.
8168 If the inner right-hand side of left-hand operand has
8169 side-effects, or isn't simple, then we can't add to it,
8170 as otherwise we might destroy if-sequence. */
8171 if (TREE_CODE (arg0) == icode
8172 && simple_operand_p_2 (arg1)
8173 /* Needed for sequence points to handle trappings, and
8174 side-effects. */
8175 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8177 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8178 arg1);
8179 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8180 tem);
8182 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8183 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8184 else if (TREE_CODE (arg1) == icode
8185 && simple_operand_p_2 (arg0)
8186 /* Needed for sequence points to handle trappings, and
8187 side-effects. */
8188 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8190 tem = fold_build2_loc (loc, ncode, type,
8191 arg0, TREE_OPERAND (arg1, 0));
8192 return fold_build2_loc (loc, icode, type, tem,
8193 TREE_OPERAND (arg1, 1));
8195 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8196 into (A OR B).
8197 For sequence point consistancy, we need to check for trapping,
8198 and side-effects. */
8199 else if (code == icode && simple_operand_p_2 (arg0)
8200 && simple_operand_p_2 (arg1))
8201 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8204 return NULL_TREE;
8207 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8208 by changing CODE to reduce the magnitude of constants involved in
8209 ARG0 of the comparison.
8210 Returns a canonicalized comparison tree if a simplification was
8211 possible, otherwise returns NULL_TREE.
8212 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8213 valid if signed overflow is undefined. */
8215 static tree
8216 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8217 tree arg0, tree arg1,
8218 bool *strict_overflow_p)
8220 enum tree_code code0 = TREE_CODE (arg0);
8221 tree t, cst0 = NULL_TREE;
8222 int sgn0;
8224 /* Match A +- CST code arg1. We can change this only if overflow
8225 is undefined. */
8226 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8227 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8228 /* In principle pointers also have undefined overflow behavior,
8229 but that causes problems elsewhere. */
8230 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8231 && (code0 == MINUS_EXPR
8232 || code0 == PLUS_EXPR)
8233 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8234 return NULL_TREE;
8236 /* Identify the constant in arg0 and its sign. */
8237 cst0 = TREE_OPERAND (arg0, 1);
8238 sgn0 = tree_int_cst_sgn (cst0);
8240 /* Overflowed constants and zero will cause problems. */
8241 if (integer_zerop (cst0)
8242 || TREE_OVERFLOW (cst0))
8243 return NULL_TREE;
8245 /* See if we can reduce the magnitude of the constant in
8246 arg0 by changing the comparison code. */
8247 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8248 if (code == LT_EXPR
8249 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8250 code = LE_EXPR;
8251 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8252 else if (code == GT_EXPR
8253 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8254 code = GE_EXPR;
8255 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8256 else if (code == LE_EXPR
8257 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8258 code = LT_EXPR;
8259 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8260 else if (code == GE_EXPR
8261 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8262 code = GT_EXPR;
8263 else
8264 return NULL_TREE;
8265 *strict_overflow_p = true;
8267 /* Now build the constant reduced in magnitude. But not if that
8268 would produce one outside of its types range. */
8269 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8270 && ((sgn0 == 1
8271 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8272 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8273 || (sgn0 == -1
8274 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8275 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8276 return NULL_TREE;
8278 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8279 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8280 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8281 t = fold_convert (TREE_TYPE (arg1), t);
8283 return fold_build2_loc (loc, code, type, t, arg1);
8286 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8287 overflow further. Try to decrease the magnitude of constants involved
8288 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8289 and put sole constants at the second argument position.
8290 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8292 static tree
8293 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8294 tree arg0, tree arg1)
8296 tree t;
8297 bool strict_overflow_p;
8298 const char * const warnmsg = G_("assuming signed overflow does not occur "
8299 "when reducing constant in comparison");
8301 /* Try canonicalization by simplifying arg0. */
8302 strict_overflow_p = false;
8303 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8304 &strict_overflow_p);
8305 if (t)
8307 if (strict_overflow_p)
8308 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8309 return t;
8312 /* Try canonicalization by simplifying arg1 using the swapped
8313 comparison. */
8314 code = swap_tree_comparison (code);
8315 strict_overflow_p = false;
8316 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8317 &strict_overflow_p);
8318 if (t && strict_overflow_p)
8319 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8320 return t;
8323 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8324 space. This is used to avoid issuing overflow warnings for
8325 expressions like &p->x which can not wrap. */
8327 static bool
8328 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
8330 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8331 return true;
8333 if (maybe_lt (bitpos, 0))
8334 return true;
8336 poly_wide_int wi_offset;
8337 int precision = TYPE_PRECISION (TREE_TYPE (base));
8338 if (offset == NULL_TREE)
8339 wi_offset = wi::zero (precision);
8340 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
8341 return true;
8342 else
8343 wi_offset = wi::to_poly_wide (offset);
8345 bool overflow;
8346 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
8347 precision);
8348 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8349 if (overflow)
8350 return true;
8352 poly_uint64 total_hwi, size;
8353 if (!total.to_uhwi (&total_hwi)
8354 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
8355 &size)
8356 || known_eq (size, 0U))
8357 return true;
8359 if (known_le (total_hwi, size))
8360 return false;
8362 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8363 array. */
8364 if (TREE_CODE (base) == ADDR_EXPR
8365 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
8366 &size)
8367 && maybe_ne (size, 0U)
8368 && known_le (total_hwi, size))
8369 return false;
8371 return true;
8374 /* Return a positive integer when the symbol DECL is known to have
8375 a nonzero address, zero when it's known not to (e.g., it's a weak
8376 symbol), and a negative integer when the symbol is not yet in the
8377 symbol table and so whether or not its address is zero is unknown.
8378 For function local objects always return positive integer. */
8379 static int
8380 maybe_nonzero_address (tree decl)
8382 if (DECL_P (decl) && decl_in_symtab_p (decl))
8383 if (struct symtab_node *symbol = symtab_node::get_create (decl))
8384 return symbol->nonzero_address ();
8386 /* Function local objects are never NULL. */
8387 if (DECL_P (decl)
8388 && (DECL_CONTEXT (decl)
8389 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8390 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8391 return 1;
8393 return -1;
8396 /* Subroutine of fold_binary. This routine performs all of the
8397 transformations that are common to the equality/inequality
8398 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8399 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8400 fold_binary should call fold_binary. Fold a comparison with
8401 tree code CODE and type TYPE with operands OP0 and OP1. Return
8402 the folded comparison or NULL_TREE. */
8404 static tree
8405 fold_comparison (location_t loc, enum tree_code code, tree type,
8406 tree op0, tree op1)
8408 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8409 tree arg0, arg1, tem;
8411 arg0 = op0;
8412 arg1 = op1;
8414 STRIP_SIGN_NOPS (arg0);
8415 STRIP_SIGN_NOPS (arg1);
8417 /* For comparisons of pointers we can decompose it to a compile time
8418 comparison of the base objects and the offsets into the object.
8419 This requires at least one operand being an ADDR_EXPR or a
8420 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8421 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8422 && (TREE_CODE (arg0) == ADDR_EXPR
8423 || TREE_CODE (arg1) == ADDR_EXPR
8424 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8425 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8427 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8428 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8429 machine_mode mode;
8430 int volatilep, reversep, unsignedp;
8431 bool indirect_base0 = false, indirect_base1 = false;
8433 /* Get base and offset for the access. Strip ADDR_EXPR for
8434 get_inner_reference, but put it back by stripping INDIRECT_REF
8435 off the base object if possible. indirect_baseN will be true
8436 if baseN is not an address but refers to the object itself. */
8437 base0 = arg0;
8438 if (TREE_CODE (arg0) == ADDR_EXPR)
8440 base0
8441 = get_inner_reference (TREE_OPERAND (arg0, 0),
8442 &bitsize, &bitpos0, &offset0, &mode,
8443 &unsignedp, &reversep, &volatilep);
8444 if (TREE_CODE (base0) == INDIRECT_REF)
8445 base0 = TREE_OPERAND (base0, 0);
8446 else
8447 indirect_base0 = true;
8449 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8451 base0 = TREE_OPERAND (arg0, 0);
8452 STRIP_SIGN_NOPS (base0);
8453 if (TREE_CODE (base0) == ADDR_EXPR)
8455 base0
8456 = get_inner_reference (TREE_OPERAND (base0, 0),
8457 &bitsize, &bitpos0, &offset0, &mode,
8458 &unsignedp, &reversep, &volatilep);
8459 if (TREE_CODE (base0) == INDIRECT_REF)
8460 base0 = TREE_OPERAND (base0, 0);
8461 else
8462 indirect_base0 = true;
8464 if (offset0 == NULL_TREE || integer_zerop (offset0))
8465 offset0 = TREE_OPERAND (arg0, 1);
8466 else
8467 offset0 = size_binop (PLUS_EXPR, offset0,
8468 TREE_OPERAND (arg0, 1));
8469 if (TREE_CODE (offset0) == INTEGER_CST)
8471 offset_int tem = wi::sext (wi::to_offset (offset0),
8472 TYPE_PRECISION (sizetype));
8473 tem <<= LOG2_BITS_PER_UNIT;
8474 tem += bitpos0;
8475 if (wi::fits_shwi_p (tem))
8477 bitpos0 = tem.to_shwi ();
8478 offset0 = NULL_TREE;
8483 base1 = arg1;
8484 if (TREE_CODE (arg1) == ADDR_EXPR)
8486 base1
8487 = get_inner_reference (TREE_OPERAND (arg1, 0),
8488 &bitsize, &bitpos1, &offset1, &mode,
8489 &unsignedp, &reversep, &volatilep);
8490 if (TREE_CODE (base1) == INDIRECT_REF)
8491 base1 = TREE_OPERAND (base1, 0);
8492 else
8493 indirect_base1 = true;
8495 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8497 base1 = TREE_OPERAND (arg1, 0);
8498 STRIP_SIGN_NOPS (base1);
8499 if (TREE_CODE (base1) == ADDR_EXPR)
8501 base1
8502 = get_inner_reference (TREE_OPERAND (base1, 0),
8503 &bitsize, &bitpos1, &offset1, &mode,
8504 &unsignedp, &reversep, &volatilep);
8505 if (TREE_CODE (base1) == INDIRECT_REF)
8506 base1 = TREE_OPERAND (base1, 0);
8507 else
8508 indirect_base1 = true;
8510 if (offset1 == NULL_TREE || integer_zerop (offset1))
8511 offset1 = TREE_OPERAND (arg1, 1);
8512 else
8513 offset1 = size_binop (PLUS_EXPR, offset1,
8514 TREE_OPERAND (arg1, 1));
8515 if (TREE_CODE (offset1) == INTEGER_CST)
8517 offset_int tem = wi::sext (wi::to_offset (offset1),
8518 TYPE_PRECISION (sizetype));
8519 tem <<= LOG2_BITS_PER_UNIT;
8520 tem += bitpos1;
8521 if (wi::fits_shwi_p (tem))
8523 bitpos1 = tem.to_shwi ();
8524 offset1 = NULL_TREE;
8529 /* If we have equivalent bases we might be able to simplify. */
8530 if (indirect_base0 == indirect_base1
8531 && operand_equal_p (base0, base1,
8532 indirect_base0 ? OEP_ADDRESS_OF : 0))
8534 /* We can fold this expression to a constant if the non-constant
8535 offset parts are equal. */
8536 if (offset0 == offset1
8537 || (offset0 && offset1
8538 && operand_equal_p (offset0, offset1, 0)))
8540 if (!equality_code
8541 && bitpos0 != bitpos1
8542 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8543 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8544 fold_overflow_warning (("assuming pointer wraparound does not "
8545 "occur when comparing P +- C1 with "
8546 "P +- C2"),
8547 WARN_STRICT_OVERFLOW_CONDITIONAL);
8549 switch (code)
8551 case EQ_EXPR:
8552 return constant_boolean_node (bitpos0 == bitpos1, type);
8553 case NE_EXPR:
8554 return constant_boolean_node (bitpos0 != bitpos1, type);
8555 case LT_EXPR:
8556 return constant_boolean_node (bitpos0 < bitpos1, type);
8557 case LE_EXPR:
8558 return constant_boolean_node (bitpos0 <= bitpos1, type);
8559 case GE_EXPR:
8560 return constant_boolean_node (bitpos0 >= bitpos1, type);
8561 case GT_EXPR:
8562 return constant_boolean_node (bitpos0 > bitpos1, type);
8563 default:;
8566 /* We can simplify the comparison to a comparison of the variable
8567 offset parts if the constant offset parts are equal.
8568 Be careful to use signed sizetype here because otherwise we
8569 mess with array offsets in the wrong way. This is possible
8570 because pointer arithmetic is restricted to retain within an
8571 object and overflow on pointer differences is undefined as of
8572 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8573 else if (bitpos0 == bitpos1)
8575 /* By converting to signed sizetype we cover middle-end pointer
8576 arithmetic which operates on unsigned pointer types of size
8577 type size and ARRAY_REF offsets which are properly sign or
8578 zero extended from their type in case it is narrower than
8579 sizetype. */
8580 if (offset0 == NULL_TREE)
8581 offset0 = build_int_cst (ssizetype, 0);
8582 else
8583 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8584 if (offset1 == NULL_TREE)
8585 offset1 = build_int_cst (ssizetype, 0);
8586 else
8587 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8589 if (!equality_code
8590 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8591 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8592 fold_overflow_warning (("assuming pointer wraparound does not "
8593 "occur when comparing P +- C1 with "
8594 "P +- C2"),
8595 WARN_STRICT_OVERFLOW_COMPARISON);
8597 return fold_build2_loc (loc, code, type, offset0, offset1);
8600 /* For equal offsets we can simplify to a comparison of the
8601 base addresses. */
8602 else if (bitpos0 == bitpos1
8603 && (indirect_base0
8604 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8605 && (indirect_base1
8606 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8607 && ((offset0 == offset1)
8608 || (offset0 && offset1
8609 && operand_equal_p (offset0, offset1, 0))))
8611 if (indirect_base0)
8612 base0 = build_fold_addr_expr_loc (loc, base0);
8613 if (indirect_base1)
8614 base1 = build_fold_addr_expr_loc (loc, base1);
8615 return fold_build2_loc (loc, code, type, base0, base1);
8617 /* Comparison between an ordinary (non-weak) symbol and a null
8618 pointer can be eliminated since such symbols must have a non
8619 null address. In C, relational expressions between pointers
8620 to objects and null pointers are undefined. The results
8621 below follow the C++ rules with the additional property that
8622 every object pointer compares greater than a null pointer.
8624 else if (((DECL_P (base0)
8625 && maybe_nonzero_address (base0) > 0
8626 /* Avoid folding references to struct members at offset 0 to
8627 prevent tests like '&ptr->firstmember == 0' from getting
8628 eliminated. When ptr is null, although the -> expression
8629 is strictly speaking invalid, GCC retains it as a matter
8630 of QoI. See PR c/44555. */
8631 && (offset0 == NULL_TREE && bitpos0 != 0))
8632 || CONSTANT_CLASS_P (base0))
8633 && indirect_base0
8634 /* The caller guarantees that when one of the arguments is
8635 constant (i.e., null in this case) it is second. */
8636 && integer_zerop (arg1))
8638 switch (code)
8640 case EQ_EXPR:
8641 case LE_EXPR:
8642 case LT_EXPR:
8643 return constant_boolean_node (false, type);
8644 case GE_EXPR:
8645 case GT_EXPR:
8646 case NE_EXPR:
8647 return constant_boolean_node (true, type);
8648 default:
8649 gcc_unreachable ();
8654 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8655 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8656 the resulting offset is smaller in absolute value than the
8657 original one and has the same sign. */
8658 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8659 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8660 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8661 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8662 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8663 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8664 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8665 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8667 tree const1 = TREE_OPERAND (arg0, 1);
8668 tree const2 = TREE_OPERAND (arg1, 1);
8669 tree variable1 = TREE_OPERAND (arg0, 0);
8670 tree variable2 = TREE_OPERAND (arg1, 0);
8671 tree cst;
8672 const char * const warnmsg = G_("assuming signed overflow does not "
8673 "occur when combining constants around "
8674 "a comparison");
8676 /* Put the constant on the side where it doesn't overflow and is
8677 of lower absolute value and of same sign than before. */
8678 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8679 ? MINUS_EXPR : PLUS_EXPR,
8680 const2, const1);
8681 if (!TREE_OVERFLOW (cst)
8682 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8683 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8685 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8686 return fold_build2_loc (loc, code, type,
8687 variable1,
8688 fold_build2_loc (loc, TREE_CODE (arg1),
8689 TREE_TYPE (arg1),
8690 variable2, cst));
8693 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8694 ? MINUS_EXPR : PLUS_EXPR,
8695 const1, const2);
8696 if (!TREE_OVERFLOW (cst)
8697 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8698 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8700 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8701 return fold_build2_loc (loc, code, type,
8702 fold_build2_loc (loc, TREE_CODE (arg0),
8703 TREE_TYPE (arg0),
8704 variable1, cst),
8705 variable2);
8709 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8710 if (tem)
8711 return tem;
8713 /* If we are comparing an expression that just has comparisons
8714 of two integer values, arithmetic expressions of those comparisons,
8715 and constants, we can simplify it. There are only three cases
8716 to check: the two values can either be equal, the first can be
8717 greater, or the second can be greater. Fold the expression for
8718 those three values. Since each value must be 0 or 1, we have
8719 eight possibilities, each of which corresponds to the constant 0
8720 or 1 or one of the six possible comparisons.
8722 This handles common cases like (a > b) == 0 but also handles
8723 expressions like ((x > y) - (y > x)) > 0, which supposedly
8724 occur in macroized code. */
8726 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8728 tree cval1 = 0, cval2 = 0;
8729 int save_p = 0;
8731 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8732 /* Don't handle degenerate cases here; they should already
8733 have been handled anyway. */
8734 && cval1 != 0 && cval2 != 0
8735 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8736 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8737 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8738 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8739 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8740 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8741 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8743 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8744 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8746 /* We can't just pass T to eval_subst in case cval1 or cval2
8747 was the same as ARG1. */
8749 tree high_result
8750 = fold_build2_loc (loc, code, type,
8751 eval_subst (loc, arg0, cval1, maxval,
8752 cval2, minval),
8753 arg1);
8754 tree equal_result
8755 = fold_build2_loc (loc, code, type,
8756 eval_subst (loc, arg0, cval1, maxval,
8757 cval2, maxval),
8758 arg1);
8759 tree low_result
8760 = fold_build2_loc (loc, code, type,
8761 eval_subst (loc, arg0, cval1, minval,
8762 cval2, maxval),
8763 arg1);
8765 /* All three of these results should be 0 or 1. Confirm they are.
8766 Then use those values to select the proper code to use. */
8768 if (TREE_CODE (high_result) == INTEGER_CST
8769 && TREE_CODE (equal_result) == INTEGER_CST
8770 && TREE_CODE (low_result) == INTEGER_CST)
8772 /* Make a 3-bit mask with the high-order bit being the
8773 value for `>', the next for '=', and the low for '<'. */
8774 switch ((integer_onep (high_result) * 4)
8775 + (integer_onep (equal_result) * 2)
8776 + integer_onep (low_result))
8778 case 0:
8779 /* Always false. */
8780 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8781 case 1:
8782 code = LT_EXPR;
8783 break;
8784 case 2:
8785 code = EQ_EXPR;
8786 break;
8787 case 3:
8788 code = LE_EXPR;
8789 break;
8790 case 4:
8791 code = GT_EXPR;
8792 break;
8793 case 5:
8794 code = NE_EXPR;
8795 break;
8796 case 6:
8797 code = GE_EXPR;
8798 break;
8799 case 7:
8800 /* Always true. */
8801 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8804 if (save_p)
8806 tem = save_expr (build2 (code, type, cval1, cval2));
8807 protected_set_expr_location (tem, loc);
8808 return tem;
8810 return fold_build2_loc (loc, code, type, cval1, cval2);
8815 return NULL_TREE;
8819 /* Subroutine of fold_binary. Optimize complex multiplications of the
8820 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8821 argument EXPR represents the expression "z" of type TYPE. */
8823 static tree
8824 fold_mult_zconjz (location_t loc, tree type, tree expr)
8826 tree itype = TREE_TYPE (type);
8827 tree rpart, ipart, tem;
8829 if (TREE_CODE (expr) == COMPLEX_EXPR)
8831 rpart = TREE_OPERAND (expr, 0);
8832 ipart = TREE_OPERAND (expr, 1);
8834 else if (TREE_CODE (expr) == COMPLEX_CST)
8836 rpart = TREE_REALPART (expr);
8837 ipart = TREE_IMAGPART (expr);
8839 else
8841 expr = save_expr (expr);
8842 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8843 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8846 rpart = save_expr (rpart);
8847 ipart = save_expr (ipart);
8848 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8849 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8850 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8851 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8852 build_zero_cst (itype));
8856 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8857 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
8858 true if successful. */
8860 static bool
8861 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
8863 unsigned int i;
8865 if (TREE_CODE (arg) == VECTOR_CST)
8867 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8868 elts[i] = VECTOR_CST_ELT (arg, i);
8870 else if (TREE_CODE (arg) == CONSTRUCTOR)
8872 constructor_elt *elt;
8874 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8875 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8876 return false;
8877 else
8878 elts[i] = elt->value;
8880 else
8881 return false;
8882 for (; i < nelts; i++)
8883 elts[i]
8884 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8885 return true;
8888 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8889 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8890 NULL_TREE otherwise. */
8892 static tree
8893 fold_vec_perm (tree type, tree arg0, tree arg1, vec_perm_indices sel)
8895 unsigned int i;
8896 bool need_ctor = false;
8898 unsigned int nelts = sel.length ();
8899 gcc_assert (TYPE_VECTOR_SUBPARTS (type) == nelts
8900 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8901 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8902 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8903 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8904 return NULL_TREE;
8906 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
8907 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
8908 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
8909 return NULL_TREE;
8911 tree_vector_builder out_elts (type, nelts, 1);
8912 for (i = 0; i < nelts; i++)
8914 if (!CONSTANT_CLASS_P (in_elts[sel[i]]))
8915 need_ctor = true;
8916 out_elts.quick_push (unshare_expr (in_elts[sel[i]]));
8919 if (need_ctor)
8921 vec<constructor_elt, va_gc> *v;
8922 vec_alloc (v, nelts);
8923 for (i = 0; i < nelts; i++)
8924 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
8925 return build_constructor (type, v);
8927 else
8928 return out_elts.build ();
8931 /* Try to fold a pointer difference of type TYPE two address expressions of
8932 array references AREF0 and AREF1 using location LOC. Return a
8933 simplified expression for the difference or NULL_TREE. */
8935 static tree
8936 fold_addr_of_array_ref_difference (location_t loc, tree type,
8937 tree aref0, tree aref1,
8938 bool use_pointer_diff)
8940 tree base0 = TREE_OPERAND (aref0, 0);
8941 tree base1 = TREE_OPERAND (aref1, 0);
8942 tree base_offset = build_int_cst (type, 0);
8944 /* If the bases are array references as well, recurse. If the bases
8945 are pointer indirections compute the difference of the pointers.
8946 If the bases are equal, we are set. */
8947 if ((TREE_CODE (base0) == ARRAY_REF
8948 && TREE_CODE (base1) == ARRAY_REF
8949 && (base_offset
8950 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
8951 use_pointer_diff)))
8952 || (INDIRECT_REF_P (base0)
8953 && INDIRECT_REF_P (base1)
8954 && (base_offset
8955 = use_pointer_diff
8956 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
8957 TREE_OPERAND (base0, 0),
8958 TREE_OPERAND (base1, 0))
8959 : fold_binary_loc (loc, MINUS_EXPR, type,
8960 fold_convert (type,
8961 TREE_OPERAND (base0, 0)),
8962 fold_convert (type,
8963 TREE_OPERAND (base1, 0)))))
8964 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8966 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8967 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8968 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8969 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
8970 return fold_build2_loc (loc, PLUS_EXPR, type,
8971 base_offset,
8972 fold_build2_loc (loc, MULT_EXPR, type,
8973 diff, esz));
8975 return NULL_TREE;
8978 /* If the real or vector real constant CST of type TYPE has an exact
8979 inverse, return it, else return NULL. */
8981 tree
8982 exact_inverse (tree type, tree cst)
8984 REAL_VALUE_TYPE r;
8985 tree unit_type;
8986 machine_mode mode;
8988 switch (TREE_CODE (cst))
8990 case REAL_CST:
8991 r = TREE_REAL_CST (cst);
8993 if (exact_real_inverse (TYPE_MODE (type), &r))
8994 return build_real (type, r);
8996 return NULL_TREE;
8998 case VECTOR_CST:
9000 unit_type = TREE_TYPE (type);
9001 mode = TYPE_MODE (unit_type);
9003 tree_vector_builder elts;
9004 if (!elts.new_unary_operation (type, cst, false))
9005 return NULL_TREE;
9006 unsigned int count = elts.encoded_nelts ();
9007 for (unsigned int i = 0; i < count; ++i)
9009 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9010 if (!exact_real_inverse (mode, &r))
9011 return NULL_TREE;
9012 elts.quick_push (build_real (unit_type, r));
9015 return elts.build ();
9018 default:
9019 return NULL_TREE;
9023 /* Mask out the tz least significant bits of X of type TYPE where
9024 tz is the number of trailing zeroes in Y. */
9025 static wide_int
9026 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9028 int tz = wi::ctz (y);
9029 if (tz > 0)
9030 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9031 return x;
9034 /* Return true when T is an address and is known to be nonzero.
9035 For floating point we further ensure that T is not denormal.
9036 Similar logic is present in nonzero_address in rtlanal.h.
9038 If the return value is based on the assumption that signed overflow
9039 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9040 change *STRICT_OVERFLOW_P. */
9042 static bool
9043 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9045 tree type = TREE_TYPE (t);
9046 enum tree_code code;
9048 /* Doing something useful for floating point would need more work. */
9049 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9050 return false;
9052 code = TREE_CODE (t);
9053 switch (TREE_CODE_CLASS (code))
9055 case tcc_unary:
9056 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9057 strict_overflow_p);
9058 case tcc_binary:
9059 case tcc_comparison:
9060 return tree_binary_nonzero_warnv_p (code, type,
9061 TREE_OPERAND (t, 0),
9062 TREE_OPERAND (t, 1),
9063 strict_overflow_p);
9064 case tcc_constant:
9065 case tcc_declaration:
9066 case tcc_reference:
9067 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9069 default:
9070 break;
9073 switch (code)
9075 case TRUTH_NOT_EXPR:
9076 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9077 strict_overflow_p);
9079 case TRUTH_AND_EXPR:
9080 case TRUTH_OR_EXPR:
9081 case TRUTH_XOR_EXPR:
9082 return tree_binary_nonzero_warnv_p (code, type,
9083 TREE_OPERAND (t, 0),
9084 TREE_OPERAND (t, 1),
9085 strict_overflow_p);
9087 case COND_EXPR:
9088 case CONSTRUCTOR:
9089 case OBJ_TYPE_REF:
9090 case ASSERT_EXPR:
9091 case ADDR_EXPR:
9092 case WITH_SIZE_EXPR:
9093 case SSA_NAME:
9094 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9096 case COMPOUND_EXPR:
9097 case MODIFY_EXPR:
9098 case BIND_EXPR:
9099 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9100 strict_overflow_p);
9102 case SAVE_EXPR:
9103 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9104 strict_overflow_p);
9106 case CALL_EXPR:
9108 tree fndecl = get_callee_fndecl (t);
9109 if (!fndecl) return false;
9110 if (flag_delete_null_pointer_checks && !flag_check_new
9111 && DECL_IS_OPERATOR_NEW (fndecl)
9112 && !TREE_NOTHROW (fndecl))
9113 return true;
9114 if (flag_delete_null_pointer_checks
9115 && lookup_attribute ("returns_nonnull",
9116 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9117 return true;
9118 return alloca_call_p (t);
9121 default:
9122 break;
9124 return false;
9127 /* Return true when T is an address and is known to be nonzero.
9128 Handle warnings about undefined signed overflow. */
9130 bool
9131 tree_expr_nonzero_p (tree t)
9133 bool ret, strict_overflow_p;
9135 strict_overflow_p = false;
9136 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9137 if (strict_overflow_p)
9138 fold_overflow_warning (("assuming signed overflow does not occur when "
9139 "determining that expression is always "
9140 "non-zero"),
9141 WARN_STRICT_OVERFLOW_MISC);
9142 return ret;
9145 /* Return true if T is known not to be equal to an integer W. */
9147 bool
9148 expr_not_equal_to (tree t, const wide_int &w)
9150 wide_int min, max, nz;
9151 value_range_type rtype;
9152 switch (TREE_CODE (t))
9154 case INTEGER_CST:
9155 return wi::to_wide (t) != w;
9157 case SSA_NAME:
9158 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9159 return false;
9160 rtype = get_range_info (t, &min, &max);
9161 if (rtype == VR_RANGE)
9163 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9164 return true;
9165 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9166 return true;
9168 else if (rtype == VR_ANTI_RANGE
9169 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9170 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9171 return true;
9172 /* If T has some known zero bits and W has any of those bits set,
9173 then T is known not to be equal to W. */
9174 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9175 TYPE_PRECISION (TREE_TYPE (t))), 0))
9176 return true;
9177 return false;
9179 default:
9180 return false;
9184 /* Fold a binary expression of code CODE and type TYPE with operands
9185 OP0 and OP1. LOC is the location of the resulting expression.
9186 Return the folded expression if folding is successful. Otherwise,
9187 return NULL_TREE. */
9189 tree
9190 fold_binary_loc (location_t loc, enum tree_code code, tree type,
9191 tree op0, tree op1)
9193 enum tree_code_class kind = TREE_CODE_CLASS (code);
9194 tree arg0, arg1, tem;
9195 tree t1 = NULL_TREE;
9196 bool strict_overflow_p;
9197 unsigned int prec;
9199 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9200 && TREE_CODE_LENGTH (code) == 2
9201 && op0 != NULL_TREE
9202 && op1 != NULL_TREE);
9204 arg0 = op0;
9205 arg1 = op1;
9207 /* Strip any conversions that don't change the mode. This is
9208 safe for every expression, except for a comparison expression
9209 because its signedness is derived from its operands. So, in
9210 the latter case, only strip conversions that don't change the
9211 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9212 preserved.
9214 Note that this is done as an internal manipulation within the
9215 constant folder, in order to find the simplest representation
9216 of the arguments so that their form can be studied. In any
9217 cases, the appropriate type conversions should be put back in
9218 the tree that will get out of the constant folder. */
9220 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9222 STRIP_SIGN_NOPS (arg0);
9223 STRIP_SIGN_NOPS (arg1);
9225 else
9227 STRIP_NOPS (arg0);
9228 STRIP_NOPS (arg1);
9231 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9232 constant but we can't do arithmetic on them. */
9233 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9235 tem = const_binop (code, type, arg0, arg1);
9236 if (tem != NULL_TREE)
9238 if (TREE_TYPE (tem) != type)
9239 tem = fold_convert_loc (loc, type, tem);
9240 return tem;
9244 /* If this is a commutative operation, and ARG0 is a constant, move it
9245 to ARG1 to reduce the number of tests below. */
9246 if (commutative_tree_code (code)
9247 && tree_swap_operands_p (arg0, arg1))
9248 return fold_build2_loc (loc, code, type, op1, op0);
9250 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9251 to ARG1 to reduce the number of tests below. */
9252 if (kind == tcc_comparison
9253 && tree_swap_operands_p (arg0, arg1))
9254 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9256 tem = generic_simplify (loc, code, type, op0, op1);
9257 if (tem)
9258 return tem;
9260 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9262 First check for cases where an arithmetic operation is applied to a
9263 compound, conditional, or comparison operation. Push the arithmetic
9264 operation inside the compound or conditional to see if any folding
9265 can then be done. Convert comparison to conditional for this purpose.
9266 The also optimizes non-constant cases that used to be done in
9267 expand_expr.
9269 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9270 one of the operands is a comparison and the other is a comparison, a
9271 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9272 code below would make the expression more complex. Change it to a
9273 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9274 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9276 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9277 || code == EQ_EXPR || code == NE_EXPR)
9278 && TREE_CODE (type) != VECTOR_TYPE
9279 && ((truth_value_p (TREE_CODE (arg0))
9280 && (truth_value_p (TREE_CODE (arg1))
9281 || (TREE_CODE (arg1) == BIT_AND_EXPR
9282 && integer_onep (TREE_OPERAND (arg1, 1)))))
9283 || (truth_value_p (TREE_CODE (arg1))
9284 && (truth_value_p (TREE_CODE (arg0))
9285 || (TREE_CODE (arg0) == BIT_AND_EXPR
9286 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9288 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9289 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9290 : TRUTH_XOR_EXPR,
9291 boolean_type_node,
9292 fold_convert_loc (loc, boolean_type_node, arg0),
9293 fold_convert_loc (loc, boolean_type_node, arg1));
9295 if (code == EQ_EXPR)
9296 tem = invert_truthvalue_loc (loc, tem);
9298 return fold_convert_loc (loc, type, tem);
9301 if (TREE_CODE_CLASS (code) == tcc_binary
9302 || TREE_CODE_CLASS (code) == tcc_comparison)
9304 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9306 tem = fold_build2_loc (loc, code, type,
9307 fold_convert_loc (loc, TREE_TYPE (op0),
9308 TREE_OPERAND (arg0, 1)), op1);
9309 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9310 tem);
9312 if (TREE_CODE (arg1) == COMPOUND_EXPR)
9314 tem = fold_build2_loc (loc, code, type, op0,
9315 fold_convert_loc (loc, TREE_TYPE (op1),
9316 TREE_OPERAND (arg1, 1)));
9317 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9318 tem);
9321 if (TREE_CODE (arg0) == COND_EXPR
9322 || TREE_CODE (arg0) == VEC_COND_EXPR
9323 || COMPARISON_CLASS_P (arg0))
9325 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9326 arg0, arg1,
9327 /*cond_first_p=*/1);
9328 if (tem != NULL_TREE)
9329 return tem;
9332 if (TREE_CODE (arg1) == COND_EXPR
9333 || TREE_CODE (arg1) == VEC_COND_EXPR
9334 || COMPARISON_CLASS_P (arg1))
9336 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9337 arg1, arg0,
9338 /*cond_first_p=*/0);
9339 if (tem != NULL_TREE)
9340 return tem;
9344 switch (code)
9346 case MEM_REF:
9347 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9348 if (TREE_CODE (arg0) == ADDR_EXPR
9349 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9351 tree iref = TREE_OPERAND (arg0, 0);
9352 return fold_build2 (MEM_REF, type,
9353 TREE_OPERAND (iref, 0),
9354 int_const_binop (PLUS_EXPR, arg1,
9355 TREE_OPERAND (iref, 1)));
9358 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9359 if (TREE_CODE (arg0) == ADDR_EXPR
9360 && handled_component_p (TREE_OPERAND (arg0, 0)))
9362 tree base;
9363 poly_int64 coffset;
9364 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9365 &coffset);
9366 if (!base)
9367 return NULL_TREE;
9368 return fold_build2 (MEM_REF, type,
9369 build_fold_addr_expr (base),
9370 int_const_binop (PLUS_EXPR, arg1,
9371 size_int (coffset)));
9374 return NULL_TREE;
9376 case POINTER_PLUS_EXPR:
9377 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9378 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9379 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9380 return fold_convert_loc (loc, type,
9381 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9382 fold_convert_loc (loc, sizetype,
9383 arg1),
9384 fold_convert_loc (loc, sizetype,
9385 arg0)));
9387 return NULL_TREE;
9389 case PLUS_EXPR:
9390 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9392 /* X + (X / CST) * -CST is X % CST. */
9393 if (TREE_CODE (arg1) == MULT_EXPR
9394 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9395 && operand_equal_p (arg0,
9396 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9398 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9399 tree cst1 = TREE_OPERAND (arg1, 1);
9400 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9401 cst1, cst0);
9402 if (sum && integer_zerop (sum))
9403 return fold_convert_loc (loc, type,
9404 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9405 TREE_TYPE (arg0), arg0,
9406 cst0));
9410 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9411 one. Make sure the type is not saturating and has the signedness of
9412 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9413 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9414 if ((TREE_CODE (arg0) == MULT_EXPR
9415 || TREE_CODE (arg1) == MULT_EXPR)
9416 && !TYPE_SATURATING (type)
9417 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9418 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9419 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9421 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9422 if (tem)
9423 return tem;
9426 if (! FLOAT_TYPE_P (type))
9428 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9429 (plus (plus (mult) (mult)) (foo)) so that we can
9430 take advantage of the factoring cases below. */
9431 if (ANY_INTEGRAL_TYPE_P (type)
9432 && TYPE_OVERFLOW_WRAPS (type)
9433 && (((TREE_CODE (arg0) == PLUS_EXPR
9434 || TREE_CODE (arg0) == MINUS_EXPR)
9435 && TREE_CODE (arg1) == MULT_EXPR)
9436 || ((TREE_CODE (arg1) == PLUS_EXPR
9437 || TREE_CODE (arg1) == MINUS_EXPR)
9438 && TREE_CODE (arg0) == MULT_EXPR)))
9440 tree parg0, parg1, parg, marg;
9441 enum tree_code pcode;
9443 if (TREE_CODE (arg1) == MULT_EXPR)
9444 parg = arg0, marg = arg1;
9445 else
9446 parg = arg1, marg = arg0;
9447 pcode = TREE_CODE (parg);
9448 parg0 = TREE_OPERAND (parg, 0);
9449 parg1 = TREE_OPERAND (parg, 1);
9450 STRIP_NOPS (parg0);
9451 STRIP_NOPS (parg1);
9453 if (TREE_CODE (parg0) == MULT_EXPR
9454 && TREE_CODE (parg1) != MULT_EXPR)
9455 return fold_build2_loc (loc, pcode, type,
9456 fold_build2_loc (loc, PLUS_EXPR, type,
9457 fold_convert_loc (loc, type,
9458 parg0),
9459 fold_convert_loc (loc, type,
9460 marg)),
9461 fold_convert_loc (loc, type, parg1));
9462 if (TREE_CODE (parg0) != MULT_EXPR
9463 && TREE_CODE (parg1) == MULT_EXPR)
9464 return
9465 fold_build2_loc (loc, PLUS_EXPR, type,
9466 fold_convert_loc (loc, type, parg0),
9467 fold_build2_loc (loc, pcode, type,
9468 fold_convert_loc (loc, type, marg),
9469 fold_convert_loc (loc, type,
9470 parg1)));
9473 else
9475 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9476 to __complex__ ( x, y ). This is not the same for SNaNs or
9477 if signed zeros are involved. */
9478 if (!HONOR_SNANS (element_mode (arg0))
9479 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9480 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9482 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9483 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9484 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9485 bool arg0rz = false, arg0iz = false;
9486 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9487 || (arg0i && (arg0iz = real_zerop (arg0i))))
9489 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9490 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9491 if (arg0rz && arg1i && real_zerop (arg1i))
9493 tree rp = arg1r ? arg1r
9494 : build1 (REALPART_EXPR, rtype, arg1);
9495 tree ip = arg0i ? arg0i
9496 : build1 (IMAGPART_EXPR, rtype, arg0);
9497 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9499 else if (arg0iz && arg1r && real_zerop (arg1r))
9501 tree rp = arg0r ? arg0r
9502 : build1 (REALPART_EXPR, rtype, arg0);
9503 tree ip = arg1i ? arg1i
9504 : build1 (IMAGPART_EXPR, rtype, arg1);
9505 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9510 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9511 We associate floats only if the user has specified
9512 -fassociative-math. */
9513 if (flag_associative_math
9514 && TREE_CODE (arg1) == PLUS_EXPR
9515 && TREE_CODE (arg0) != MULT_EXPR)
9517 tree tree10 = TREE_OPERAND (arg1, 0);
9518 tree tree11 = TREE_OPERAND (arg1, 1);
9519 if (TREE_CODE (tree11) == MULT_EXPR
9520 && TREE_CODE (tree10) == MULT_EXPR)
9522 tree tree0;
9523 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9524 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9527 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9528 We associate floats only if the user has specified
9529 -fassociative-math. */
9530 if (flag_associative_math
9531 && TREE_CODE (arg0) == PLUS_EXPR
9532 && TREE_CODE (arg1) != MULT_EXPR)
9534 tree tree00 = TREE_OPERAND (arg0, 0);
9535 tree tree01 = TREE_OPERAND (arg0, 1);
9536 if (TREE_CODE (tree01) == MULT_EXPR
9537 && TREE_CODE (tree00) == MULT_EXPR)
9539 tree tree0;
9540 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9541 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9546 bit_rotate:
9547 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9548 is a rotate of A by C1 bits. */
9549 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9550 is a rotate of A by B bits.
9551 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
9552 though in this case CODE must be | and not + or ^, otherwise
9553 it doesn't return A when B is 0. */
9555 enum tree_code code0, code1;
9556 tree rtype;
9557 code0 = TREE_CODE (arg0);
9558 code1 = TREE_CODE (arg1);
9559 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9560 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9561 && operand_equal_p (TREE_OPERAND (arg0, 0),
9562 TREE_OPERAND (arg1, 0), 0)
9563 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9564 TYPE_UNSIGNED (rtype))
9565 /* Only create rotates in complete modes. Other cases are not
9566 expanded properly. */
9567 && (element_precision (rtype)
9568 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9570 tree tree01, tree11;
9571 tree orig_tree01, orig_tree11;
9572 enum tree_code code01, code11;
9574 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
9575 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
9576 STRIP_NOPS (tree01);
9577 STRIP_NOPS (tree11);
9578 code01 = TREE_CODE (tree01);
9579 code11 = TREE_CODE (tree11);
9580 if (code11 != MINUS_EXPR
9581 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
9583 std::swap (code0, code1);
9584 std::swap (code01, code11);
9585 std::swap (tree01, tree11);
9586 std::swap (orig_tree01, orig_tree11);
9588 if (code01 == INTEGER_CST
9589 && code11 == INTEGER_CST
9590 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9591 == element_precision (rtype)))
9593 tem = build2_loc (loc, LROTATE_EXPR,
9594 rtype, TREE_OPERAND (arg0, 0),
9595 code0 == LSHIFT_EXPR
9596 ? orig_tree01 : orig_tree11);
9597 return fold_convert_loc (loc, type, tem);
9599 else if (code11 == MINUS_EXPR)
9601 tree tree110, tree111;
9602 tree110 = TREE_OPERAND (tree11, 0);
9603 tree111 = TREE_OPERAND (tree11, 1);
9604 STRIP_NOPS (tree110);
9605 STRIP_NOPS (tree111);
9606 if (TREE_CODE (tree110) == INTEGER_CST
9607 && compare_tree_int (tree110,
9608 element_precision (rtype)) == 0
9609 && operand_equal_p (tree01, tree111, 0))
9611 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9612 ? LROTATE_EXPR : RROTATE_EXPR),
9613 rtype, TREE_OPERAND (arg0, 0),
9614 orig_tree01);
9615 return fold_convert_loc (loc, type, tem);
9618 else if (code == BIT_IOR_EXPR
9619 && code11 == BIT_AND_EXPR
9620 && pow2p_hwi (element_precision (rtype)))
9622 tree tree110, tree111;
9623 tree110 = TREE_OPERAND (tree11, 0);
9624 tree111 = TREE_OPERAND (tree11, 1);
9625 STRIP_NOPS (tree110);
9626 STRIP_NOPS (tree111);
9627 if (TREE_CODE (tree110) == NEGATE_EXPR
9628 && TREE_CODE (tree111) == INTEGER_CST
9629 && compare_tree_int (tree111,
9630 element_precision (rtype) - 1) == 0
9631 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
9633 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9634 ? LROTATE_EXPR : RROTATE_EXPR),
9635 rtype, TREE_OPERAND (arg0, 0),
9636 orig_tree01);
9637 return fold_convert_loc (loc, type, tem);
9643 associate:
9644 /* In most languages, can't associate operations on floats through
9645 parentheses. Rather than remember where the parentheses were, we
9646 don't associate floats at all, unless the user has specified
9647 -fassociative-math.
9648 And, we need to make sure type is not saturating. */
9650 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9651 && !TYPE_SATURATING (type))
9653 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
9654 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
9655 tree atype = type;
9656 bool ok = true;
9658 /* Split both trees into variables, constants, and literals. Then
9659 associate each group together, the constants with literals,
9660 then the result with variables. This increases the chances of
9661 literals being recombined later and of generating relocatable
9662 expressions for the sum of a constant and literal. */
9663 var0 = split_tree (arg0, type, code,
9664 &minus_var0, &con0, &minus_con0,
9665 &lit0, &minus_lit0, 0);
9666 var1 = split_tree (arg1, type, code,
9667 &minus_var1, &con1, &minus_con1,
9668 &lit1, &minus_lit1, code == MINUS_EXPR);
9670 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9671 if (code == MINUS_EXPR)
9672 code = PLUS_EXPR;
9674 /* With undefined overflow prefer doing association in a type
9675 which wraps on overflow, if that is one of the operand types. */
9676 if (POINTER_TYPE_P (type)
9677 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9679 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9680 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9681 atype = TREE_TYPE (arg0);
9682 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9683 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9684 atype = TREE_TYPE (arg1);
9685 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9688 /* With undefined overflow we can only associate constants with one
9689 variable, and constants whose association doesn't overflow. */
9690 if (POINTER_TYPE_P (atype)
9691 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9693 if ((var0 && var1) || (minus_var0 && minus_var1))
9695 /* ??? If split_tree would handle NEGATE_EXPR we could
9696 simply reject these cases and the allowed cases would
9697 be the var0/minus_var1 ones. */
9698 tree tmp0 = var0 ? var0 : minus_var0;
9699 tree tmp1 = var1 ? var1 : minus_var1;
9700 bool one_neg = false;
9702 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9704 tmp0 = TREE_OPERAND (tmp0, 0);
9705 one_neg = !one_neg;
9707 if (CONVERT_EXPR_P (tmp0)
9708 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9709 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9710 <= TYPE_PRECISION (atype)))
9711 tmp0 = TREE_OPERAND (tmp0, 0);
9712 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9714 tmp1 = TREE_OPERAND (tmp1, 0);
9715 one_neg = !one_neg;
9717 if (CONVERT_EXPR_P (tmp1)
9718 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9719 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9720 <= TYPE_PRECISION (atype)))
9721 tmp1 = TREE_OPERAND (tmp1, 0);
9722 /* The only case we can still associate with two variables
9723 is if they cancel out. */
9724 if (!one_neg
9725 || !operand_equal_p (tmp0, tmp1, 0))
9726 ok = false;
9728 else if ((var0 && minus_var1
9729 && ! operand_equal_p (var0, minus_var1, 0))
9730 || (minus_var0 && var1
9731 && ! operand_equal_p (minus_var0, var1, 0)))
9732 ok = false;
9735 /* Only do something if we found more than two objects. Otherwise,
9736 nothing has changed and we risk infinite recursion. */
9737 if (ok
9738 && ((var0 != 0) + (var1 != 0)
9739 + (minus_var0 != 0) + (minus_var1 != 0)
9740 + (con0 != 0) + (con1 != 0)
9741 + (minus_con0 != 0) + (minus_con1 != 0)
9742 + (lit0 != 0) + (lit1 != 0)
9743 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
9745 var0 = associate_trees (loc, var0, var1, code, atype);
9746 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
9747 code, atype);
9748 con0 = associate_trees (loc, con0, con1, code, atype);
9749 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
9750 code, atype);
9751 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9752 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9753 code, atype);
9755 if (minus_var0 && var0)
9757 var0 = associate_trees (loc, var0, minus_var0,
9758 MINUS_EXPR, atype);
9759 minus_var0 = 0;
9761 if (minus_con0 && con0)
9763 con0 = associate_trees (loc, con0, minus_con0,
9764 MINUS_EXPR, atype);
9765 minus_con0 = 0;
9768 /* Preserve the MINUS_EXPR if the negative part of the literal is
9769 greater than the positive part. Otherwise, the multiplicative
9770 folding code (i.e extract_muldiv) may be fooled in case
9771 unsigned constants are subtracted, like in the following
9772 example: ((X*2 + 4) - 8U)/2. */
9773 if (minus_lit0 && lit0)
9775 if (TREE_CODE (lit0) == INTEGER_CST
9776 && TREE_CODE (minus_lit0) == INTEGER_CST
9777 && tree_int_cst_lt (lit0, minus_lit0)
9778 /* But avoid ending up with only negated parts. */
9779 && (var0 || con0))
9781 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9782 MINUS_EXPR, atype);
9783 lit0 = 0;
9785 else
9787 lit0 = associate_trees (loc, lit0, minus_lit0,
9788 MINUS_EXPR, atype);
9789 minus_lit0 = 0;
9793 /* Don't introduce overflows through reassociation. */
9794 if ((lit0 && TREE_OVERFLOW_P (lit0))
9795 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
9796 return NULL_TREE;
9798 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
9799 con0 = associate_trees (loc, con0, lit0, code, atype);
9800 lit0 = 0;
9801 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
9802 code, atype);
9803 minus_lit0 = 0;
9805 /* Eliminate minus_con0. */
9806 if (minus_con0)
9808 if (con0)
9809 con0 = associate_trees (loc, con0, minus_con0,
9810 MINUS_EXPR, atype);
9811 else if (var0)
9812 var0 = associate_trees (loc, var0, minus_con0,
9813 MINUS_EXPR, atype);
9814 else
9815 gcc_unreachable ();
9816 minus_con0 = 0;
9819 /* Eliminate minus_var0. */
9820 if (minus_var0)
9822 if (con0)
9823 con0 = associate_trees (loc, con0, minus_var0,
9824 MINUS_EXPR, atype);
9825 else
9826 gcc_unreachable ();
9827 minus_var0 = 0;
9830 return
9831 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9832 code, atype));
9836 return NULL_TREE;
9838 case POINTER_DIFF_EXPR:
9839 case MINUS_EXPR:
9840 /* Fold &a[i] - &a[j] to i-j. */
9841 if (TREE_CODE (arg0) == ADDR_EXPR
9842 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9843 && TREE_CODE (arg1) == ADDR_EXPR
9844 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9846 tree tem = fold_addr_of_array_ref_difference (loc, type,
9847 TREE_OPERAND (arg0, 0),
9848 TREE_OPERAND (arg1, 0),
9849 code
9850 == POINTER_DIFF_EXPR);
9851 if (tem)
9852 return tem;
9855 /* Further transformations are not for pointers. */
9856 if (code == POINTER_DIFF_EXPR)
9857 return NULL_TREE;
9859 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9860 if (TREE_CODE (arg0) == NEGATE_EXPR
9861 && negate_expr_p (op1)
9862 /* If arg0 is e.g. unsigned int and type is int, then this could
9863 introduce UB, because if A is INT_MIN at runtime, the original
9864 expression can be well defined while the latter is not.
9865 See PR83269. */
9866 && !(ANY_INTEGRAL_TYPE_P (type)
9867 && TYPE_OVERFLOW_UNDEFINED (type)
9868 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9869 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9870 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
9871 fold_convert_loc (loc, type,
9872 TREE_OPERAND (arg0, 0)));
9874 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9875 __complex__ ( x, -y ). This is not the same for SNaNs or if
9876 signed zeros are involved. */
9877 if (!HONOR_SNANS (element_mode (arg0))
9878 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9879 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9881 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9882 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9883 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9884 bool arg0rz = false, arg0iz = false;
9885 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9886 || (arg0i && (arg0iz = real_zerop (arg0i))))
9888 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9889 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9890 if (arg0rz && arg1i && real_zerop (arg1i))
9892 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9893 arg1r ? arg1r
9894 : build1 (REALPART_EXPR, rtype, arg1));
9895 tree ip = arg0i ? arg0i
9896 : build1 (IMAGPART_EXPR, rtype, arg0);
9897 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9899 else if (arg0iz && arg1r && real_zerop (arg1r))
9901 tree rp = arg0r ? arg0r
9902 : build1 (REALPART_EXPR, rtype, arg0);
9903 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9904 arg1i ? arg1i
9905 : build1 (IMAGPART_EXPR, rtype, arg1));
9906 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9911 /* A - B -> A + (-B) if B is easily negatable. */
9912 if (negate_expr_p (op1)
9913 && ! TYPE_OVERFLOW_SANITIZED (type)
9914 && ((FLOAT_TYPE_P (type)
9915 /* Avoid this transformation if B is a positive REAL_CST. */
9916 && (TREE_CODE (op1) != REAL_CST
9917 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9918 || INTEGRAL_TYPE_P (type)))
9919 return fold_build2_loc (loc, PLUS_EXPR, type,
9920 fold_convert_loc (loc, type, arg0),
9921 negate_expr (op1));
9923 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9924 one. Make sure the type is not saturating and has the signedness of
9925 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9926 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9927 if ((TREE_CODE (arg0) == MULT_EXPR
9928 || TREE_CODE (arg1) == MULT_EXPR)
9929 && !TYPE_SATURATING (type)
9930 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9931 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9932 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9934 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9935 if (tem)
9936 return tem;
9939 goto associate;
9941 case MULT_EXPR:
9942 if (! FLOAT_TYPE_P (type))
9944 /* Transform x * -C into -x * C if x is easily negatable. */
9945 if (TREE_CODE (op1) == INTEGER_CST
9946 && tree_int_cst_sgn (op1) == -1
9947 && negate_expr_p (op0)
9948 && negate_expr_p (op1)
9949 && (tem = negate_expr (op1)) != op1
9950 && ! TREE_OVERFLOW (tem))
9951 return fold_build2_loc (loc, MULT_EXPR, type,
9952 fold_convert_loc (loc, type,
9953 negate_expr (op0)), tem);
9955 strict_overflow_p = false;
9956 if (TREE_CODE (arg1) == INTEGER_CST
9957 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9958 &strict_overflow_p)) != 0)
9960 if (strict_overflow_p)
9961 fold_overflow_warning (("assuming signed overflow does not "
9962 "occur when simplifying "
9963 "multiplication"),
9964 WARN_STRICT_OVERFLOW_MISC);
9965 return fold_convert_loc (loc, type, tem);
9968 /* Optimize z * conj(z) for integer complex numbers. */
9969 if (TREE_CODE (arg0) == CONJ_EXPR
9970 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9971 return fold_mult_zconjz (loc, type, arg1);
9972 if (TREE_CODE (arg1) == CONJ_EXPR
9973 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9974 return fold_mult_zconjz (loc, type, arg0);
9976 else
9978 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9979 This is not the same for NaNs or if signed zeros are
9980 involved. */
9981 if (!HONOR_NANS (arg0)
9982 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9983 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9984 && TREE_CODE (arg1) == COMPLEX_CST
9985 && real_zerop (TREE_REALPART (arg1)))
9987 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9988 if (real_onep (TREE_IMAGPART (arg1)))
9989 return
9990 fold_build2_loc (loc, COMPLEX_EXPR, type,
9991 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9992 rtype, arg0)),
9993 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9994 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9995 return
9996 fold_build2_loc (loc, COMPLEX_EXPR, type,
9997 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9998 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9999 rtype, arg0)));
10002 /* Optimize z * conj(z) for floating point complex numbers.
10003 Guarded by flag_unsafe_math_optimizations as non-finite
10004 imaginary components don't produce scalar results. */
10005 if (flag_unsafe_math_optimizations
10006 && TREE_CODE (arg0) == CONJ_EXPR
10007 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10008 return fold_mult_zconjz (loc, type, arg1);
10009 if (flag_unsafe_math_optimizations
10010 && TREE_CODE (arg1) == CONJ_EXPR
10011 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10012 return fold_mult_zconjz (loc, type, arg0);
10014 goto associate;
10016 case BIT_IOR_EXPR:
10017 /* Canonicalize (X & C1) | C2. */
10018 if (TREE_CODE (arg0) == BIT_AND_EXPR
10019 && TREE_CODE (arg1) == INTEGER_CST
10020 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10022 int width = TYPE_PRECISION (type), w;
10023 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
10024 wide_int c2 = wi::to_wide (arg1);
10026 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10027 if ((c1 & c2) == c1)
10028 return omit_one_operand_loc (loc, type, arg1,
10029 TREE_OPERAND (arg0, 0));
10031 wide_int msk = wi::mask (width, false,
10032 TYPE_PRECISION (TREE_TYPE (arg1)));
10034 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10035 if (wi::bit_and_not (msk, c1 | c2) == 0)
10037 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10038 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10041 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10042 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10043 mode which allows further optimizations. */
10044 c1 &= msk;
10045 c2 &= msk;
10046 wide_int c3 = wi::bit_and_not (c1, c2);
10047 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10049 wide_int mask = wi::mask (w, false,
10050 TYPE_PRECISION (type));
10051 if (((c1 | c2) & mask) == mask
10052 && wi::bit_and_not (c1, mask) == 0)
10054 c3 = mask;
10055 break;
10059 if (c3 != c1)
10061 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10062 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
10063 wide_int_to_tree (type, c3));
10064 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10068 /* See if this can be simplified into a rotate first. If that
10069 is unsuccessful continue in the association code. */
10070 goto bit_rotate;
10072 case BIT_XOR_EXPR:
10073 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10074 if (TREE_CODE (arg0) == BIT_AND_EXPR
10075 && INTEGRAL_TYPE_P (type)
10076 && integer_onep (TREE_OPERAND (arg0, 1))
10077 && integer_onep (arg1))
10078 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10079 build_zero_cst (TREE_TYPE (arg0)));
10081 /* See if this can be simplified into a rotate first. If that
10082 is unsuccessful continue in the association code. */
10083 goto bit_rotate;
10085 case BIT_AND_EXPR:
10086 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10087 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10088 && INTEGRAL_TYPE_P (type)
10089 && integer_onep (TREE_OPERAND (arg0, 1))
10090 && integer_onep (arg1))
10092 tree tem2;
10093 tem = TREE_OPERAND (arg0, 0);
10094 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10095 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10096 tem, tem2);
10097 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10098 build_zero_cst (TREE_TYPE (tem)));
10100 /* Fold ~X & 1 as (X & 1) == 0. */
10101 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10102 && INTEGRAL_TYPE_P (type)
10103 && integer_onep (arg1))
10105 tree tem2;
10106 tem = TREE_OPERAND (arg0, 0);
10107 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10108 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10109 tem, tem2);
10110 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10111 build_zero_cst (TREE_TYPE (tem)));
10113 /* Fold !X & 1 as X == 0. */
10114 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10115 && integer_onep (arg1))
10117 tem = TREE_OPERAND (arg0, 0);
10118 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10119 build_zero_cst (TREE_TYPE (tem)));
10122 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10123 multiple of 1 << CST. */
10124 if (TREE_CODE (arg1) == INTEGER_CST)
10126 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10127 wide_int ncst1 = -cst1;
10128 if ((cst1 & ncst1) == ncst1
10129 && multiple_of_p (type, arg0,
10130 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10131 return fold_convert_loc (loc, type, arg0);
10134 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10135 bits from CST2. */
10136 if (TREE_CODE (arg1) == INTEGER_CST
10137 && TREE_CODE (arg0) == MULT_EXPR
10138 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10140 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
10141 wide_int masked
10142 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
10144 if (masked == 0)
10145 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10146 arg0, arg1);
10147 else if (masked != warg1)
10149 /* Avoid the transform if arg1 is a mask of some
10150 mode which allows further optimizations. */
10151 int pop = wi::popcount (warg1);
10152 if (!(pop >= BITS_PER_UNIT
10153 && pow2p_hwi (pop)
10154 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10155 return fold_build2_loc (loc, code, type, op0,
10156 wide_int_to_tree (type, masked));
10160 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10161 ((A & N) + B) & M -> (A + B) & M
10162 Similarly if (N & M) == 0,
10163 ((A | N) + B) & M -> (A + B) & M
10164 and for - instead of + (or unary - instead of +)
10165 and/or ^ instead of |.
10166 If B is constant and (B & M) == 0, fold into A & M. */
10167 if (TREE_CODE (arg1) == INTEGER_CST)
10169 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10170 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10171 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10172 && (TREE_CODE (arg0) == PLUS_EXPR
10173 || TREE_CODE (arg0) == MINUS_EXPR
10174 || TREE_CODE (arg0) == NEGATE_EXPR)
10175 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10176 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10178 tree pmop[2];
10179 int which = 0;
10180 wide_int cst0;
10182 /* Now we know that arg0 is (C + D) or (C - D) or
10183 -C and arg1 (M) is == (1LL << cst) - 1.
10184 Store C into PMOP[0] and D into PMOP[1]. */
10185 pmop[0] = TREE_OPERAND (arg0, 0);
10186 pmop[1] = NULL;
10187 if (TREE_CODE (arg0) != NEGATE_EXPR)
10189 pmop[1] = TREE_OPERAND (arg0, 1);
10190 which = 1;
10193 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10194 which = -1;
10196 for (; which >= 0; which--)
10197 switch (TREE_CODE (pmop[which]))
10199 case BIT_AND_EXPR:
10200 case BIT_IOR_EXPR:
10201 case BIT_XOR_EXPR:
10202 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10203 != INTEGER_CST)
10204 break;
10205 cst0 = wi::to_wide (TREE_OPERAND (pmop[which], 1)) & cst1;
10206 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10208 if (cst0 != cst1)
10209 break;
10211 else if (cst0 != 0)
10212 break;
10213 /* If C or D is of the form (A & N) where
10214 (N & M) == M, or of the form (A | N) or
10215 (A ^ N) where (N & M) == 0, replace it with A. */
10216 pmop[which] = TREE_OPERAND (pmop[which], 0);
10217 break;
10218 case INTEGER_CST:
10219 /* If C or D is a N where (N & M) == 0, it can be
10220 omitted (assumed 0). */
10221 if ((TREE_CODE (arg0) == PLUS_EXPR
10222 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10223 && (cst1 & wi::to_wide (pmop[which])) == 0)
10224 pmop[which] = NULL;
10225 break;
10226 default:
10227 break;
10230 /* Only build anything new if we optimized one or both arguments
10231 above. */
10232 if (pmop[0] != TREE_OPERAND (arg0, 0)
10233 || (TREE_CODE (arg0) != NEGATE_EXPR
10234 && pmop[1] != TREE_OPERAND (arg0, 1)))
10236 tree utype = TREE_TYPE (arg0);
10237 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10239 /* Perform the operations in a type that has defined
10240 overflow behavior. */
10241 utype = unsigned_type_for (TREE_TYPE (arg0));
10242 if (pmop[0] != NULL)
10243 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10244 if (pmop[1] != NULL)
10245 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10248 if (TREE_CODE (arg0) == NEGATE_EXPR)
10249 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10250 else if (TREE_CODE (arg0) == PLUS_EXPR)
10252 if (pmop[0] != NULL && pmop[1] != NULL)
10253 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10254 pmop[0], pmop[1]);
10255 else if (pmop[0] != NULL)
10256 tem = pmop[0];
10257 else if (pmop[1] != NULL)
10258 tem = pmop[1];
10259 else
10260 return build_int_cst (type, 0);
10262 else if (pmop[0] == NULL)
10263 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10264 else
10265 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10266 pmop[0], pmop[1]);
10267 /* TEM is now the new binary +, - or unary - replacement. */
10268 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10269 fold_convert_loc (loc, utype, arg1));
10270 return fold_convert_loc (loc, type, tem);
10275 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10276 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10277 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10279 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10281 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
10282 if (mask == -1)
10283 return
10284 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10287 goto associate;
10289 case RDIV_EXPR:
10290 /* Don't touch a floating-point divide by zero unless the mode
10291 of the constant can represent infinity. */
10292 if (TREE_CODE (arg1) == REAL_CST
10293 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10294 && real_zerop (arg1))
10295 return NULL_TREE;
10297 /* (-A) / (-B) -> A / B */
10298 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10299 return fold_build2_loc (loc, RDIV_EXPR, type,
10300 TREE_OPERAND (arg0, 0),
10301 negate_expr (arg1));
10302 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10303 return fold_build2_loc (loc, RDIV_EXPR, type,
10304 negate_expr (arg0),
10305 TREE_OPERAND (arg1, 0));
10306 return NULL_TREE;
10308 case TRUNC_DIV_EXPR:
10309 /* Fall through */
10311 case FLOOR_DIV_EXPR:
10312 /* Simplify A / (B << N) where A and B are positive and B is
10313 a power of 2, to A >> (N + log2(B)). */
10314 strict_overflow_p = false;
10315 if (TREE_CODE (arg1) == LSHIFT_EXPR
10316 && (TYPE_UNSIGNED (type)
10317 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10319 tree sval = TREE_OPERAND (arg1, 0);
10320 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10322 tree sh_cnt = TREE_OPERAND (arg1, 1);
10323 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10324 wi::exact_log2 (wi::to_wide (sval)));
10326 if (strict_overflow_p)
10327 fold_overflow_warning (("assuming signed overflow does not "
10328 "occur when simplifying A / (B << N)"),
10329 WARN_STRICT_OVERFLOW_MISC);
10331 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10332 sh_cnt, pow2);
10333 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10334 fold_convert_loc (loc, type, arg0), sh_cnt);
10338 /* Fall through */
10340 case ROUND_DIV_EXPR:
10341 case CEIL_DIV_EXPR:
10342 case EXACT_DIV_EXPR:
10343 if (integer_zerop (arg1))
10344 return NULL_TREE;
10346 /* Convert -A / -B to A / B when the type is signed and overflow is
10347 undefined. */
10348 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10349 && TREE_CODE (op0) == NEGATE_EXPR
10350 && negate_expr_p (op1))
10352 if (INTEGRAL_TYPE_P (type))
10353 fold_overflow_warning (("assuming signed overflow does not occur "
10354 "when distributing negation across "
10355 "division"),
10356 WARN_STRICT_OVERFLOW_MISC);
10357 return fold_build2_loc (loc, code, type,
10358 fold_convert_loc (loc, type,
10359 TREE_OPERAND (arg0, 0)),
10360 negate_expr (op1));
10362 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10363 && TREE_CODE (arg1) == NEGATE_EXPR
10364 && negate_expr_p (op0))
10366 if (INTEGRAL_TYPE_P (type))
10367 fold_overflow_warning (("assuming signed overflow does not occur "
10368 "when distributing negation across "
10369 "division"),
10370 WARN_STRICT_OVERFLOW_MISC);
10371 return fold_build2_loc (loc, code, type,
10372 negate_expr (op0),
10373 fold_convert_loc (loc, type,
10374 TREE_OPERAND (arg1, 0)));
10377 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10378 operation, EXACT_DIV_EXPR.
10380 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10381 At one time others generated faster code, it's not clear if they do
10382 after the last round to changes to the DIV code in expmed.c. */
10383 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10384 && multiple_of_p (type, arg0, arg1))
10385 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10386 fold_convert (type, arg0),
10387 fold_convert (type, arg1));
10389 strict_overflow_p = false;
10390 if (TREE_CODE (arg1) == INTEGER_CST
10391 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10392 &strict_overflow_p)) != 0)
10394 if (strict_overflow_p)
10395 fold_overflow_warning (("assuming signed overflow does not occur "
10396 "when simplifying division"),
10397 WARN_STRICT_OVERFLOW_MISC);
10398 return fold_convert_loc (loc, type, tem);
10401 return NULL_TREE;
10403 case CEIL_MOD_EXPR:
10404 case FLOOR_MOD_EXPR:
10405 case ROUND_MOD_EXPR:
10406 case TRUNC_MOD_EXPR:
10407 strict_overflow_p = false;
10408 if (TREE_CODE (arg1) == INTEGER_CST
10409 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10410 &strict_overflow_p)) != 0)
10412 if (strict_overflow_p)
10413 fold_overflow_warning (("assuming signed overflow does not occur "
10414 "when simplifying modulus"),
10415 WARN_STRICT_OVERFLOW_MISC);
10416 return fold_convert_loc (loc, type, tem);
10419 return NULL_TREE;
10421 case LROTATE_EXPR:
10422 case RROTATE_EXPR:
10423 case RSHIFT_EXPR:
10424 case LSHIFT_EXPR:
10425 /* Since negative shift count is not well-defined,
10426 don't try to compute it in the compiler. */
10427 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10428 return NULL_TREE;
10430 prec = element_precision (type);
10432 /* If we have a rotate of a bit operation with the rotate count and
10433 the second operand of the bit operation both constant,
10434 permute the two operations. */
10435 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10436 && (TREE_CODE (arg0) == BIT_AND_EXPR
10437 || TREE_CODE (arg0) == BIT_IOR_EXPR
10438 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10439 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10441 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10442 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10443 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10444 fold_build2_loc (loc, code, type,
10445 arg00, arg1),
10446 fold_build2_loc (loc, code, type,
10447 arg01, arg1));
10450 /* Two consecutive rotates adding up to the some integer
10451 multiple of the precision of the type can be ignored. */
10452 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10453 && TREE_CODE (arg0) == RROTATE_EXPR
10454 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10455 && wi::umod_trunc (wi::to_wide (arg1)
10456 + wi::to_wide (TREE_OPERAND (arg0, 1)),
10457 prec) == 0)
10458 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10460 return NULL_TREE;
10462 case MIN_EXPR:
10463 case MAX_EXPR:
10464 goto associate;
10466 case TRUTH_ANDIF_EXPR:
10467 /* Note that the operands of this must be ints
10468 and their values must be 0 or 1.
10469 ("true" is a fixed value perhaps depending on the language.) */
10470 /* If first arg is constant zero, return it. */
10471 if (integer_zerop (arg0))
10472 return fold_convert_loc (loc, type, arg0);
10473 /* FALLTHRU */
10474 case TRUTH_AND_EXPR:
10475 /* If either arg is constant true, drop it. */
10476 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10477 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10478 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10479 /* Preserve sequence points. */
10480 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10481 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10482 /* If second arg is constant zero, result is zero, but first arg
10483 must be evaluated. */
10484 if (integer_zerop (arg1))
10485 return omit_one_operand_loc (loc, type, arg1, arg0);
10486 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10487 case will be handled here. */
10488 if (integer_zerop (arg0))
10489 return omit_one_operand_loc (loc, type, arg0, arg1);
10491 /* !X && X is always false. */
10492 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10493 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10494 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10495 /* X && !X is always false. */
10496 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10497 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10498 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10500 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10501 means A >= Y && A != MAX, but in this case we know that
10502 A < X <= MAX. */
10504 if (!TREE_SIDE_EFFECTS (arg0)
10505 && !TREE_SIDE_EFFECTS (arg1))
10507 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10508 if (tem && !operand_equal_p (tem, arg0, 0))
10509 return fold_build2_loc (loc, code, type, tem, arg1);
10511 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10512 if (tem && !operand_equal_p (tem, arg1, 0))
10513 return fold_build2_loc (loc, code, type, arg0, tem);
10516 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10517 != NULL_TREE)
10518 return tem;
10520 return NULL_TREE;
10522 case TRUTH_ORIF_EXPR:
10523 /* Note that the operands of this must be ints
10524 and their values must be 0 or true.
10525 ("true" is a fixed value perhaps depending on the language.) */
10526 /* If first arg is constant true, return it. */
10527 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10528 return fold_convert_loc (loc, type, arg0);
10529 /* FALLTHRU */
10530 case TRUTH_OR_EXPR:
10531 /* If either arg is constant zero, drop it. */
10532 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10533 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10534 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10535 /* Preserve sequence points. */
10536 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10537 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10538 /* If second arg is constant true, result is true, but we must
10539 evaluate first arg. */
10540 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10541 return omit_one_operand_loc (loc, type, arg1, arg0);
10542 /* Likewise for first arg, but note this only occurs here for
10543 TRUTH_OR_EXPR. */
10544 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10545 return omit_one_operand_loc (loc, type, arg0, arg1);
10547 /* !X || X is always true. */
10548 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10549 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10550 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10551 /* X || !X is always true. */
10552 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10553 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10554 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10556 /* (X && !Y) || (!X && Y) is X ^ Y */
10557 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10558 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10560 tree a0, a1, l0, l1, n0, n1;
10562 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10563 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10565 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10566 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10568 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10569 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10571 if ((operand_equal_p (n0, a0, 0)
10572 && operand_equal_p (n1, a1, 0))
10573 || (operand_equal_p (n0, a1, 0)
10574 && operand_equal_p (n1, a0, 0)))
10575 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10578 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10579 != NULL_TREE)
10580 return tem;
10582 return NULL_TREE;
10584 case TRUTH_XOR_EXPR:
10585 /* If the second arg is constant zero, drop it. */
10586 if (integer_zerop (arg1))
10587 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10588 /* If the second arg is constant true, this is a logical inversion. */
10589 if (integer_onep (arg1))
10591 tem = invert_truthvalue_loc (loc, arg0);
10592 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10594 /* Identical arguments cancel to zero. */
10595 if (operand_equal_p (arg0, arg1, 0))
10596 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10598 /* !X ^ X is always true. */
10599 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10600 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10601 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10603 /* X ^ !X is always true. */
10604 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10605 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10606 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10608 return NULL_TREE;
10610 case EQ_EXPR:
10611 case NE_EXPR:
10612 STRIP_NOPS (arg0);
10613 STRIP_NOPS (arg1);
10615 tem = fold_comparison (loc, code, type, op0, op1);
10616 if (tem != NULL_TREE)
10617 return tem;
10619 /* bool_var != 1 becomes !bool_var. */
10620 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10621 && code == NE_EXPR)
10622 return fold_convert_loc (loc, type,
10623 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10624 TREE_TYPE (arg0), arg0));
10626 /* bool_var == 0 becomes !bool_var. */
10627 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10628 && code == EQ_EXPR)
10629 return fold_convert_loc (loc, type,
10630 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10631 TREE_TYPE (arg0), arg0));
10633 /* !exp != 0 becomes !exp */
10634 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10635 && code == NE_EXPR)
10636 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10638 /* If this is an EQ or NE comparison with zero and ARG0 is
10639 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10640 two operations, but the latter can be done in one less insn
10641 on machines that have only two-operand insns or on which a
10642 constant cannot be the first operand. */
10643 if (TREE_CODE (arg0) == BIT_AND_EXPR
10644 && integer_zerop (arg1))
10646 tree arg00 = TREE_OPERAND (arg0, 0);
10647 tree arg01 = TREE_OPERAND (arg0, 1);
10648 if (TREE_CODE (arg00) == LSHIFT_EXPR
10649 && integer_onep (TREE_OPERAND (arg00, 0)))
10651 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10652 arg01, TREE_OPERAND (arg00, 1));
10653 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10654 build_int_cst (TREE_TYPE (arg0), 1));
10655 return fold_build2_loc (loc, code, type,
10656 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10657 arg1);
10659 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10660 && integer_onep (TREE_OPERAND (arg01, 0)))
10662 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10663 arg00, TREE_OPERAND (arg01, 1));
10664 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10665 build_int_cst (TREE_TYPE (arg0), 1));
10666 return fold_build2_loc (loc, code, type,
10667 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10668 arg1);
10672 /* If this is an NE or EQ comparison of zero against the result of a
10673 signed MOD operation whose second operand is a power of 2, make
10674 the MOD operation unsigned since it is simpler and equivalent. */
10675 if (integer_zerop (arg1)
10676 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10677 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10678 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10679 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10680 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10681 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10683 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10684 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10685 fold_convert_loc (loc, newtype,
10686 TREE_OPERAND (arg0, 0)),
10687 fold_convert_loc (loc, newtype,
10688 TREE_OPERAND (arg0, 1)));
10690 return fold_build2_loc (loc, code, type, newmod,
10691 fold_convert_loc (loc, newtype, arg1));
10694 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10695 C1 is a valid shift constant, and C2 is a power of two, i.e.
10696 a single bit. */
10697 if (TREE_CODE (arg0) == BIT_AND_EXPR
10698 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10699 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10700 == INTEGER_CST
10701 && integer_pow2p (TREE_OPERAND (arg0, 1))
10702 && integer_zerop (arg1))
10704 tree itype = TREE_TYPE (arg0);
10705 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10706 prec = TYPE_PRECISION (itype);
10708 /* Check for a valid shift count. */
10709 if (wi::ltu_p (wi::to_wide (arg001), prec))
10711 tree arg01 = TREE_OPERAND (arg0, 1);
10712 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10713 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10714 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10715 can be rewritten as (X & (C2 << C1)) != 0. */
10716 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10718 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10719 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10720 return fold_build2_loc (loc, code, type, tem,
10721 fold_convert_loc (loc, itype, arg1));
10723 /* Otherwise, for signed (arithmetic) shifts,
10724 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10725 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10726 else if (!TYPE_UNSIGNED (itype))
10727 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10728 arg000, build_int_cst (itype, 0));
10729 /* Otherwise, of unsigned (logical) shifts,
10730 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10731 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10732 else
10733 return omit_one_operand_loc (loc, type,
10734 code == EQ_EXPR ? integer_one_node
10735 : integer_zero_node,
10736 arg000);
10740 /* If this is a comparison of a field, we may be able to simplify it. */
10741 if ((TREE_CODE (arg0) == COMPONENT_REF
10742 || TREE_CODE (arg0) == BIT_FIELD_REF)
10743 /* Handle the constant case even without -O
10744 to make sure the warnings are given. */
10745 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10747 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10748 if (t1)
10749 return t1;
10752 /* Optimize comparisons of strlen vs zero to a compare of the
10753 first character of the string vs zero. To wit,
10754 strlen(ptr) == 0 => *ptr == 0
10755 strlen(ptr) != 0 => *ptr != 0
10756 Other cases should reduce to one of these two (or a constant)
10757 due to the return value of strlen being unsigned. */
10758 if (TREE_CODE (arg0) == CALL_EXPR
10759 && integer_zerop (arg1))
10761 tree fndecl = get_callee_fndecl (arg0);
10763 if (fndecl
10764 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10765 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10766 && call_expr_nargs (arg0) == 1
10767 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10769 tree iref = build_fold_indirect_ref_loc (loc,
10770 CALL_EXPR_ARG (arg0, 0));
10771 return fold_build2_loc (loc, code, type, iref,
10772 build_int_cst (TREE_TYPE (iref), 0));
10776 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10777 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10778 if (TREE_CODE (arg0) == RSHIFT_EXPR
10779 && integer_zerop (arg1)
10780 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10782 tree arg00 = TREE_OPERAND (arg0, 0);
10783 tree arg01 = TREE_OPERAND (arg0, 1);
10784 tree itype = TREE_TYPE (arg00);
10785 if (wi::to_wide (arg01) == element_precision (itype) - 1)
10787 if (TYPE_UNSIGNED (itype))
10789 itype = signed_type_for (itype);
10790 arg00 = fold_convert_loc (loc, itype, arg00);
10792 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10793 type, arg00, build_zero_cst (itype));
10797 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10798 (X & C) == 0 when C is a single bit. */
10799 if (TREE_CODE (arg0) == BIT_AND_EXPR
10800 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10801 && integer_zerop (arg1)
10802 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10804 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10805 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10806 TREE_OPERAND (arg0, 1));
10807 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10808 type, tem,
10809 fold_convert_loc (loc, TREE_TYPE (arg0),
10810 arg1));
10813 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10814 constant C is a power of two, i.e. a single bit. */
10815 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10816 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10817 && integer_zerop (arg1)
10818 && integer_pow2p (TREE_OPERAND (arg0, 1))
10819 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10820 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10822 tree arg00 = TREE_OPERAND (arg0, 0);
10823 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10824 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10827 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10828 when is C is a power of two, i.e. a single bit. */
10829 if (TREE_CODE (arg0) == BIT_AND_EXPR
10830 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10831 && integer_zerop (arg1)
10832 && integer_pow2p (TREE_OPERAND (arg0, 1))
10833 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10834 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10836 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10837 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10838 arg000, TREE_OPERAND (arg0, 1));
10839 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10840 tem, build_int_cst (TREE_TYPE (tem), 0));
10843 if (integer_zerop (arg1)
10844 && tree_expr_nonzero_p (arg0))
10846 tree res = constant_boolean_node (code==NE_EXPR, type);
10847 return omit_one_operand_loc (loc, type, res, arg0);
10850 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
10851 if (TREE_CODE (arg0) == BIT_AND_EXPR
10852 && TREE_CODE (arg1) == BIT_AND_EXPR)
10854 tree arg00 = TREE_OPERAND (arg0, 0);
10855 tree arg01 = TREE_OPERAND (arg0, 1);
10856 tree arg10 = TREE_OPERAND (arg1, 0);
10857 tree arg11 = TREE_OPERAND (arg1, 1);
10858 tree itype = TREE_TYPE (arg0);
10860 if (operand_equal_p (arg01, arg11, 0))
10862 tem = fold_convert_loc (loc, itype, arg10);
10863 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10864 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10865 return fold_build2_loc (loc, code, type, tem,
10866 build_zero_cst (itype));
10868 if (operand_equal_p (arg01, arg10, 0))
10870 tem = fold_convert_loc (loc, itype, arg11);
10871 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10872 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10873 return fold_build2_loc (loc, code, type, tem,
10874 build_zero_cst (itype));
10876 if (operand_equal_p (arg00, arg11, 0))
10878 tem = fold_convert_loc (loc, itype, arg10);
10879 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10880 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10881 return fold_build2_loc (loc, code, type, tem,
10882 build_zero_cst (itype));
10884 if (operand_equal_p (arg00, arg10, 0))
10886 tem = fold_convert_loc (loc, itype, arg11);
10887 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10888 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10889 return fold_build2_loc (loc, code, type, tem,
10890 build_zero_cst (itype));
10894 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10895 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10897 tree arg00 = TREE_OPERAND (arg0, 0);
10898 tree arg01 = TREE_OPERAND (arg0, 1);
10899 tree arg10 = TREE_OPERAND (arg1, 0);
10900 tree arg11 = TREE_OPERAND (arg1, 1);
10901 tree itype = TREE_TYPE (arg0);
10903 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10904 operand_equal_p guarantees no side-effects so we don't need
10905 to use omit_one_operand on Z. */
10906 if (operand_equal_p (arg01, arg11, 0))
10907 return fold_build2_loc (loc, code, type, arg00,
10908 fold_convert_loc (loc, TREE_TYPE (arg00),
10909 arg10));
10910 if (operand_equal_p (arg01, arg10, 0))
10911 return fold_build2_loc (loc, code, type, arg00,
10912 fold_convert_loc (loc, TREE_TYPE (arg00),
10913 arg11));
10914 if (operand_equal_p (arg00, arg11, 0))
10915 return fold_build2_loc (loc, code, type, arg01,
10916 fold_convert_loc (loc, TREE_TYPE (arg01),
10917 arg10));
10918 if (operand_equal_p (arg00, arg10, 0))
10919 return fold_build2_loc (loc, code, type, arg01,
10920 fold_convert_loc (loc, TREE_TYPE (arg01),
10921 arg11));
10923 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10924 if (TREE_CODE (arg01) == INTEGER_CST
10925 && TREE_CODE (arg11) == INTEGER_CST)
10927 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10928 fold_convert_loc (loc, itype, arg11));
10929 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10930 return fold_build2_loc (loc, code, type, tem,
10931 fold_convert_loc (loc, itype, arg10));
10935 /* Attempt to simplify equality/inequality comparisons of complex
10936 values. Only lower the comparison if the result is known or
10937 can be simplified to a single scalar comparison. */
10938 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10939 || TREE_CODE (arg0) == COMPLEX_CST)
10940 && (TREE_CODE (arg1) == COMPLEX_EXPR
10941 || TREE_CODE (arg1) == COMPLEX_CST))
10943 tree real0, imag0, real1, imag1;
10944 tree rcond, icond;
10946 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10948 real0 = TREE_OPERAND (arg0, 0);
10949 imag0 = TREE_OPERAND (arg0, 1);
10951 else
10953 real0 = TREE_REALPART (arg0);
10954 imag0 = TREE_IMAGPART (arg0);
10957 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10959 real1 = TREE_OPERAND (arg1, 0);
10960 imag1 = TREE_OPERAND (arg1, 1);
10962 else
10964 real1 = TREE_REALPART (arg1);
10965 imag1 = TREE_IMAGPART (arg1);
10968 rcond = fold_binary_loc (loc, code, type, real0, real1);
10969 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10971 if (integer_zerop (rcond))
10973 if (code == EQ_EXPR)
10974 return omit_two_operands_loc (loc, type, boolean_false_node,
10975 imag0, imag1);
10976 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10978 else
10980 if (code == NE_EXPR)
10981 return omit_two_operands_loc (loc, type, boolean_true_node,
10982 imag0, imag1);
10983 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10987 icond = fold_binary_loc (loc, code, type, imag0, imag1);
10988 if (icond && TREE_CODE (icond) == INTEGER_CST)
10990 if (integer_zerop (icond))
10992 if (code == EQ_EXPR)
10993 return omit_two_operands_loc (loc, type, boolean_false_node,
10994 real0, real1);
10995 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10997 else
10999 if (code == NE_EXPR)
11000 return omit_two_operands_loc (loc, type, boolean_true_node,
11001 real0, real1);
11002 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11007 return NULL_TREE;
11009 case LT_EXPR:
11010 case GT_EXPR:
11011 case LE_EXPR:
11012 case GE_EXPR:
11013 tem = fold_comparison (loc, code, type, op0, op1);
11014 if (tem != NULL_TREE)
11015 return tem;
11017 /* Transform comparisons of the form X +- C CMP X. */
11018 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11019 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11020 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11021 && !HONOR_SNANS (arg0))
11023 tree arg01 = TREE_OPERAND (arg0, 1);
11024 enum tree_code code0 = TREE_CODE (arg0);
11025 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11027 /* (X - c) > X becomes false. */
11028 if (code == GT_EXPR
11029 && ((code0 == MINUS_EXPR && is_positive >= 0)
11030 || (code0 == PLUS_EXPR && is_positive <= 0)))
11031 return constant_boolean_node (0, type);
11033 /* Likewise (X + c) < X becomes false. */
11034 if (code == LT_EXPR
11035 && ((code0 == PLUS_EXPR && is_positive >= 0)
11036 || (code0 == MINUS_EXPR && is_positive <= 0)))
11037 return constant_boolean_node (0, type);
11039 /* Convert (X - c) <= X to true. */
11040 if (!HONOR_NANS (arg1)
11041 && code == LE_EXPR
11042 && ((code0 == MINUS_EXPR && is_positive >= 0)
11043 || (code0 == PLUS_EXPR && is_positive <= 0)))
11044 return constant_boolean_node (1, type);
11046 /* Convert (X + c) >= X to true. */
11047 if (!HONOR_NANS (arg1)
11048 && code == GE_EXPR
11049 && ((code0 == PLUS_EXPR && is_positive >= 0)
11050 || (code0 == MINUS_EXPR && is_positive <= 0)))
11051 return constant_boolean_node (1, type);
11054 /* If we are comparing an ABS_EXPR with a constant, we can
11055 convert all the cases into explicit comparisons, but they may
11056 well not be faster than doing the ABS and one comparison.
11057 But ABS (X) <= C is a range comparison, which becomes a subtraction
11058 and a comparison, and is probably faster. */
11059 if (code == LE_EXPR
11060 && TREE_CODE (arg1) == INTEGER_CST
11061 && TREE_CODE (arg0) == ABS_EXPR
11062 && ! TREE_SIDE_EFFECTS (arg0)
11063 && (tem = negate_expr (arg1)) != 0
11064 && TREE_CODE (tem) == INTEGER_CST
11065 && !TREE_OVERFLOW (tem))
11066 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11067 build2 (GE_EXPR, type,
11068 TREE_OPERAND (arg0, 0), tem),
11069 build2 (LE_EXPR, type,
11070 TREE_OPERAND (arg0, 0), arg1));
11072 /* Convert ABS_EXPR<x> >= 0 to true. */
11073 strict_overflow_p = false;
11074 if (code == GE_EXPR
11075 && (integer_zerop (arg1)
11076 || (! HONOR_NANS (arg0)
11077 && real_zerop (arg1)))
11078 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11080 if (strict_overflow_p)
11081 fold_overflow_warning (("assuming signed overflow does not occur "
11082 "when simplifying comparison of "
11083 "absolute value and zero"),
11084 WARN_STRICT_OVERFLOW_CONDITIONAL);
11085 return omit_one_operand_loc (loc, type,
11086 constant_boolean_node (true, type),
11087 arg0);
11090 /* Convert ABS_EXPR<x> < 0 to false. */
11091 strict_overflow_p = false;
11092 if (code == LT_EXPR
11093 && (integer_zerop (arg1) || real_zerop (arg1))
11094 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11096 if (strict_overflow_p)
11097 fold_overflow_warning (("assuming signed overflow does not occur "
11098 "when simplifying comparison of "
11099 "absolute value and zero"),
11100 WARN_STRICT_OVERFLOW_CONDITIONAL);
11101 return omit_one_operand_loc (loc, type,
11102 constant_boolean_node (false, type),
11103 arg0);
11106 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11107 and similarly for >= into !=. */
11108 if ((code == LT_EXPR || code == GE_EXPR)
11109 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11110 && TREE_CODE (arg1) == LSHIFT_EXPR
11111 && integer_onep (TREE_OPERAND (arg1, 0)))
11112 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11113 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11114 TREE_OPERAND (arg1, 1)),
11115 build_zero_cst (TREE_TYPE (arg0)));
11117 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11118 otherwise Y might be >= # of bits in X's type and thus e.g.
11119 (unsigned char) (1 << Y) for Y 15 might be 0.
11120 If the cast is widening, then 1 << Y should have unsigned type,
11121 otherwise if Y is number of bits in the signed shift type minus 1,
11122 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11123 31 might be 0xffffffff80000000. */
11124 if ((code == LT_EXPR || code == GE_EXPR)
11125 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11126 && CONVERT_EXPR_P (arg1)
11127 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11128 && (element_precision (TREE_TYPE (arg1))
11129 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11130 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11131 || (element_precision (TREE_TYPE (arg1))
11132 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11133 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11135 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11136 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11137 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11138 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11139 build_zero_cst (TREE_TYPE (arg0)));
11142 return NULL_TREE;
11144 case UNORDERED_EXPR:
11145 case ORDERED_EXPR:
11146 case UNLT_EXPR:
11147 case UNLE_EXPR:
11148 case UNGT_EXPR:
11149 case UNGE_EXPR:
11150 case UNEQ_EXPR:
11151 case LTGT_EXPR:
11152 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11154 tree targ0 = strip_float_extensions (arg0);
11155 tree targ1 = strip_float_extensions (arg1);
11156 tree newtype = TREE_TYPE (targ0);
11158 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11159 newtype = TREE_TYPE (targ1);
11161 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11162 return fold_build2_loc (loc, code, type,
11163 fold_convert_loc (loc, newtype, targ0),
11164 fold_convert_loc (loc, newtype, targ1));
11167 return NULL_TREE;
11169 case COMPOUND_EXPR:
11170 /* When pedantic, a compound expression can be neither an lvalue
11171 nor an integer constant expression. */
11172 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11173 return NULL_TREE;
11174 /* Don't let (0, 0) be null pointer constant. */
11175 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11176 : fold_convert_loc (loc, type, arg1);
11177 return pedantic_non_lvalue_loc (loc, tem);
11179 case ASSERT_EXPR:
11180 /* An ASSERT_EXPR should never be passed to fold_binary. */
11181 gcc_unreachable ();
11183 default:
11184 return NULL_TREE;
11185 } /* switch (code) */
11188 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11189 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11190 of GOTO_EXPR. */
11192 static tree
11193 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11195 switch (TREE_CODE (*tp))
11197 case LABEL_EXPR:
11198 return *tp;
11200 case GOTO_EXPR:
11201 *walk_subtrees = 0;
11203 /* fall through */
11205 default:
11206 return NULL_TREE;
11210 /* Return whether the sub-tree ST contains a label which is accessible from
11211 outside the sub-tree. */
11213 static bool
11214 contains_label_p (tree st)
11216 return
11217 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
11220 /* Fold a ternary expression of code CODE and type TYPE with operands
11221 OP0, OP1, and OP2. Return the folded expression if folding is
11222 successful. Otherwise, return NULL_TREE. */
11224 tree
11225 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11226 tree op0, tree op1, tree op2)
11228 tree tem;
11229 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11230 enum tree_code_class kind = TREE_CODE_CLASS (code);
11232 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11233 && TREE_CODE_LENGTH (code) == 3);
11235 /* If this is a commutative operation, and OP0 is a constant, move it
11236 to OP1 to reduce the number of tests below. */
11237 if (commutative_ternary_tree_code (code)
11238 && tree_swap_operands_p (op0, op1))
11239 return fold_build3_loc (loc, code, type, op1, op0, op2);
11241 tem = generic_simplify (loc, code, type, op0, op1, op2);
11242 if (tem)
11243 return tem;
11245 /* Strip any conversions that don't change the mode. This is safe
11246 for every expression, except for a comparison expression because
11247 its signedness is derived from its operands. So, in the latter
11248 case, only strip conversions that don't change the signedness.
11250 Note that this is done as an internal manipulation within the
11251 constant folder, in order to find the simplest representation of
11252 the arguments so that their form can be studied. In any cases,
11253 the appropriate type conversions should be put back in the tree
11254 that will get out of the constant folder. */
11255 if (op0)
11257 arg0 = op0;
11258 STRIP_NOPS (arg0);
11261 if (op1)
11263 arg1 = op1;
11264 STRIP_NOPS (arg1);
11267 if (op2)
11269 arg2 = op2;
11270 STRIP_NOPS (arg2);
11273 switch (code)
11275 case COMPONENT_REF:
11276 if (TREE_CODE (arg0) == CONSTRUCTOR
11277 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11279 unsigned HOST_WIDE_INT idx;
11280 tree field, value;
11281 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11282 if (field == arg1)
11283 return value;
11285 return NULL_TREE;
11287 case COND_EXPR:
11288 case VEC_COND_EXPR:
11289 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11290 so all simple results must be passed through pedantic_non_lvalue. */
11291 if (TREE_CODE (arg0) == INTEGER_CST)
11293 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11294 tem = integer_zerop (arg0) ? op2 : op1;
11295 /* Only optimize constant conditions when the selected branch
11296 has the same type as the COND_EXPR. This avoids optimizing
11297 away "c ? x : throw", where the throw has a void type.
11298 Avoid throwing away that operand which contains label. */
11299 if ((!TREE_SIDE_EFFECTS (unused_op)
11300 || !contains_label_p (unused_op))
11301 && (! VOID_TYPE_P (TREE_TYPE (tem))
11302 || VOID_TYPE_P (type)))
11303 return pedantic_non_lvalue_loc (loc, tem);
11304 return NULL_TREE;
11306 else if (TREE_CODE (arg0) == VECTOR_CST)
11308 if ((TREE_CODE (arg1) == VECTOR_CST
11309 || TREE_CODE (arg1) == CONSTRUCTOR)
11310 && (TREE_CODE (arg2) == VECTOR_CST
11311 || TREE_CODE (arg2) == CONSTRUCTOR))
11313 unsigned int nelts = VECTOR_CST_NELTS (arg0), i;
11314 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
11315 auto_vec_perm_indices sel (nelts);
11316 for (i = 0; i < nelts; i++)
11318 tree val = VECTOR_CST_ELT (arg0, i);
11319 if (integer_all_onesp (val))
11320 sel.quick_push (i);
11321 else if (integer_zerop (val))
11322 sel.quick_push (nelts + i);
11323 else /* Currently unreachable. */
11324 return NULL_TREE;
11326 tree t = fold_vec_perm (type, arg1, arg2, sel);
11327 if (t != NULL_TREE)
11328 return t;
11332 /* If we have A op B ? A : C, we may be able to convert this to a
11333 simpler expression, depending on the operation and the values
11334 of B and C. Signed zeros prevent all of these transformations,
11335 for reasons given above each one.
11337 Also try swapping the arguments and inverting the conditional. */
11338 if (COMPARISON_CLASS_P (arg0)
11339 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
11340 && !HONOR_SIGNED_ZEROS (element_mode (op1)))
11342 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11343 if (tem)
11344 return tem;
11347 if (COMPARISON_CLASS_P (arg0)
11348 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
11349 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11351 location_t loc0 = expr_location_or (arg0, loc);
11352 tem = fold_invert_truthvalue (loc0, arg0);
11353 if (tem && COMPARISON_CLASS_P (tem))
11355 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11356 if (tem)
11357 return tem;
11361 /* If the second operand is simpler than the third, swap them
11362 since that produces better jump optimization results. */
11363 if (truth_value_p (TREE_CODE (arg0))
11364 && tree_swap_operands_p (op1, op2))
11366 location_t loc0 = expr_location_or (arg0, loc);
11367 /* See if this can be inverted. If it can't, possibly because
11368 it was a floating-point inequality comparison, don't do
11369 anything. */
11370 tem = fold_invert_truthvalue (loc0, arg0);
11371 if (tem)
11372 return fold_build3_loc (loc, code, type, tem, op2, op1);
11375 /* Convert A ? 1 : 0 to simply A. */
11376 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11377 : (integer_onep (op1)
11378 && !VECTOR_TYPE_P (type)))
11379 && integer_zerop (op2)
11380 /* If we try to convert OP0 to our type, the
11381 call to fold will try to move the conversion inside
11382 a COND, which will recurse. In that case, the COND_EXPR
11383 is probably the best choice, so leave it alone. */
11384 && type == TREE_TYPE (arg0))
11385 return pedantic_non_lvalue_loc (loc, arg0);
11387 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11388 over COND_EXPR in cases such as floating point comparisons. */
11389 if (integer_zerop (op1)
11390 && code == COND_EXPR
11391 && integer_onep (op2)
11392 && !VECTOR_TYPE_P (type)
11393 && truth_value_p (TREE_CODE (arg0)))
11394 return pedantic_non_lvalue_loc (loc,
11395 fold_convert_loc (loc, type,
11396 invert_truthvalue_loc (loc,
11397 arg0)));
11399 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11400 if (TREE_CODE (arg0) == LT_EXPR
11401 && integer_zerop (TREE_OPERAND (arg0, 1))
11402 && integer_zerop (op2)
11403 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11405 /* sign_bit_p looks through both zero and sign extensions,
11406 but for this optimization only sign extensions are
11407 usable. */
11408 tree tem2 = TREE_OPERAND (arg0, 0);
11409 while (tem != tem2)
11411 if (TREE_CODE (tem2) != NOP_EXPR
11412 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11414 tem = NULL_TREE;
11415 break;
11417 tem2 = TREE_OPERAND (tem2, 0);
11419 /* sign_bit_p only checks ARG1 bits within A's precision.
11420 If <sign bit of A> has wider type than A, bits outside
11421 of A's precision in <sign bit of A> need to be checked.
11422 If they are all 0, this optimization needs to be done
11423 in unsigned A's type, if they are all 1 in signed A's type,
11424 otherwise this can't be done. */
11425 if (tem
11426 && TYPE_PRECISION (TREE_TYPE (tem))
11427 < TYPE_PRECISION (TREE_TYPE (arg1))
11428 && TYPE_PRECISION (TREE_TYPE (tem))
11429 < TYPE_PRECISION (type))
11431 int inner_width, outer_width;
11432 tree tem_type;
11434 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11435 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11436 if (outer_width > TYPE_PRECISION (type))
11437 outer_width = TYPE_PRECISION (type);
11439 wide_int mask = wi::shifted_mask
11440 (inner_width, outer_width - inner_width, false,
11441 TYPE_PRECISION (TREE_TYPE (arg1)));
11443 wide_int common = mask & wi::to_wide (arg1);
11444 if (common == mask)
11446 tem_type = signed_type_for (TREE_TYPE (tem));
11447 tem = fold_convert_loc (loc, tem_type, tem);
11449 else if (common == 0)
11451 tem_type = unsigned_type_for (TREE_TYPE (tem));
11452 tem = fold_convert_loc (loc, tem_type, tem);
11454 else
11455 tem = NULL;
11458 if (tem)
11459 return
11460 fold_convert_loc (loc, type,
11461 fold_build2_loc (loc, BIT_AND_EXPR,
11462 TREE_TYPE (tem), tem,
11463 fold_convert_loc (loc,
11464 TREE_TYPE (tem),
11465 arg1)));
11468 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11469 already handled above. */
11470 if (TREE_CODE (arg0) == BIT_AND_EXPR
11471 && integer_onep (TREE_OPERAND (arg0, 1))
11472 && integer_zerop (op2)
11473 && integer_pow2p (arg1))
11475 tree tem = TREE_OPERAND (arg0, 0);
11476 STRIP_NOPS (tem);
11477 if (TREE_CODE (tem) == RSHIFT_EXPR
11478 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11479 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11480 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11481 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11482 fold_convert_loc (loc, type,
11483 TREE_OPERAND (tem, 0)),
11484 op1);
11487 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11488 is probably obsolete because the first operand should be a
11489 truth value (that's why we have the two cases above), but let's
11490 leave it in until we can confirm this for all front-ends. */
11491 if (integer_zerop (op2)
11492 && TREE_CODE (arg0) == NE_EXPR
11493 && integer_zerop (TREE_OPERAND (arg0, 1))
11494 && integer_pow2p (arg1)
11495 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11496 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11497 arg1, OEP_ONLY_CONST))
11498 return pedantic_non_lvalue_loc (loc,
11499 fold_convert_loc (loc, type,
11500 TREE_OPERAND (arg0, 0)));
11502 /* Disable the transformations below for vectors, since
11503 fold_binary_op_with_conditional_arg may undo them immediately,
11504 yielding an infinite loop. */
11505 if (code == VEC_COND_EXPR)
11506 return NULL_TREE;
11508 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11509 if (integer_zerop (op2)
11510 && truth_value_p (TREE_CODE (arg0))
11511 && truth_value_p (TREE_CODE (arg1))
11512 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11513 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11514 : TRUTH_ANDIF_EXPR,
11515 type, fold_convert_loc (loc, type, arg0), op1);
11517 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11518 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11519 && truth_value_p (TREE_CODE (arg0))
11520 && truth_value_p (TREE_CODE (arg1))
11521 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11523 location_t loc0 = expr_location_or (arg0, loc);
11524 /* Only perform transformation if ARG0 is easily inverted. */
11525 tem = fold_invert_truthvalue (loc0, arg0);
11526 if (tem)
11527 return fold_build2_loc (loc, code == VEC_COND_EXPR
11528 ? BIT_IOR_EXPR
11529 : TRUTH_ORIF_EXPR,
11530 type, fold_convert_loc (loc, type, tem),
11531 op1);
11534 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11535 if (integer_zerop (arg1)
11536 && truth_value_p (TREE_CODE (arg0))
11537 && truth_value_p (TREE_CODE (op2))
11538 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11540 location_t loc0 = expr_location_or (arg0, loc);
11541 /* Only perform transformation if ARG0 is easily inverted. */
11542 tem = fold_invert_truthvalue (loc0, arg0);
11543 if (tem)
11544 return fold_build2_loc (loc, code == VEC_COND_EXPR
11545 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11546 type, fold_convert_loc (loc, type, tem),
11547 op2);
11550 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11551 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11552 && truth_value_p (TREE_CODE (arg0))
11553 && truth_value_p (TREE_CODE (op2))
11554 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11555 return fold_build2_loc (loc, code == VEC_COND_EXPR
11556 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11557 type, fold_convert_loc (loc, type, arg0), op2);
11559 return NULL_TREE;
11561 case CALL_EXPR:
11562 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11563 of fold_ternary on them. */
11564 gcc_unreachable ();
11566 case BIT_FIELD_REF:
11567 if (TREE_CODE (arg0) == VECTOR_CST
11568 && (type == TREE_TYPE (TREE_TYPE (arg0))
11569 || (TREE_CODE (type) == VECTOR_TYPE
11570 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11572 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11573 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11574 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11575 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11577 if (n != 0
11578 && (idx % width) == 0
11579 && (n % width) == 0
11580 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11582 idx = idx / width;
11583 n = n / width;
11585 if (TREE_CODE (arg0) == VECTOR_CST)
11587 if (n == 1)
11588 return VECTOR_CST_ELT (arg0, idx);
11590 tree_vector_builder vals (type, n, 1);
11591 for (unsigned i = 0; i < n; ++i)
11592 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
11593 return vals.build ();
11598 /* On constants we can use native encode/interpret to constant
11599 fold (nearly) all BIT_FIELD_REFs. */
11600 if (CONSTANT_CLASS_P (arg0)
11601 && can_native_interpret_type_p (type)
11602 && BITS_PER_UNIT == 8)
11604 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11605 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11606 /* Limit us to a reasonable amount of work. To relax the
11607 other limitations we need bit-shifting of the buffer
11608 and rounding up the size. */
11609 if (bitpos % BITS_PER_UNIT == 0
11610 && bitsize % BITS_PER_UNIT == 0
11611 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11613 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11614 unsigned HOST_WIDE_INT len
11615 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11616 bitpos / BITS_PER_UNIT);
11617 if (len > 0
11618 && len * BITS_PER_UNIT >= bitsize)
11620 tree v = native_interpret_expr (type, b,
11621 bitsize / BITS_PER_UNIT);
11622 if (v)
11623 return v;
11628 return NULL_TREE;
11630 case FMA_EXPR:
11631 /* For integers we can decompose the FMA if possible. */
11632 if (TREE_CODE (arg0) == INTEGER_CST
11633 && TREE_CODE (arg1) == INTEGER_CST)
11634 return fold_build2_loc (loc, PLUS_EXPR, type,
11635 const_binop (MULT_EXPR, arg0, arg1), arg2);
11636 if (integer_zerop (arg2))
11637 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11639 return fold_fma (loc, type, arg0, arg1, arg2);
11641 case VEC_PERM_EXPR:
11642 if (TREE_CODE (arg2) == VECTOR_CST)
11644 unsigned int nelts = VECTOR_CST_NELTS (arg2), i, mask, mask2;
11645 bool need_mask_canon = false;
11646 bool need_mask_canon2 = false;
11647 bool all_in_vec0 = true;
11648 bool all_in_vec1 = true;
11649 bool maybe_identity = true;
11650 bool single_arg = (op0 == op1);
11651 bool changed = false;
11653 mask2 = 2 * nelts - 1;
11654 mask = single_arg ? (nelts - 1) : mask2;
11655 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
11656 auto_vec_perm_indices sel (nelts);
11657 auto_vec_perm_indices sel2 (nelts);
11658 for (i = 0; i < nelts; i++)
11660 tree val = VECTOR_CST_ELT (arg2, i);
11661 if (TREE_CODE (val) != INTEGER_CST)
11662 return NULL_TREE;
11664 /* Make sure that the perm value is in an acceptable
11665 range. */
11666 wi::tree_to_wide_ref t = wi::to_wide (val);
11667 need_mask_canon |= wi::gtu_p (t, mask);
11668 need_mask_canon2 |= wi::gtu_p (t, mask2);
11669 unsigned int elt = t.to_uhwi () & mask;
11670 unsigned int elt2 = t.to_uhwi () & mask2;
11672 if (elt < nelts)
11673 all_in_vec1 = false;
11674 else
11675 all_in_vec0 = false;
11677 if ((elt & (nelts - 1)) != i)
11678 maybe_identity = false;
11680 sel.quick_push (elt);
11681 sel2.quick_push (elt2);
11684 if (maybe_identity)
11686 if (all_in_vec0)
11687 return op0;
11688 if (all_in_vec1)
11689 return op1;
11692 if (all_in_vec0)
11693 op1 = op0;
11694 else if (all_in_vec1)
11696 op0 = op1;
11697 for (i = 0; i < nelts; i++)
11698 sel[i] -= nelts;
11699 need_mask_canon = true;
11702 if ((TREE_CODE (op0) == VECTOR_CST
11703 || TREE_CODE (op0) == CONSTRUCTOR)
11704 && (TREE_CODE (op1) == VECTOR_CST
11705 || TREE_CODE (op1) == CONSTRUCTOR))
11707 tree t = fold_vec_perm (type, op0, op1, sel);
11708 if (t != NULL_TREE)
11709 return t;
11712 if (op0 == op1 && !single_arg)
11713 changed = true;
11715 /* Some targets are deficient and fail to expand a single
11716 argument permutation while still allowing an equivalent
11717 2-argument version. */
11718 if (need_mask_canon && arg2 == op2
11719 && !can_vec_perm_p (TYPE_MODE (type), false, &sel)
11720 && can_vec_perm_p (TYPE_MODE (type), false, &sel2))
11722 need_mask_canon = need_mask_canon2;
11723 sel = sel2;
11726 if (need_mask_canon && arg2 == op2)
11728 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11729 tree_vector_builder tsel (TREE_TYPE (arg2), nelts, 1);
11730 for (i = 0; i < nelts; i++)
11731 tsel.quick_push (build_int_cst (eltype, sel[i]));
11732 op2 = tsel.build ();
11733 changed = true;
11736 if (changed)
11737 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11739 return NULL_TREE;
11741 case BIT_INSERT_EXPR:
11742 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
11743 if (TREE_CODE (arg0) == INTEGER_CST
11744 && TREE_CODE (arg1) == INTEGER_CST)
11746 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11747 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11748 wide_int tem = (wi::to_wide (arg0)
11749 & wi::shifted_mask (bitpos, bitsize, true,
11750 TYPE_PRECISION (type)));
11751 wide_int tem2
11752 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11753 bitsize), bitpos);
11754 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
11756 else if (TREE_CODE (arg0) == VECTOR_CST
11757 && CONSTANT_CLASS_P (arg1)
11758 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11759 TREE_TYPE (arg1)))
11761 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11762 unsigned HOST_WIDE_INT elsize
11763 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11764 if (bitpos % elsize == 0)
11766 unsigned k = bitpos / elsize;
11767 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11768 return arg0;
11769 else
11771 unsigned int nelts = VECTOR_CST_NELTS (arg0);
11772 tree_vector_builder elts (type, nelts, 1);
11773 elts.quick_grow (nelts);
11774 for (unsigned int i = 0; i < nelts; ++i)
11775 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
11776 return elts.build ();
11780 return NULL_TREE;
11782 default:
11783 return NULL_TREE;
11784 } /* switch (code) */
11787 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11788 of an array (or vector). */
11790 tree
11791 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11793 tree index_type = NULL_TREE;
11794 offset_int low_bound = 0;
11796 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11798 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11799 if (domain_type && TYPE_MIN_VALUE (domain_type))
11801 /* Static constructors for variably sized objects makes no sense. */
11802 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11803 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11804 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11808 if (index_type)
11809 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11810 TYPE_SIGN (index_type));
11812 offset_int index = low_bound - 1;
11813 if (index_type)
11814 index = wi::ext (index, TYPE_PRECISION (index_type),
11815 TYPE_SIGN (index_type));
11817 offset_int max_index;
11818 unsigned HOST_WIDE_INT cnt;
11819 tree cfield, cval;
11821 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11823 /* Array constructor might explicitly set index, or specify a range,
11824 or leave index NULL meaning that it is next index after previous
11825 one. */
11826 if (cfield)
11828 if (TREE_CODE (cfield) == INTEGER_CST)
11829 max_index = index = wi::to_offset (cfield);
11830 else
11832 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11833 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11834 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11837 else
11839 index += 1;
11840 if (index_type)
11841 index = wi::ext (index, TYPE_PRECISION (index_type),
11842 TYPE_SIGN (index_type));
11843 max_index = index;
11846 /* Do we have match? */
11847 if (wi::cmpu (access_index, index) >= 0
11848 && wi::cmpu (access_index, max_index) <= 0)
11849 return cval;
11851 return NULL_TREE;
11854 /* Perform constant folding and related simplification of EXPR.
11855 The related simplifications include x*1 => x, x*0 => 0, etc.,
11856 and application of the associative law.
11857 NOP_EXPR conversions may be removed freely (as long as we
11858 are careful not to change the type of the overall expression).
11859 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11860 but we can constant-fold them if they have constant operands. */
11862 #ifdef ENABLE_FOLD_CHECKING
11863 # define fold(x) fold_1 (x)
11864 static tree fold_1 (tree);
11865 static
11866 #endif
11867 tree
11868 fold (tree expr)
11870 const tree t = expr;
11871 enum tree_code code = TREE_CODE (t);
11872 enum tree_code_class kind = TREE_CODE_CLASS (code);
11873 tree tem;
11874 location_t loc = EXPR_LOCATION (expr);
11876 /* Return right away if a constant. */
11877 if (kind == tcc_constant)
11878 return t;
11880 /* CALL_EXPR-like objects with variable numbers of operands are
11881 treated specially. */
11882 if (kind == tcc_vl_exp)
11884 if (code == CALL_EXPR)
11886 tem = fold_call_expr (loc, expr, false);
11887 return tem ? tem : expr;
11889 return expr;
11892 if (IS_EXPR_CODE_CLASS (kind))
11894 tree type = TREE_TYPE (t);
11895 tree op0, op1, op2;
11897 switch (TREE_CODE_LENGTH (code))
11899 case 1:
11900 op0 = TREE_OPERAND (t, 0);
11901 tem = fold_unary_loc (loc, code, type, op0);
11902 return tem ? tem : expr;
11903 case 2:
11904 op0 = TREE_OPERAND (t, 0);
11905 op1 = TREE_OPERAND (t, 1);
11906 tem = fold_binary_loc (loc, code, type, op0, op1);
11907 return tem ? tem : expr;
11908 case 3:
11909 op0 = TREE_OPERAND (t, 0);
11910 op1 = TREE_OPERAND (t, 1);
11911 op2 = TREE_OPERAND (t, 2);
11912 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11913 return tem ? tem : expr;
11914 default:
11915 break;
11919 switch (code)
11921 case ARRAY_REF:
11923 tree op0 = TREE_OPERAND (t, 0);
11924 tree op1 = TREE_OPERAND (t, 1);
11926 if (TREE_CODE (op1) == INTEGER_CST
11927 && TREE_CODE (op0) == CONSTRUCTOR
11928 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11930 tree val = get_array_ctor_element_at_index (op0,
11931 wi::to_offset (op1));
11932 if (val)
11933 return val;
11936 return t;
11939 /* Return a VECTOR_CST if possible. */
11940 case CONSTRUCTOR:
11942 tree type = TREE_TYPE (t);
11943 if (TREE_CODE (type) != VECTOR_TYPE)
11944 return t;
11946 unsigned i;
11947 tree val;
11948 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
11949 if (! CONSTANT_CLASS_P (val))
11950 return t;
11952 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
11955 case CONST_DECL:
11956 return fold (DECL_INITIAL (t));
11958 default:
11959 return t;
11960 } /* switch (code) */
11963 #ifdef ENABLE_FOLD_CHECKING
11964 #undef fold
11966 static void fold_checksum_tree (const_tree, struct md5_ctx *,
11967 hash_table<nofree_ptr_hash<const tree_node> > *);
11968 static void fold_check_failed (const_tree, const_tree);
11969 void print_fold_checksum (const_tree);
11971 /* When --enable-checking=fold, compute a digest of expr before
11972 and after actual fold call to see if fold did not accidentally
11973 change original expr. */
11975 tree
11976 fold (tree expr)
11978 tree ret;
11979 struct md5_ctx ctx;
11980 unsigned char checksum_before[16], checksum_after[16];
11981 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
11983 md5_init_ctx (&ctx);
11984 fold_checksum_tree (expr, &ctx, &ht);
11985 md5_finish_ctx (&ctx, checksum_before);
11986 ht.empty ();
11988 ret = fold_1 (expr);
11990 md5_init_ctx (&ctx);
11991 fold_checksum_tree (expr, &ctx, &ht);
11992 md5_finish_ctx (&ctx, checksum_after);
11994 if (memcmp (checksum_before, checksum_after, 16))
11995 fold_check_failed (expr, ret);
11997 return ret;
12000 void
12001 print_fold_checksum (const_tree expr)
12003 struct md5_ctx ctx;
12004 unsigned char checksum[16], cnt;
12005 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12007 md5_init_ctx (&ctx);
12008 fold_checksum_tree (expr, &ctx, &ht);
12009 md5_finish_ctx (&ctx, checksum);
12010 for (cnt = 0; cnt < 16; ++cnt)
12011 fprintf (stderr, "%02x", checksum[cnt]);
12012 putc ('\n', stderr);
12015 static void
12016 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12018 internal_error ("fold check: original tree changed by fold");
12021 static void
12022 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12023 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12025 const tree_node **slot;
12026 enum tree_code code;
12027 union tree_node buf;
12028 int i, len;
12030 recursive_label:
12031 if (expr == NULL)
12032 return;
12033 slot = ht->find_slot (expr, INSERT);
12034 if (*slot != NULL)
12035 return;
12036 *slot = expr;
12037 code = TREE_CODE (expr);
12038 if (TREE_CODE_CLASS (code) == tcc_declaration
12039 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12041 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12042 memcpy ((char *) &buf, expr, tree_size (expr));
12043 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12044 buf.decl_with_vis.symtab_node = NULL;
12045 expr = (tree) &buf;
12047 else if (TREE_CODE_CLASS (code) == tcc_type
12048 && (TYPE_POINTER_TO (expr)
12049 || TYPE_REFERENCE_TO (expr)
12050 || TYPE_CACHED_VALUES_P (expr)
12051 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12052 || TYPE_NEXT_VARIANT (expr)
12053 || TYPE_ALIAS_SET_KNOWN_P (expr)))
12055 /* Allow these fields to be modified. */
12056 tree tmp;
12057 memcpy ((char *) &buf, expr, tree_size (expr));
12058 expr = tmp = (tree) &buf;
12059 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12060 TYPE_POINTER_TO (tmp) = NULL;
12061 TYPE_REFERENCE_TO (tmp) = NULL;
12062 TYPE_NEXT_VARIANT (tmp) = NULL;
12063 TYPE_ALIAS_SET (tmp) = -1;
12064 if (TYPE_CACHED_VALUES_P (tmp))
12066 TYPE_CACHED_VALUES_P (tmp) = 0;
12067 TYPE_CACHED_VALUES (tmp) = NULL;
12070 md5_process_bytes (expr, tree_size (expr), ctx);
12071 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12072 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12073 if (TREE_CODE_CLASS (code) != tcc_type
12074 && TREE_CODE_CLASS (code) != tcc_declaration
12075 && code != TREE_LIST
12076 && code != SSA_NAME
12077 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12078 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12079 switch (TREE_CODE_CLASS (code))
12081 case tcc_constant:
12082 switch (code)
12084 case STRING_CST:
12085 md5_process_bytes (TREE_STRING_POINTER (expr),
12086 TREE_STRING_LENGTH (expr), ctx);
12087 break;
12088 case COMPLEX_CST:
12089 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12090 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12091 break;
12092 case VECTOR_CST:
12093 len = vector_cst_encoded_nelts (expr);
12094 for (i = 0; i < len; ++i)
12095 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
12096 break;
12097 default:
12098 break;
12100 break;
12101 case tcc_exceptional:
12102 switch (code)
12104 case TREE_LIST:
12105 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12106 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12107 expr = TREE_CHAIN (expr);
12108 goto recursive_label;
12109 break;
12110 case TREE_VEC:
12111 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12112 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12113 break;
12114 default:
12115 break;
12117 break;
12118 case tcc_expression:
12119 case tcc_reference:
12120 case tcc_comparison:
12121 case tcc_unary:
12122 case tcc_binary:
12123 case tcc_statement:
12124 case tcc_vl_exp:
12125 len = TREE_OPERAND_LENGTH (expr);
12126 for (i = 0; i < len; ++i)
12127 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12128 break;
12129 case tcc_declaration:
12130 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12131 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12132 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12134 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12135 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12136 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12137 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12138 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12141 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12143 if (TREE_CODE (expr) == FUNCTION_DECL)
12145 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12146 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12148 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12150 break;
12151 case tcc_type:
12152 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12153 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12154 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12155 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12156 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12157 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12158 if (INTEGRAL_TYPE_P (expr)
12159 || SCALAR_FLOAT_TYPE_P (expr))
12161 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12162 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12164 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12165 if (TREE_CODE (expr) == RECORD_TYPE
12166 || TREE_CODE (expr) == UNION_TYPE
12167 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12168 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12169 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12170 break;
12171 default:
12172 break;
12176 /* Helper function for outputting the checksum of a tree T. When
12177 debugging with gdb, you can "define mynext" to be "next" followed
12178 by "call debug_fold_checksum (op0)", then just trace down till the
12179 outputs differ. */
12181 DEBUG_FUNCTION void
12182 debug_fold_checksum (const_tree t)
12184 int i;
12185 unsigned char checksum[16];
12186 struct md5_ctx ctx;
12187 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12189 md5_init_ctx (&ctx);
12190 fold_checksum_tree (t, &ctx, &ht);
12191 md5_finish_ctx (&ctx, checksum);
12192 ht.empty ();
12194 for (i = 0; i < 16; i++)
12195 fprintf (stderr, "%d ", checksum[i]);
12197 fprintf (stderr, "\n");
12200 #endif
12202 /* Fold a unary tree expression with code CODE of type TYPE with an
12203 operand OP0. LOC is the location of the resulting expression.
12204 Return a folded expression if successful. Otherwise, return a tree
12205 expression with code CODE of type TYPE with an operand OP0. */
12207 tree
12208 fold_build1_loc (location_t loc,
12209 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12211 tree tem;
12212 #ifdef ENABLE_FOLD_CHECKING
12213 unsigned char checksum_before[16], checksum_after[16];
12214 struct md5_ctx ctx;
12215 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12217 md5_init_ctx (&ctx);
12218 fold_checksum_tree (op0, &ctx, &ht);
12219 md5_finish_ctx (&ctx, checksum_before);
12220 ht.empty ();
12221 #endif
12223 tem = fold_unary_loc (loc, code, type, op0);
12224 if (!tem)
12225 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
12227 #ifdef ENABLE_FOLD_CHECKING
12228 md5_init_ctx (&ctx);
12229 fold_checksum_tree (op0, &ctx, &ht);
12230 md5_finish_ctx (&ctx, checksum_after);
12232 if (memcmp (checksum_before, checksum_after, 16))
12233 fold_check_failed (op0, tem);
12234 #endif
12235 return tem;
12238 /* Fold a binary tree expression with code CODE of type TYPE with
12239 operands OP0 and OP1. LOC is the location of the resulting
12240 expression. Return a folded expression if successful. Otherwise,
12241 return a tree expression with code CODE of type TYPE with operands
12242 OP0 and OP1. */
12244 tree
12245 fold_build2_loc (location_t loc,
12246 enum tree_code code, tree type, tree op0, tree op1
12247 MEM_STAT_DECL)
12249 tree tem;
12250 #ifdef ENABLE_FOLD_CHECKING
12251 unsigned char checksum_before_op0[16],
12252 checksum_before_op1[16],
12253 checksum_after_op0[16],
12254 checksum_after_op1[16];
12255 struct md5_ctx ctx;
12256 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12258 md5_init_ctx (&ctx);
12259 fold_checksum_tree (op0, &ctx, &ht);
12260 md5_finish_ctx (&ctx, checksum_before_op0);
12261 ht.empty ();
12263 md5_init_ctx (&ctx);
12264 fold_checksum_tree (op1, &ctx, &ht);
12265 md5_finish_ctx (&ctx, checksum_before_op1);
12266 ht.empty ();
12267 #endif
12269 tem = fold_binary_loc (loc, code, type, op0, op1);
12270 if (!tem)
12271 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12273 #ifdef ENABLE_FOLD_CHECKING
12274 md5_init_ctx (&ctx);
12275 fold_checksum_tree (op0, &ctx, &ht);
12276 md5_finish_ctx (&ctx, checksum_after_op0);
12277 ht.empty ();
12279 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12280 fold_check_failed (op0, tem);
12282 md5_init_ctx (&ctx);
12283 fold_checksum_tree (op1, &ctx, &ht);
12284 md5_finish_ctx (&ctx, checksum_after_op1);
12286 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12287 fold_check_failed (op1, tem);
12288 #endif
12289 return tem;
12292 /* Fold a ternary tree expression with code CODE of type TYPE with
12293 operands OP0, OP1, and OP2. Return a folded expression if
12294 successful. Otherwise, return a tree expression with code CODE of
12295 type TYPE with operands OP0, OP1, and OP2. */
12297 tree
12298 fold_build3_loc (location_t loc, enum tree_code code, tree type,
12299 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12301 tree tem;
12302 #ifdef ENABLE_FOLD_CHECKING
12303 unsigned char checksum_before_op0[16],
12304 checksum_before_op1[16],
12305 checksum_before_op2[16],
12306 checksum_after_op0[16],
12307 checksum_after_op1[16],
12308 checksum_after_op2[16];
12309 struct md5_ctx ctx;
12310 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12312 md5_init_ctx (&ctx);
12313 fold_checksum_tree (op0, &ctx, &ht);
12314 md5_finish_ctx (&ctx, checksum_before_op0);
12315 ht.empty ();
12317 md5_init_ctx (&ctx);
12318 fold_checksum_tree (op1, &ctx, &ht);
12319 md5_finish_ctx (&ctx, checksum_before_op1);
12320 ht.empty ();
12322 md5_init_ctx (&ctx);
12323 fold_checksum_tree (op2, &ctx, &ht);
12324 md5_finish_ctx (&ctx, checksum_before_op2);
12325 ht.empty ();
12326 #endif
12328 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12329 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12330 if (!tem)
12331 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12333 #ifdef ENABLE_FOLD_CHECKING
12334 md5_init_ctx (&ctx);
12335 fold_checksum_tree (op0, &ctx, &ht);
12336 md5_finish_ctx (&ctx, checksum_after_op0);
12337 ht.empty ();
12339 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12340 fold_check_failed (op0, tem);
12342 md5_init_ctx (&ctx);
12343 fold_checksum_tree (op1, &ctx, &ht);
12344 md5_finish_ctx (&ctx, checksum_after_op1);
12345 ht.empty ();
12347 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12348 fold_check_failed (op1, tem);
12350 md5_init_ctx (&ctx);
12351 fold_checksum_tree (op2, &ctx, &ht);
12352 md5_finish_ctx (&ctx, checksum_after_op2);
12354 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12355 fold_check_failed (op2, tem);
12356 #endif
12357 return tem;
12360 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12361 arguments in ARGARRAY, and a null static chain.
12362 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12363 of type TYPE from the given operands as constructed by build_call_array. */
12365 tree
12366 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12367 int nargs, tree *argarray)
12369 tree tem;
12370 #ifdef ENABLE_FOLD_CHECKING
12371 unsigned char checksum_before_fn[16],
12372 checksum_before_arglist[16],
12373 checksum_after_fn[16],
12374 checksum_after_arglist[16];
12375 struct md5_ctx ctx;
12376 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12377 int i;
12379 md5_init_ctx (&ctx);
12380 fold_checksum_tree (fn, &ctx, &ht);
12381 md5_finish_ctx (&ctx, checksum_before_fn);
12382 ht.empty ();
12384 md5_init_ctx (&ctx);
12385 for (i = 0; i < nargs; i++)
12386 fold_checksum_tree (argarray[i], &ctx, &ht);
12387 md5_finish_ctx (&ctx, checksum_before_arglist);
12388 ht.empty ();
12389 #endif
12391 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12392 if (!tem)
12393 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12395 #ifdef ENABLE_FOLD_CHECKING
12396 md5_init_ctx (&ctx);
12397 fold_checksum_tree (fn, &ctx, &ht);
12398 md5_finish_ctx (&ctx, checksum_after_fn);
12399 ht.empty ();
12401 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12402 fold_check_failed (fn, tem);
12404 md5_init_ctx (&ctx);
12405 for (i = 0; i < nargs; i++)
12406 fold_checksum_tree (argarray[i], &ctx, &ht);
12407 md5_finish_ctx (&ctx, checksum_after_arglist);
12409 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12410 fold_check_failed (NULL_TREE, tem);
12411 #endif
12412 return tem;
12415 /* Perform constant folding and related simplification of initializer
12416 expression EXPR. These behave identically to "fold_buildN" but ignore
12417 potential run-time traps and exceptions that fold must preserve. */
12419 #define START_FOLD_INIT \
12420 int saved_signaling_nans = flag_signaling_nans;\
12421 int saved_trapping_math = flag_trapping_math;\
12422 int saved_rounding_math = flag_rounding_math;\
12423 int saved_trapv = flag_trapv;\
12424 int saved_folding_initializer = folding_initializer;\
12425 flag_signaling_nans = 0;\
12426 flag_trapping_math = 0;\
12427 flag_rounding_math = 0;\
12428 flag_trapv = 0;\
12429 folding_initializer = 1;
12431 #define END_FOLD_INIT \
12432 flag_signaling_nans = saved_signaling_nans;\
12433 flag_trapping_math = saved_trapping_math;\
12434 flag_rounding_math = saved_rounding_math;\
12435 flag_trapv = saved_trapv;\
12436 folding_initializer = saved_folding_initializer;
12438 tree
12439 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12440 tree type, tree op)
12442 tree result;
12443 START_FOLD_INIT;
12445 result = fold_build1_loc (loc, code, type, op);
12447 END_FOLD_INIT;
12448 return result;
12451 tree
12452 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12453 tree type, tree op0, tree op1)
12455 tree result;
12456 START_FOLD_INIT;
12458 result = fold_build2_loc (loc, code, type, op0, op1);
12460 END_FOLD_INIT;
12461 return result;
12464 tree
12465 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12466 int nargs, tree *argarray)
12468 tree result;
12469 START_FOLD_INIT;
12471 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12473 END_FOLD_INIT;
12474 return result;
12477 #undef START_FOLD_INIT
12478 #undef END_FOLD_INIT
12480 /* Determine if first argument is a multiple of second argument. Return 0 if
12481 it is not, or we cannot easily determined it to be.
12483 An example of the sort of thing we care about (at this point; this routine
12484 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12485 fold cases do now) is discovering that
12487 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12489 is a multiple of
12491 SAVE_EXPR (J * 8)
12493 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12495 This code also handles discovering that
12497 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12499 is a multiple of 8 so we don't have to worry about dealing with a
12500 possible remainder.
12502 Note that we *look* inside a SAVE_EXPR only to determine how it was
12503 calculated; it is not safe for fold to do much of anything else with the
12504 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12505 at run time. For example, the latter example above *cannot* be implemented
12506 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12507 evaluation time of the original SAVE_EXPR is not necessarily the same at
12508 the time the new expression is evaluated. The only optimization of this
12509 sort that would be valid is changing
12511 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12513 divided by 8 to
12515 SAVE_EXPR (I) * SAVE_EXPR (J)
12517 (where the same SAVE_EXPR (J) is used in the original and the
12518 transformed version). */
12521 multiple_of_p (tree type, const_tree top, const_tree bottom)
12523 gimple *stmt;
12524 tree t1, op1, op2;
12526 if (operand_equal_p (top, bottom, 0))
12527 return 1;
12529 if (TREE_CODE (type) != INTEGER_TYPE)
12530 return 0;
12532 switch (TREE_CODE (top))
12534 case BIT_AND_EXPR:
12535 /* Bitwise and provides a power of two multiple. If the mask is
12536 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12537 if (!integer_pow2p (bottom))
12538 return 0;
12539 /* FALLTHRU */
12541 case MULT_EXPR:
12542 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12543 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12545 case MINUS_EXPR:
12546 /* It is impossible to prove if op0 - op1 is multiple of bottom
12547 precisely, so be conservative here checking if both op0 and op1
12548 are multiple of bottom. Note we check the second operand first
12549 since it's usually simpler. */
12550 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12551 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12553 case PLUS_EXPR:
12554 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12555 as op0 - 3 if the expression has unsigned type. For example,
12556 (X / 3) + 0xfffffffd is multiple of 3, but 0xfffffffd is not. */
12557 op1 = TREE_OPERAND (top, 1);
12558 if (TYPE_UNSIGNED (type)
12559 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
12560 op1 = fold_build1 (NEGATE_EXPR, type, op1);
12561 return (multiple_of_p (type, op1, bottom)
12562 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12564 case LSHIFT_EXPR:
12565 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12567 op1 = TREE_OPERAND (top, 1);
12568 /* const_binop may not detect overflow correctly,
12569 so check for it explicitly here. */
12570 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
12571 wi::to_wide (op1))
12572 && (t1 = fold_convert (type,
12573 const_binop (LSHIFT_EXPR, size_one_node,
12574 op1))) != 0
12575 && !TREE_OVERFLOW (t1))
12576 return multiple_of_p (type, t1, bottom);
12578 return 0;
12580 case NOP_EXPR:
12581 /* Can't handle conversions from non-integral or wider integral type. */
12582 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12583 || (TYPE_PRECISION (type)
12584 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12585 return 0;
12587 /* fall through */
12589 case SAVE_EXPR:
12590 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12592 case COND_EXPR:
12593 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12594 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12596 case INTEGER_CST:
12597 if (TREE_CODE (bottom) != INTEGER_CST
12598 || integer_zerop (bottom)
12599 || (TYPE_UNSIGNED (type)
12600 && (tree_int_cst_sgn (top) < 0
12601 || tree_int_cst_sgn (bottom) < 0)))
12602 return 0;
12603 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12604 SIGNED);
12606 case SSA_NAME:
12607 if (TREE_CODE (bottom) == INTEGER_CST
12608 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
12609 && gimple_code (stmt) == GIMPLE_ASSIGN)
12611 enum tree_code code = gimple_assign_rhs_code (stmt);
12613 /* Check for special cases to see if top is defined as multiple
12614 of bottom:
12616 top = (X & ~(bottom - 1) ; bottom is power of 2
12620 Y = X % bottom
12621 top = X - Y. */
12622 if (code == BIT_AND_EXPR
12623 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12624 && TREE_CODE (op2) == INTEGER_CST
12625 && integer_pow2p (bottom)
12626 && wi::multiple_of_p (wi::to_widest (op2),
12627 wi::to_widest (bottom), UNSIGNED))
12628 return 1;
12630 op1 = gimple_assign_rhs1 (stmt);
12631 if (code == MINUS_EXPR
12632 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12633 && TREE_CODE (op2) == SSA_NAME
12634 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
12635 && gimple_code (stmt) == GIMPLE_ASSIGN
12636 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
12637 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
12638 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
12639 return 1;
12642 /* fall through */
12644 default:
12645 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
12646 return multiple_p (wi::to_poly_widest (top),
12647 wi::to_poly_widest (bottom));
12649 return 0;
12653 #define tree_expr_nonnegative_warnv_p(X, Y) \
12654 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12656 #define RECURSE(X) \
12657 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12659 /* Return true if CODE or TYPE is known to be non-negative. */
12661 static bool
12662 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12664 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12665 && truth_value_p (code))
12666 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12667 have a signed:1 type (where the value is -1 and 0). */
12668 return true;
12669 return false;
12672 /* Return true if (CODE OP0) is known to be non-negative. If the return
12673 value is based on the assumption that signed overflow is undefined,
12674 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12675 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12677 bool
12678 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12679 bool *strict_overflow_p, int depth)
12681 if (TYPE_UNSIGNED (type))
12682 return true;
12684 switch (code)
12686 case ABS_EXPR:
12687 /* We can't return 1 if flag_wrapv is set because
12688 ABS_EXPR<INT_MIN> = INT_MIN. */
12689 if (!ANY_INTEGRAL_TYPE_P (type))
12690 return true;
12691 if (TYPE_OVERFLOW_UNDEFINED (type))
12693 *strict_overflow_p = true;
12694 return true;
12696 break;
12698 case NON_LVALUE_EXPR:
12699 case FLOAT_EXPR:
12700 case FIX_TRUNC_EXPR:
12701 return RECURSE (op0);
12703 CASE_CONVERT:
12705 tree inner_type = TREE_TYPE (op0);
12706 tree outer_type = type;
12708 if (TREE_CODE (outer_type) == REAL_TYPE)
12710 if (TREE_CODE (inner_type) == REAL_TYPE)
12711 return RECURSE (op0);
12712 if (INTEGRAL_TYPE_P (inner_type))
12714 if (TYPE_UNSIGNED (inner_type))
12715 return true;
12716 return RECURSE (op0);
12719 else if (INTEGRAL_TYPE_P (outer_type))
12721 if (TREE_CODE (inner_type) == REAL_TYPE)
12722 return RECURSE (op0);
12723 if (INTEGRAL_TYPE_P (inner_type))
12724 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12725 && TYPE_UNSIGNED (inner_type);
12728 break;
12730 default:
12731 return tree_simple_nonnegative_warnv_p (code, type);
12734 /* We don't know sign of `t', so be conservative and return false. */
12735 return false;
12738 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12739 value is based on the assumption that signed overflow is undefined,
12740 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12741 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12743 bool
12744 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12745 tree op1, bool *strict_overflow_p,
12746 int depth)
12748 if (TYPE_UNSIGNED (type))
12749 return true;
12751 switch (code)
12753 case POINTER_PLUS_EXPR:
12754 case PLUS_EXPR:
12755 if (FLOAT_TYPE_P (type))
12756 return RECURSE (op0) && RECURSE (op1);
12758 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12759 both unsigned and at least 2 bits shorter than the result. */
12760 if (TREE_CODE (type) == INTEGER_TYPE
12761 && TREE_CODE (op0) == NOP_EXPR
12762 && TREE_CODE (op1) == NOP_EXPR)
12764 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12765 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12766 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12767 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12769 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12770 TYPE_PRECISION (inner2)) + 1;
12771 return prec < TYPE_PRECISION (type);
12774 break;
12776 case MULT_EXPR:
12777 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12779 /* x * x is always non-negative for floating point x
12780 or without overflow. */
12781 if (operand_equal_p (op0, op1, 0)
12782 || (RECURSE (op0) && RECURSE (op1)))
12784 if (ANY_INTEGRAL_TYPE_P (type)
12785 && TYPE_OVERFLOW_UNDEFINED (type))
12786 *strict_overflow_p = true;
12787 return true;
12791 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12792 both unsigned and their total bits is shorter than the result. */
12793 if (TREE_CODE (type) == INTEGER_TYPE
12794 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12795 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12797 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12798 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12799 : TREE_TYPE (op0);
12800 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12801 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12802 : TREE_TYPE (op1);
12804 bool unsigned0 = TYPE_UNSIGNED (inner0);
12805 bool unsigned1 = TYPE_UNSIGNED (inner1);
12807 if (TREE_CODE (op0) == INTEGER_CST)
12808 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12810 if (TREE_CODE (op1) == INTEGER_CST)
12811 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12813 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12814 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12816 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12817 ? tree_int_cst_min_precision (op0, UNSIGNED)
12818 : TYPE_PRECISION (inner0);
12820 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12821 ? tree_int_cst_min_precision (op1, UNSIGNED)
12822 : TYPE_PRECISION (inner1);
12824 return precision0 + precision1 < TYPE_PRECISION (type);
12827 return false;
12829 case BIT_AND_EXPR:
12830 case MAX_EXPR:
12831 return RECURSE (op0) || RECURSE (op1);
12833 case BIT_IOR_EXPR:
12834 case BIT_XOR_EXPR:
12835 case MIN_EXPR:
12836 case RDIV_EXPR:
12837 case TRUNC_DIV_EXPR:
12838 case CEIL_DIV_EXPR:
12839 case FLOOR_DIV_EXPR:
12840 case ROUND_DIV_EXPR:
12841 return RECURSE (op0) && RECURSE (op1);
12843 case TRUNC_MOD_EXPR:
12844 return RECURSE (op0);
12846 case FLOOR_MOD_EXPR:
12847 return RECURSE (op1);
12849 case CEIL_MOD_EXPR:
12850 case ROUND_MOD_EXPR:
12851 default:
12852 return tree_simple_nonnegative_warnv_p (code, type);
12855 /* We don't know sign of `t', so be conservative and return false. */
12856 return false;
12859 /* Return true if T is known to be non-negative. If the return
12860 value is based on the assumption that signed overflow is undefined,
12861 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12862 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12864 bool
12865 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12867 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12868 return true;
12870 switch (TREE_CODE (t))
12872 case INTEGER_CST:
12873 return tree_int_cst_sgn (t) >= 0;
12875 case REAL_CST:
12876 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12878 case FIXED_CST:
12879 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12881 case COND_EXPR:
12882 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12884 case SSA_NAME:
12885 /* Limit the depth of recursion to avoid quadratic behavior.
12886 This is expected to catch almost all occurrences in practice.
12887 If this code misses important cases that unbounded recursion
12888 would not, passes that need this information could be revised
12889 to provide it through dataflow propagation. */
12890 return (!name_registered_for_update_p (t)
12891 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12892 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12893 strict_overflow_p, depth));
12895 default:
12896 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12900 /* Return true if T is known to be non-negative. If the return
12901 value is based on the assumption that signed overflow is undefined,
12902 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12903 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12905 bool
12906 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
12907 bool *strict_overflow_p, int depth)
12909 switch (fn)
12911 CASE_CFN_ACOS:
12912 CASE_CFN_ACOSH:
12913 CASE_CFN_CABS:
12914 CASE_CFN_COSH:
12915 CASE_CFN_ERFC:
12916 CASE_CFN_EXP:
12917 CASE_CFN_EXP10:
12918 CASE_CFN_EXP2:
12919 CASE_CFN_FABS:
12920 CASE_CFN_FDIM:
12921 CASE_CFN_HYPOT:
12922 CASE_CFN_POW10:
12923 CASE_CFN_FFS:
12924 CASE_CFN_PARITY:
12925 CASE_CFN_POPCOUNT:
12926 CASE_CFN_CLZ:
12927 CASE_CFN_CLRSB:
12928 case CFN_BUILT_IN_BSWAP32:
12929 case CFN_BUILT_IN_BSWAP64:
12930 /* Always true. */
12931 return true;
12933 CASE_CFN_SQRT:
12934 CASE_CFN_SQRT_FN:
12935 /* sqrt(-0.0) is -0.0. */
12936 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12937 return true;
12938 return RECURSE (arg0);
12940 CASE_CFN_ASINH:
12941 CASE_CFN_ATAN:
12942 CASE_CFN_ATANH:
12943 CASE_CFN_CBRT:
12944 CASE_CFN_CEIL:
12945 CASE_CFN_ERF:
12946 CASE_CFN_EXPM1:
12947 CASE_CFN_FLOOR:
12948 CASE_CFN_FMOD:
12949 CASE_CFN_FREXP:
12950 CASE_CFN_ICEIL:
12951 CASE_CFN_IFLOOR:
12952 CASE_CFN_IRINT:
12953 CASE_CFN_IROUND:
12954 CASE_CFN_LCEIL:
12955 CASE_CFN_LDEXP:
12956 CASE_CFN_LFLOOR:
12957 CASE_CFN_LLCEIL:
12958 CASE_CFN_LLFLOOR:
12959 CASE_CFN_LLRINT:
12960 CASE_CFN_LLROUND:
12961 CASE_CFN_LRINT:
12962 CASE_CFN_LROUND:
12963 CASE_CFN_MODF:
12964 CASE_CFN_NEARBYINT:
12965 CASE_CFN_RINT:
12966 CASE_CFN_ROUND:
12967 CASE_CFN_SCALB:
12968 CASE_CFN_SCALBLN:
12969 CASE_CFN_SCALBN:
12970 CASE_CFN_SIGNBIT:
12971 CASE_CFN_SIGNIFICAND:
12972 CASE_CFN_SINH:
12973 CASE_CFN_TANH:
12974 CASE_CFN_TRUNC:
12975 /* True if the 1st argument is nonnegative. */
12976 return RECURSE (arg0);
12978 CASE_CFN_FMAX:
12979 CASE_CFN_FMAX_FN:
12980 /* True if the 1st OR 2nd arguments are nonnegative. */
12981 return RECURSE (arg0) || RECURSE (arg1);
12983 CASE_CFN_FMIN:
12984 CASE_CFN_FMIN_FN:
12985 /* True if the 1st AND 2nd arguments are nonnegative. */
12986 return RECURSE (arg0) && RECURSE (arg1);
12988 CASE_CFN_COPYSIGN:
12989 CASE_CFN_COPYSIGN_FN:
12990 /* True if the 2nd argument is nonnegative. */
12991 return RECURSE (arg1);
12993 CASE_CFN_POWI:
12994 /* True if the 1st argument is nonnegative or the second
12995 argument is an even integer. */
12996 if (TREE_CODE (arg1) == INTEGER_CST
12997 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
12998 return true;
12999 return RECURSE (arg0);
13001 CASE_CFN_POW:
13002 /* True if the 1st argument is nonnegative or the second
13003 argument is an even integer valued real. */
13004 if (TREE_CODE (arg1) == REAL_CST)
13006 REAL_VALUE_TYPE c;
13007 HOST_WIDE_INT n;
13009 c = TREE_REAL_CST (arg1);
13010 n = real_to_integer (&c);
13011 if ((n & 1) == 0)
13013 REAL_VALUE_TYPE cint;
13014 real_from_integer (&cint, VOIDmode, n, SIGNED);
13015 if (real_identical (&c, &cint))
13016 return true;
13019 return RECURSE (arg0);
13021 default:
13022 break;
13024 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13027 /* Return true if T is known to be non-negative. If the return
13028 value is based on the assumption that signed overflow is undefined,
13029 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13030 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13032 static bool
13033 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13035 enum tree_code code = TREE_CODE (t);
13036 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13037 return true;
13039 switch (code)
13041 case TARGET_EXPR:
13043 tree temp = TARGET_EXPR_SLOT (t);
13044 t = TARGET_EXPR_INITIAL (t);
13046 /* If the initializer is non-void, then it's a normal expression
13047 that will be assigned to the slot. */
13048 if (!VOID_TYPE_P (t))
13049 return RECURSE (t);
13051 /* Otherwise, the initializer sets the slot in some way. One common
13052 way is an assignment statement at the end of the initializer. */
13053 while (1)
13055 if (TREE_CODE (t) == BIND_EXPR)
13056 t = expr_last (BIND_EXPR_BODY (t));
13057 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13058 || TREE_CODE (t) == TRY_CATCH_EXPR)
13059 t = expr_last (TREE_OPERAND (t, 0));
13060 else if (TREE_CODE (t) == STATEMENT_LIST)
13061 t = expr_last (t);
13062 else
13063 break;
13065 if (TREE_CODE (t) == MODIFY_EXPR
13066 && TREE_OPERAND (t, 0) == temp)
13067 return RECURSE (TREE_OPERAND (t, 1));
13069 return false;
13072 case CALL_EXPR:
13074 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13075 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13077 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13078 get_call_combined_fn (t),
13079 arg0,
13080 arg1,
13081 strict_overflow_p, depth);
13083 case COMPOUND_EXPR:
13084 case MODIFY_EXPR:
13085 return RECURSE (TREE_OPERAND (t, 1));
13087 case BIND_EXPR:
13088 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13090 case SAVE_EXPR:
13091 return RECURSE (TREE_OPERAND (t, 0));
13093 default:
13094 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13098 #undef RECURSE
13099 #undef tree_expr_nonnegative_warnv_p
13101 /* Return true if T is known to be non-negative. If the return
13102 value is based on the assumption that signed overflow is undefined,
13103 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13104 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13106 bool
13107 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13109 enum tree_code code;
13110 if (t == error_mark_node)
13111 return false;
13113 code = TREE_CODE (t);
13114 switch (TREE_CODE_CLASS (code))
13116 case tcc_binary:
13117 case tcc_comparison:
13118 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13119 TREE_TYPE (t),
13120 TREE_OPERAND (t, 0),
13121 TREE_OPERAND (t, 1),
13122 strict_overflow_p, depth);
13124 case tcc_unary:
13125 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13126 TREE_TYPE (t),
13127 TREE_OPERAND (t, 0),
13128 strict_overflow_p, depth);
13130 case tcc_constant:
13131 case tcc_declaration:
13132 case tcc_reference:
13133 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13135 default:
13136 break;
13139 switch (code)
13141 case TRUTH_AND_EXPR:
13142 case TRUTH_OR_EXPR:
13143 case TRUTH_XOR_EXPR:
13144 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13145 TREE_TYPE (t),
13146 TREE_OPERAND (t, 0),
13147 TREE_OPERAND (t, 1),
13148 strict_overflow_p, depth);
13149 case TRUTH_NOT_EXPR:
13150 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13151 TREE_TYPE (t),
13152 TREE_OPERAND (t, 0),
13153 strict_overflow_p, depth);
13155 case COND_EXPR:
13156 case CONSTRUCTOR:
13157 case OBJ_TYPE_REF:
13158 case ASSERT_EXPR:
13159 case ADDR_EXPR:
13160 case WITH_SIZE_EXPR:
13161 case SSA_NAME:
13162 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13164 default:
13165 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13169 /* Return true if `t' is known to be non-negative. Handle warnings
13170 about undefined signed overflow. */
13172 bool
13173 tree_expr_nonnegative_p (tree t)
13175 bool ret, strict_overflow_p;
13177 strict_overflow_p = false;
13178 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13179 if (strict_overflow_p)
13180 fold_overflow_warning (("assuming signed overflow does not occur when "
13181 "determining that expression is always "
13182 "non-negative"),
13183 WARN_STRICT_OVERFLOW_MISC);
13184 return ret;
13188 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13189 For floating point we further ensure that T is not denormal.
13190 Similar logic is present in nonzero_address in rtlanal.h.
13192 If the return value is based on the assumption that signed overflow
13193 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13194 change *STRICT_OVERFLOW_P. */
13196 bool
13197 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13198 bool *strict_overflow_p)
13200 switch (code)
13202 case ABS_EXPR:
13203 return tree_expr_nonzero_warnv_p (op0,
13204 strict_overflow_p);
13206 case NOP_EXPR:
13208 tree inner_type = TREE_TYPE (op0);
13209 tree outer_type = type;
13211 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13212 && tree_expr_nonzero_warnv_p (op0,
13213 strict_overflow_p));
13215 break;
13217 case NON_LVALUE_EXPR:
13218 return tree_expr_nonzero_warnv_p (op0,
13219 strict_overflow_p);
13221 default:
13222 break;
13225 return false;
13228 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13229 For floating point we further ensure that T is not denormal.
13230 Similar logic is present in nonzero_address in rtlanal.h.
13232 If the return value is based on the assumption that signed overflow
13233 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13234 change *STRICT_OVERFLOW_P. */
13236 bool
13237 tree_binary_nonzero_warnv_p (enum tree_code code,
13238 tree type,
13239 tree op0,
13240 tree op1, bool *strict_overflow_p)
13242 bool sub_strict_overflow_p;
13243 switch (code)
13245 case POINTER_PLUS_EXPR:
13246 case PLUS_EXPR:
13247 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13249 /* With the presence of negative values it is hard
13250 to say something. */
13251 sub_strict_overflow_p = false;
13252 if (!tree_expr_nonnegative_warnv_p (op0,
13253 &sub_strict_overflow_p)
13254 || !tree_expr_nonnegative_warnv_p (op1,
13255 &sub_strict_overflow_p))
13256 return false;
13257 /* One of operands must be positive and the other non-negative. */
13258 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13259 overflows, on a twos-complement machine the sum of two
13260 nonnegative numbers can never be zero. */
13261 return (tree_expr_nonzero_warnv_p (op0,
13262 strict_overflow_p)
13263 || tree_expr_nonzero_warnv_p (op1,
13264 strict_overflow_p));
13266 break;
13268 case MULT_EXPR:
13269 if (TYPE_OVERFLOW_UNDEFINED (type))
13271 if (tree_expr_nonzero_warnv_p (op0,
13272 strict_overflow_p)
13273 && tree_expr_nonzero_warnv_p (op1,
13274 strict_overflow_p))
13276 *strict_overflow_p = true;
13277 return true;
13280 break;
13282 case MIN_EXPR:
13283 sub_strict_overflow_p = false;
13284 if (tree_expr_nonzero_warnv_p (op0,
13285 &sub_strict_overflow_p)
13286 && tree_expr_nonzero_warnv_p (op1,
13287 &sub_strict_overflow_p))
13289 if (sub_strict_overflow_p)
13290 *strict_overflow_p = true;
13292 break;
13294 case MAX_EXPR:
13295 sub_strict_overflow_p = false;
13296 if (tree_expr_nonzero_warnv_p (op0,
13297 &sub_strict_overflow_p))
13299 if (sub_strict_overflow_p)
13300 *strict_overflow_p = true;
13302 /* When both operands are nonzero, then MAX must be too. */
13303 if (tree_expr_nonzero_warnv_p (op1,
13304 strict_overflow_p))
13305 return true;
13307 /* MAX where operand 0 is positive is positive. */
13308 return tree_expr_nonnegative_warnv_p (op0,
13309 strict_overflow_p);
13311 /* MAX where operand 1 is positive is positive. */
13312 else if (tree_expr_nonzero_warnv_p (op1,
13313 &sub_strict_overflow_p)
13314 && tree_expr_nonnegative_warnv_p (op1,
13315 &sub_strict_overflow_p))
13317 if (sub_strict_overflow_p)
13318 *strict_overflow_p = true;
13319 return true;
13321 break;
13323 case BIT_IOR_EXPR:
13324 return (tree_expr_nonzero_warnv_p (op1,
13325 strict_overflow_p)
13326 || tree_expr_nonzero_warnv_p (op0,
13327 strict_overflow_p));
13329 default:
13330 break;
13333 return false;
13336 /* Return true when T is an address and is known to be nonzero.
13337 For floating point we further ensure that T is not denormal.
13338 Similar logic is present in nonzero_address in rtlanal.h.
13340 If the return value is based on the assumption that signed overflow
13341 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13342 change *STRICT_OVERFLOW_P. */
13344 bool
13345 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13347 bool sub_strict_overflow_p;
13348 switch (TREE_CODE (t))
13350 case INTEGER_CST:
13351 return !integer_zerop (t);
13353 case ADDR_EXPR:
13355 tree base = TREE_OPERAND (t, 0);
13357 if (!DECL_P (base))
13358 base = get_base_address (base);
13360 if (base && TREE_CODE (base) == TARGET_EXPR)
13361 base = TARGET_EXPR_SLOT (base);
13363 if (!base)
13364 return false;
13366 /* For objects in symbol table check if we know they are non-zero.
13367 Don't do anything for variables and functions before symtab is built;
13368 it is quite possible that they will be declared weak later. */
13369 int nonzero_addr = maybe_nonzero_address (base);
13370 if (nonzero_addr >= 0)
13371 return nonzero_addr;
13373 /* Constants are never weak. */
13374 if (CONSTANT_CLASS_P (base))
13375 return true;
13377 return false;
13380 case COND_EXPR:
13381 sub_strict_overflow_p = false;
13382 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13383 &sub_strict_overflow_p)
13384 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13385 &sub_strict_overflow_p))
13387 if (sub_strict_overflow_p)
13388 *strict_overflow_p = true;
13389 return true;
13391 break;
13393 case SSA_NAME:
13394 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13395 break;
13396 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
13398 default:
13399 break;
13401 return false;
13404 #define integer_valued_real_p(X) \
13405 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13407 #define RECURSE(X) \
13408 ((integer_valued_real_p) (X, depth + 1))
13410 /* Return true if the floating point result of (CODE OP0) has an
13411 integer value. We also allow +Inf, -Inf and NaN to be considered
13412 integer values. Return false for signaling NaN.
13414 DEPTH is the current nesting depth of the query. */
13416 bool
13417 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13419 switch (code)
13421 case FLOAT_EXPR:
13422 return true;
13424 case ABS_EXPR:
13425 return RECURSE (op0);
13427 CASE_CONVERT:
13429 tree type = TREE_TYPE (op0);
13430 if (TREE_CODE (type) == INTEGER_TYPE)
13431 return true;
13432 if (TREE_CODE (type) == REAL_TYPE)
13433 return RECURSE (op0);
13434 break;
13437 default:
13438 break;
13440 return false;
13443 /* Return true if the floating point result of (CODE OP0 OP1) has an
13444 integer value. We also allow +Inf, -Inf and NaN to be considered
13445 integer values. Return false for signaling NaN.
13447 DEPTH is the current nesting depth of the query. */
13449 bool
13450 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13452 switch (code)
13454 case PLUS_EXPR:
13455 case MINUS_EXPR:
13456 case MULT_EXPR:
13457 case MIN_EXPR:
13458 case MAX_EXPR:
13459 return RECURSE (op0) && RECURSE (op1);
13461 default:
13462 break;
13464 return false;
13467 /* Return true if the floating point result of calling FNDECL with arguments
13468 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13469 considered integer values. Return false for signaling NaN. If FNDECL
13470 takes fewer than 2 arguments, the remaining ARGn are null.
13472 DEPTH is the current nesting depth of the query. */
13474 bool
13475 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13477 switch (fn)
13479 CASE_CFN_CEIL:
13480 CASE_CFN_FLOOR:
13481 CASE_CFN_NEARBYINT:
13482 CASE_CFN_RINT:
13483 CASE_CFN_ROUND:
13484 CASE_CFN_TRUNC:
13485 return true;
13487 CASE_CFN_FMIN:
13488 CASE_CFN_FMIN_FN:
13489 CASE_CFN_FMAX:
13490 CASE_CFN_FMAX_FN:
13491 return RECURSE (arg0) && RECURSE (arg1);
13493 default:
13494 break;
13496 return false;
13499 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13500 has an integer value. We also allow +Inf, -Inf and NaN to be
13501 considered integer values. Return false for signaling NaN.
13503 DEPTH is the current nesting depth of the query. */
13505 bool
13506 integer_valued_real_single_p (tree t, int depth)
13508 switch (TREE_CODE (t))
13510 case REAL_CST:
13511 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13513 case COND_EXPR:
13514 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13516 case SSA_NAME:
13517 /* Limit the depth of recursion to avoid quadratic behavior.
13518 This is expected to catch almost all occurrences in practice.
13519 If this code misses important cases that unbounded recursion
13520 would not, passes that need this information could be revised
13521 to provide it through dataflow propagation. */
13522 return (!name_registered_for_update_p (t)
13523 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13524 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13525 depth));
13527 default:
13528 break;
13530 return false;
13533 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13534 has an integer value. We also allow +Inf, -Inf and NaN to be
13535 considered integer values. Return false for signaling NaN.
13537 DEPTH is the current nesting depth of the query. */
13539 static bool
13540 integer_valued_real_invalid_p (tree t, int depth)
13542 switch (TREE_CODE (t))
13544 case COMPOUND_EXPR:
13545 case MODIFY_EXPR:
13546 case BIND_EXPR:
13547 return RECURSE (TREE_OPERAND (t, 1));
13549 case SAVE_EXPR:
13550 return RECURSE (TREE_OPERAND (t, 0));
13552 default:
13553 break;
13555 return false;
13558 #undef RECURSE
13559 #undef integer_valued_real_p
13561 /* Return true if the floating point expression T has an integer value.
13562 We also allow +Inf, -Inf and NaN to be considered integer values.
13563 Return false for signaling NaN.
13565 DEPTH is the current nesting depth of the query. */
13567 bool
13568 integer_valued_real_p (tree t, int depth)
13570 if (t == error_mark_node)
13571 return false;
13573 tree_code code = TREE_CODE (t);
13574 switch (TREE_CODE_CLASS (code))
13576 case tcc_binary:
13577 case tcc_comparison:
13578 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13579 TREE_OPERAND (t, 1), depth);
13581 case tcc_unary:
13582 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13584 case tcc_constant:
13585 case tcc_declaration:
13586 case tcc_reference:
13587 return integer_valued_real_single_p (t, depth);
13589 default:
13590 break;
13593 switch (code)
13595 case COND_EXPR:
13596 case SSA_NAME:
13597 return integer_valued_real_single_p (t, depth);
13599 case CALL_EXPR:
13601 tree arg0 = (call_expr_nargs (t) > 0
13602 ? CALL_EXPR_ARG (t, 0)
13603 : NULL_TREE);
13604 tree arg1 = (call_expr_nargs (t) > 1
13605 ? CALL_EXPR_ARG (t, 1)
13606 : NULL_TREE);
13607 return integer_valued_real_call_p (get_call_combined_fn (t),
13608 arg0, arg1, depth);
13611 default:
13612 return integer_valued_real_invalid_p (t, depth);
13616 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13617 attempt to fold the expression to a constant without modifying TYPE,
13618 OP0 or OP1.
13620 If the expression could be simplified to a constant, then return
13621 the constant. If the expression would not be simplified to a
13622 constant, then return NULL_TREE. */
13624 tree
13625 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13627 tree tem = fold_binary (code, type, op0, op1);
13628 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13631 /* Given the components of a unary expression CODE, TYPE and OP0,
13632 attempt to fold the expression to a constant without modifying
13633 TYPE or OP0.
13635 If the expression could be simplified to a constant, then return
13636 the constant. If the expression would not be simplified to a
13637 constant, then return NULL_TREE. */
13639 tree
13640 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13642 tree tem = fold_unary (code, type, op0);
13643 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13646 /* If EXP represents referencing an element in a constant string
13647 (either via pointer arithmetic or array indexing), return the
13648 tree representing the value accessed, otherwise return NULL. */
13650 tree
13651 fold_read_from_constant_string (tree exp)
13653 if ((TREE_CODE (exp) == INDIRECT_REF
13654 || TREE_CODE (exp) == ARRAY_REF)
13655 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13657 tree exp1 = TREE_OPERAND (exp, 0);
13658 tree index;
13659 tree string;
13660 location_t loc = EXPR_LOCATION (exp);
13662 if (TREE_CODE (exp) == INDIRECT_REF)
13663 string = string_constant (exp1, &index);
13664 else
13666 tree low_bound = array_ref_low_bound (exp);
13667 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13669 /* Optimize the special-case of a zero lower bound.
13671 We convert the low_bound to sizetype to avoid some problems
13672 with constant folding. (E.g. suppose the lower bound is 1,
13673 and its mode is QI. Without the conversion,l (ARRAY
13674 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13675 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13676 if (! integer_zerop (low_bound))
13677 index = size_diffop_loc (loc, index,
13678 fold_convert_loc (loc, sizetype, low_bound));
13680 string = exp1;
13683 scalar_int_mode char_mode;
13684 if (string
13685 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13686 && TREE_CODE (string) == STRING_CST
13687 && TREE_CODE (index) == INTEGER_CST
13688 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13689 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
13690 &char_mode)
13691 && GET_MODE_SIZE (char_mode) == 1)
13692 return build_int_cst_type (TREE_TYPE (exp),
13693 (TREE_STRING_POINTER (string)
13694 [TREE_INT_CST_LOW (index)]));
13696 return NULL;
13699 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13700 an integer constant, real, or fixed-point constant.
13702 TYPE is the type of the result. */
13704 static tree
13705 fold_negate_const (tree arg0, tree type)
13707 tree t = NULL_TREE;
13709 switch (TREE_CODE (arg0))
13711 case REAL_CST:
13712 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13713 break;
13715 case FIXED_CST:
13717 FIXED_VALUE_TYPE f;
13718 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13719 &(TREE_FIXED_CST (arg0)), NULL,
13720 TYPE_SATURATING (type));
13721 t = build_fixed (type, f);
13722 /* Propagate overflow flags. */
13723 if (overflow_p | TREE_OVERFLOW (arg0))
13724 TREE_OVERFLOW (t) = 1;
13725 break;
13728 default:
13729 if (poly_int_tree_p (arg0))
13731 bool overflow;
13732 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
13733 t = force_fit_type (type, res, 1,
13734 (overflow && ! TYPE_UNSIGNED (type))
13735 || TREE_OVERFLOW (arg0));
13736 break;
13739 gcc_unreachable ();
13742 return t;
13745 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13746 an integer constant or real constant.
13748 TYPE is the type of the result. */
13750 tree
13751 fold_abs_const (tree arg0, tree type)
13753 tree t = NULL_TREE;
13755 switch (TREE_CODE (arg0))
13757 case INTEGER_CST:
13759 /* If the value is unsigned or non-negative, then the absolute value
13760 is the same as the ordinary value. */
13761 if (!wi::neg_p (wi::to_wide (arg0), TYPE_SIGN (type)))
13762 t = arg0;
13764 /* If the value is negative, then the absolute value is
13765 its negation. */
13766 else
13768 bool overflow;
13769 wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
13770 t = force_fit_type (type, val, -1,
13771 overflow | TREE_OVERFLOW (arg0));
13774 break;
13776 case REAL_CST:
13777 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13778 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13779 else
13780 t = arg0;
13781 break;
13783 default:
13784 gcc_unreachable ();
13787 return t;
13790 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13791 constant. TYPE is the type of the result. */
13793 static tree
13794 fold_not_const (const_tree arg0, tree type)
13796 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13798 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
13801 /* Given CODE, a relational operator, the target type, TYPE and two
13802 constant operands OP0 and OP1, return the result of the
13803 relational operation. If the result is not a compile time
13804 constant, then return NULL_TREE. */
13806 static tree
13807 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13809 int result, invert;
13811 /* From here on, the only cases we handle are when the result is
13812 known to be a constant. */
13814 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13816 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13817 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13819 /* Handle the cases where either operand is a NaN. */
13820 if (real_isnan (c0) || real_isnan (c1))
13822 switch (code)
13824 case EQ_EXPR:
13825 case ORDERED_EXPR:
13826 result = 0;
13827 break;
13829 case NE_EXPR:
13830 case UNORDERED_EXPR:
13831 case UNLT_EXPR:
13832 case UNLE_EXPR:
13833 case UNGT_EXPR:
13834 case UNGE_EXPR:
13835 case UNEQ_EXPR:
13836 result = 1;
13837 break;
13839 case LT_EXPR:
13840 case LE_EXPR:
13841 case GT_EXPR:
13842 case GE_EXPR:
13843 case LTGT_EXPR:
13844 if (flag_trapping_math)
13845 return NULL_TREE;
13846 result = 0;
13847 break;
13849 default:
13850 gcc_unreachable ();
13853 return constant_boolean_node (result, type);
13856 return constant_boolean_node (real_compare (code, c0, c1), type);
13859 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13861 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13862 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13863 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13866 /* Handle equality/inequality of complex constants. */
13867 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13869 tree rcond = fold_relational_const (code, type,
13870 TREE_REALPART (op0),
13871 TREE_REALPART (op1));
13872 tree icond = fold_relational_const (code, type,
13873 TREE_IMAGPART (op0),
13874 TREE_IMAGPART (op1));
13875 if (code == EQ_EXPR)
13876 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13877 else if (code == NE_EXPR)
13878 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13879 else
13880 return NULL_TREE;
13883 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13885 if (!VECTOR_TYPE_P (type))
13887 /* Have vector comparison with scalar boolean result. */
13888 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
13889 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
13890 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
13892 tree elem0 = VECTOR_CST_ELT (op0, i);
13893 tree elem1 = VECTOR_CST_ELT (op1, i);
13894 tree tmp = fold_relational_const (code, type, elem0, elem1);
13895 if (tmp == NULL_TREE)
13896 return NULL_TREE;
13897 if (integer_zerop (tmp))
13898 return constant_boolean_node (false, type);
13900 return constant_boolean_node (true, type);
13902 tree_vector_builder elts;
13903 if (!elts.new_binary_operation (type, op0, op1, false))
13904 return NULL_TREE;
13905 unsigned int count = elts.encoded_nelts ();
13906 for (unsigned i = 0; i < count; i++)
13908 tree elem_type = TREE_TYPE (type);
13909 tree elem0 = VECTOR_CST_ELT (op0, i);
13910 tree elem1 = VECTOR_CST_ELT (op1, i);
13912 tree tem = fold_relational_const (code, elem_type,
13913 elem0, elem1);
13915 if (tem == NULL_TREE)
13916 return NULL_TREE;
13918 elts.quick_push (build_int_cst (elem_type,
13919 integer_zerop (tem) ? 0 : -1));
13922 return elts.build ();
13925 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13927 To compute GT, swap the arguments and do LT.
13928 To compute GE, do LT and invert the result.
13929 To compute LE, swap the arguments, do LT and invert the result.
13930 To compute NE, do EQ and invert the result.
13932 Therefore, the code below must handle only EQ and LT. */
13934 if (code == LE_EXPR || code == GT_EXPR)
13936 std::swap (op0, op1);
13937 code = swap_tree_comparison (code);
13940 /* Note that it is safe to invert for real values here because we
13941 have already handled the one case that it matters. */
13943 invert = 0;
13944 if (code == NE_EXPR || code == GE_EXPR)
13946 invert = 1;
13947 code = invert_tree_comparison (code, false);
13950 /* Compute a result for LT or EQ if args permit;
13951 Otherwise return T. */
13952 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13954 if (code == EQ_EXPR)
13955 result = tree_int_cst_equal (op0, op1);
13956 else
13957 result = tree_int_cst_lt (op0, op1);
13959 else
13960 return NULL_TREE;
13962 if (invert)
13963 result ^= 1;
13964 return constant_boolean_node (result, type);
13967 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13968 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13969 itself. */
13971 tree
13972 fold_build_cleanup_point_expr (tree type, tree expr)
13974 /* If the expression does not have side effects then we don't have to wrap
13975 it with a cleanup point expression. */
13976 if (!TREE_SIDE_EFFECTS (expr))
13977 return expr;
13979 /* If the expression is a return, check to see if the expression inside the
13980 return has no side effects or the right hand side of the modify expression
13981 inside the return. If either don't have side effects set we don't need to
13982 wrap the expression in a cleanup point expression. Note we don't check the
13983 left hand side of the modify because it should always be a return decl. */
13984 if (TREE_CODE (expr) == RETURN_EXPR)
13986 tree op = TREE_OPERAND (expr, 0);
13987 if (!op || !TREE_SIDE_EFFECTS (op))
13988 return expr;
13989 op = TREE_OPERAND (op, 1);
13990 if (!TREE_SIDE_EFFECTS (op))
13991 return expr;
13994 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
13997 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13998 of an indirection through OP0, or NULL_TREE if no simplification is
13999 possible. */
14001 tree
14002 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14004 tree sub = op0;
14005 tree subtype;
14007 STRIP_NOPS (sub);
14008 subtype = TREE_TYPE (sub);
14009 if (!POINTER_TYPE_P (subtype)
14010 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
14011 return NULL_TREE;
14013 if (TREE_CODE (sub) == ADDR_EXPR)
14015 tree op = TREE_OPERAND (sub, 0);
14016 tree optype = TREE_TYPE (op);
14017 /* *&CONST_DECL -> to the value of the const decl. */
14018 if (TREE_CODE (op) == CONST_DECL)
14019 return DECL_INITIAL (op);
14020 /* *&p => p; make sure to handle *&"str"[cst] here. */
14021 if (type == optype)
14023 tree fop = fold_read_from_constant_string (op);
14024 if (fop)
14025 return fop;
14026 else
14027 return op;
14029 /* *(foo *)&fooarray => fooarray[0] */
14030 else if (TREE_CODE (optype) == ARRAY_TYPE
14031 && type == TREE_TYPE (optype)
14032 && (!in_gimple_form
14033 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14035 tree type_domain = TYPE_DOMAIN (optype);
14036 tree min_val = size_zero_node;
14037 if (type_domain && TYPE_MIN_VALUE (type_domain))
14038 min_val = TYPE_MIN_VALUE (type_domain);
14039 if (in_gimple_form
14040 && TREE_CODE (min_val) != INTEGER_CST)
14041 return NULL_TREE;
14042 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14043 NULL_TREE, NULL_TREE);
14045 /* *(foo *)&complexfoo => __real__ complexfoo */
14046 else if (TREE_CODE (optype) == COMPLEX_TYPE
14047 && type == TREE_TYPE (optype))
14048 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14049 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14050 else if (TREE_CODE (optype) == VECTOR_TYPE
14051 && type == TREE_TYPE (optype))
14053 tree part_width = TYPE_SIZE (type);
14054 tree index = bitsize_int (0);
14055 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14059 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14060 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14062 tree op00 = TREE_OPERAND (sub, 0);
14063 tree op01 = TREE_OPERAND (sub, 1);
14065 STRIP_NOPS (op00);
14066 if (TREE_CODE (op00) == ADDR_EXPR)
14068 tree op00type;
14069 op00 = TREE_OPERAND (op00, 0);
14070 op00type = TREE_TYPE (op00);
14072 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14073 if (TREE_CODE (op00type) == VECTOR_TYPE
14074 && type == TREE_TYPE (op00type))
14076 tree part_width = TYPE_SIZE (type);
14077 unsigned HOST_WIDE_INT max_offset
14078 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
14079 * TYPE_VECTOR_SUBPARTS (op00type));
14080 if (tree_int_cst_sign_bit (op01) == 0
14081 && compare_tree_int (op01, max_offset) == -1)
14083 unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
14084 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14085 tree index = bitsize_int (indexi);
14086 return fold_build3_loc (loc,
14087 BIT_FIELD_REF, type, op00,
14088 part_width, index);
14091 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14092 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14093 && type == TREE_TYPE (op00type))
14095 tree size = TYPE_SIZE_UNIT (type);
14096 if (tree_int_cst_equal (size, op01))
14097 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14099 /* ((foo *)&fooarray)[1] => fooarray[1] */
14100 else if (TREE_CODE (op00type) == ARRAY_TYPE
14101 && type == TREE_TYPE (op00type))
14103 tree type_domain = TYPE_DOMAIN (op00type);
14104 tree min = size_zero_node;
14105 if (type_domain && TYPE_MIN_VALUE (type_domain))
14106 min = TYPE_MIN_VALUE (type_domain);
14107 offset_int off = wi::to_offset (op01);
14108 offset_int el_sz = wi::to_offset (TYPE_SIZE_UNIT (type));
14109 offset_int remainder;
14110 off = wi::divmod_trunc (off, el_sz, SIGNED, &remainder);
14111 if (remainder == 0 && TREE_CODE (min) == INTEGER_CST)
14113 off = off + wi::to_offset (min);
14114 op01 = wide_int_to_tree (sizetype, off);
14115 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14116 NULL_TREE, NULL_TREE);
14122 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14123 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14124 && type == TREE_TYPE (TREE_TYPE (subtype))
14125 && (!in_gimple_form
14126 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14128 tree type_domain;
14129 tree min_val = size_zero_node;
14130 sub = build_fold_indirect_ref_loc (loc, sub);
14131 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14132 if (type_domain && TYPE_MIN_VALUE (type_domain))
14133 min_val = TYPE_MIN_VALUE (type_domain);
14134 if (in_gimple_form
14135 && TREE_CODE (min_val) != INTEGER_CST)
14136 return NULL_TREE;
14137 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14138 NULL_TREE);
14141 return NULL_TREE;
14144 /* Builds an expression for an indirection through T, simplifying some
14145 cases. */
14147 tree
14148 build_fold_indirect_ref_loc (location_t loc, tree t)
14150 tree type = TREE_TYPE (TREE_TYPE (t));
14151 tree sub = fold_indirect_ref_1 (loc, type, t);
14153 if (sub)
14154 return sub;
14156 return build1_loc (loc, INDIRECT_REF, type, t);
14159 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14161 tree
14162 fold_indirect_ref_loc (location_t loc, tree t)
14164 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14166 if (sub)
14167 return sub;
14168 else
14169 return t;
14172 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14173 whose result is ignored. The type of the returned tree need not be
14174 the same as the original expression. */
14176 tree
14177 fold_ignored_result (tree t)
14179 if (!TREE_SIDE_EFFECTS (t))
14180 return integer_zero_node;
14182 for (;;)
14183 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14185 case tcc_unary:
14186 t = TREE_OPERAND (t, 0);
14187 break;
14189 case tcc_binary:
14190 case tcc_comparison:
14191 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14192 t = TREE_OPERAND (t, 0);
14193 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14194 t = TREE_OPERAND (t, 1);
14195 else
14196 return t;
14197 break;
14199 case tcc_expression:
14200 switch (TREE_CODE (t))
14202 case COMPOUND_EXPR:
14203 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14204 return t;
14205 t = TREE_OPERAND (t, 0);
14206 break;
14208 case COND_EXPR:
14209 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14210 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14211 return t;
14212 t = TREE_OPERAND (t, 0);
14213 break;
14215 default:
14216 return t;
14218 break;
14220 default:
14221 return t;
14225 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14227 tree
14228 round_up_loc (location_t loc, tree value, unsigned int divisor)
14230 tree div = NULL_TREE;
14232 if (divisor == 1)
14233 return value;
14235 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14236 have to do anything. Only do this when we are not given a const,
14237 because in that case, this check is more expensive than just
14238 doing it. */
14239 if (TREE_CODE (value) != INTEGER_CST)
14241 div = build_int_cst (TREE_TYPE (value), divisor);
14243 if (multiple_of_p (TREE_TYPE (value), value, div))
14244 return value;
14247 /* If divisor is a power of two, simplify this to bit manipulation. */
14248 if (pow2_or_zerop (divisor))
14250 if (TREE_CODE (value) == INTEGER_CST)
14252 wide_int val = wi::to_wide (value);
14253 bool overflow_p;
14255 if ((val & (divisor - 1)) == 0)
14256 return value;
14258 overflow_p = TREE_OVERFLOW (value);
14259 val += divisor - 1;
14260 val &= (int) -divisor;
14261 if (val == 0)
14262 overflow_p = true;
14264 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14266 else
14268 tree t;
14270 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14271 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14272 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14273 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14276 else
14278 if (!div)
14279 div = build_int_cst (TREE_TYPE (value), divisor);
14280 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14281 value = size_binop_loc (loc, MULT_EXPR, value, div);
14284 return value;
14287 /* Likewise, but round down. */
14289 tree
14290 round_down_loc (location_t loc, tree value, int divisor)
14292 tree div = NULL_TREE;
14294 gcc_assert (divisor > 0);
14295 if (divisor == 1)
14296 return value;
14298 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14299 have to do anything. Only do this when we are not given a const,
14300 because in that case, this check is more expensive than just
14301 doing it. */
14302 if (TREE_CODE (value) != INTEGER_CST)
14304 div = build_int_cst (TREE_TYPE (value), divisor);
14306 if (multiple_of_p (TREE_TYPE (value), value, div))
14307 return value;
14310 /* If divisor is a power of two, simplify this to bit manipulation. */
14311 if (pow2_or_zerop (divisor))
14313 tree t;
14315 t = build_int_cst (TREE_TYPE (value), -divisor);
14316 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14318 else
14320 if (!div)
14321 div = build_int_cst (TREE_TYPE (value), divisor);
14322 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14323 value = size_binop_loc (loc, MULT_EXPR, value, div);
14326 return value;
14329 /* Returns the pointer to the base of the object addressed by EXP and
14330 extracts the information about the offset of the access, storing it
14331 to PBITPOS and POFFSET. */
14333 static tree
14334 split_address_to_core_and_offset (tree exp,
14335 HOST_WIDE_INT *pbitpos, tree *poffset)
14337 tree core;
14338 machine_mode mode;
14339 int unsignedp, reversep, volatilep;
14340 HOST_WIDE_INT bitsize;
14341 location_t loc = EXPR_LOCATION (exp);
14343 if (TREE_CODE (exp) == ADDR_EXPR)
14345 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14346 poffset, &mode, &unsignedp, &reversep,
14347 &volatilep);
14348 core = build_fold_addr_expr_loc (loc, core);
14350 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
14352 core = TREE_OPERAND (exp, 0);
14353 STRIP_NOPS (core);
14354 *pbitpos = 0;
14355 *poffset = TREE_OPERAND (exp, 1);
14356 if (TREE_CODE (*poffset) == INTEGER_CST)
14358 offset_int tem = wi::sext (wi::to_offset (*poffset),
14359 TYPE_PRECISION (TREE_TYPE (*poffset)));
14360 tem <<= LOG2_BITS_PER_UNIT;
14361 if (wi::fits_shwi_p (tem))
14363 *pbitpos = tem.to_shwi ();
14364 *poffset = NULL_TREE;
14368 else
14370 core = exp;
14371 *pbitpos = 0;
14372 *poffset = NULL_TREE;
14375 return core;
14378 /* Returns true if addresses of E1 and E2 differ by a constant, false
14379 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14381 bool
14382 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14384 tree core1, core2;
14385 HOST_WIDE_INT bitpos1, bitpos2;
14386 tree toffset1, toffset2, tdiff, type;
14388 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14389 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14391 if (bitpos1 % BITS_PER_UNIT != 0
14392 || bitpos2 % BITS_PER_UNIT != 0
14393 || !operand_equal_p (core1, core2, 0))
14394 return false;
14396 if (toffset1 && toffset2)
14398 type = TREE_TYPE (toffset1);
14399 if (type != TREE_TYPE (toffset2))
14400 toffset2 = fold_convert (type, toffset2);
14402 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14403 if (!cst_and_fits_in_hwi (tdiff))
14404 return false;
14406 *diff = int_cst_value (tdiff);
14408 else if (toffset1 || toffset2)
14410 /* If only one of the offsets is non-constant, the difference cannot
14411 be a constant. */
14412 return false;
14414 else
14415 *diff = 0;
14417 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14418 return true;
14421 /* Return OFF converted to a pointer offset type suitable as offset for
14422 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14423 tree
14424 convert_to_ptrofftype_loc (location_t loc, tree off)
14426 return fold_convert_loc (loc, sizetype, off);
14429 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14430 tree
14431 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14433 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14434 ptr, convert_to_ptrofftype_loc (loc, off));
14437 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14438 tree
14439 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14441 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14442 ptr, size_int (off));
14445 /* Return a char pointer for a C string if it is a string constant
14446 or sum of string constant and integer constant. We only support
14447 string constants properly terminated with '\0' character.
14448 If STRLEN is a valid pointer, length (including terminating character)
14449 of returned string is stored to the argument. */
14451 const char *
14452 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
14454 tree offset_node;
14456 if (strlen)
14457 *strlen = 0;
14459 src = string_constant (src, &offset_node);
14460 if (src == 0)
14461 return NULL;
14463 unsigned HOST_WIDE_INT offset = 0;
14464 if (offset_node != NULL_TREE)
14466 if (!tree_fits_uhwi_p (offset_node))
14467 return NULL;
14468 else
14469 offset = tree_to_uhwi (offset_node);
14472 unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
14473 const char *string = TREE_STRING_POINTER (src);
14475 /* Support only properly null-terminated strings. */
14476 if (string_length == 0
14477 || string[string_length - 1] != '\0'
14478 || offset >= string_length)
14479 return NULL;
14481 if (strlen)
14482 *strlen = string_length - offset;
14483 return string + offset;
14486 #if CHECKING_P
14488 namespace selftest {
14490 /* Helper functions for writing tests of folding trees. */
14492 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
14494 static void
14495 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
14496 tree constant)
14498 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
14501 /* Verify that the binary op (LHS CODE RHS) folds to an NON_LVALUE_EXPR
14502 wrapping WRAPPED_EXPR. */
14504 static void
14505 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
14506 tree wrapped_expr)
14508 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
14509 ASSERT_NE (wrapped_expr, result);
14510 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
14511 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
14514 /* Verify that various arithmetic binary operations are folded
14515 correctly. */
14517 static void
14518 test_arithmetic_folding ()
14520 tree type = integer_type_node;
14521 tree x = create_tmp_var_raw (type, "x");
14522 tree zero = build_zero_cst (type);
14523 tree one = build_int_cst (type, 1);
14525 /* Addition. */
14526 /* 1 <-- (0 + 1) */
14527 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
14528 one);
14529 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
14530 one);
14532 /* (nonlvalue)x <-- (x + 0) */
14533 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
14536 /* Subtraction. */
14537 /* 0 <-- (x - x) */
14538 assert_binop_folds_to_const (x, MINUS_EXPR, x,
14539 zero);
14540 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
14543 /* Multiplication. */
14544 /* 0 <-- (x * 0) */
14545 assert_binop_folds_to_const (x, MULT_EXPR, zero,
14546 zero);
14548 /* (nonlvalue)x <-- (x * 1) */
14549 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
14553 /* Verify that various binary operations on vectors are folded
14554 correctly. */
14556 static void
14557 test_vector_folding ()
14559 tree inner_type = integer_type_node;
14560 tree type = build_vector_type (inner_type, 4);
14561 tree zero = build_zero_cst (type);
14562 tree one = build_one_cst (type);
14564 /* Verify equality tests that return a scalar boolean result. */
14565 tree res_type = boolean_type_node;
14566 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
14567 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
14568 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
14569 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
14572 /* Verify folding of VEC_DUPLICATE_EXPRs. */
14574 static void
14575 test_vec_duplicate_folding ()
14577 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
14578 machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
14579 /* This will be 1 if VEC_MODE isn't a vector mode. */
14580 unsigned int nunits = GET_MODE_NUNITS (vec_mode);
14582 tree type = build_vector_type (ssizetype, nunits);
14583 tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
14584 tree dup5_cst = build_vector_from_val (type, ssize_int (5));
14585 ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
14588 /* Run all of the selftests within this file. */
14590 void
14591 fold_const_c_tests ()
14593 test_arithmetic_folding ();
14594 test_vector_folding ();
14595 test_vec_duplicate_folding ();
14598 } // namespace selftest
14600 #endif /* CHECKING_P */