testsuite: Update scanning symbol sections to support AIX.
[official-gcc.git] / gcc / fold-const.c
blobddf18f27cb7e0d7f8335908fff0b9ba5e84a68b3
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2020 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"
77 #include "stringpool.h"
78 #include "tree-vrp.h"
79 #include "tree-ssanames.h"
80 #include "selftest.h"
81 #include "stringpool.h"
82 #include "attribs.h"
83 #include "tree-vector-builder.h"
84 #include "vec-perm-indices.h"
86 /* Nonzero if we are folding constants inside an initializer; zero
87 otherwise. */
88 int folding_initializer = 0;
90 /* The following constants represent a bit based encoding of GCC's
91 comparison operators. This encoding simplifies transformations
92 on relational comparison operators, such as AND and OR. */
93 enum comparison_code {
94 COMPCODE_FALSE = 0,
95 COMPCODE_LT = 1,
96 COMPCODE_EQ = 2,
97 COMPCODE_LE = 3,
98 COMPCODE_GT = 4,
99 COMPCODE_LTGT = 5,
100 COMPCODE_GE = 6,
101 COMPCODE_ORD = 7,
102 COMPCODE_UNORD = 8,
103 COMPCODE_UNLT = 9,
104 COMPCODE_UNEQ = 10,
105 COMPCODE_UNLE = 11,
106 COMPCODE_UNGT = 12,
107 COMPCODE_NE = 13,
108 COMPCODE_UNGE = 14,
109 COMPCODE_TRUE = 15
112 static bool negate_expr_p (tree);
113 static tree negate_expr (tree);
114 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
115 static enum comparison_code comparison_to_compcode (enum tree_code);
116 static enum tree_code compcode_to_comparison (enum comparison_code);
117 static bool twoval_comparison_p (tree, tree *, tree *);
118 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
119 static tree optimize_bit_field_compare (location_t, enum tree_code,
120 tree, tree, tree);
121 static bool simple_operand_p (const_tree);
122 static bool simple_operand_p_2 (tree);
123 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
124 static tree range_predecessor (tree);
125 static tree range_successor (tree);
126 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
130 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
131 static tree fold_binary_op_with_conditional_arg (location_t,
132 enum tree_code, tree,
133 tree, tree,
134 tree, tree, int);
135 static tree fold_negate_const (tree, tree);
136 static tree fold_not_const (const_tree, tree);
137 static tree fold_relational_const (enum tree_code, tree, tree, tree);
138 static tree fold_convert_const (enum tree_code, tree, tree);
139 static tree fold_view_convert_expr (tree, tree);
140 static tree fold_negate_expr (location_t, tree);
143 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
144 Otherwise, return LOC. */
146 static location_t
147 expr_location_or (tree t, location_t loc)
149 location_t tloc = EXPR_LOCATION (t);
150 return tloc == UNKNOWN_LOCATION ? loc : tloc;
153 /* Similar to protected_set_expr_location, but never modify x in place,
154 if location can and needs to be set, unshare it. */
156 static inline tree
157 protected_set_expr_location_unshare (tree x, location_t loc)
159 if (CAN_HAVE_LOCATION_P (x)
160 && EXPR_LOCATION (x) != loc
161 && !(TREE_CODE (x) == SAVE_EXPR
162 || TREE_CODE (x) == TARGET_EXPR
163 || TREE_CODE (x) == BIND_EXPR))
165 x = copy_node (x);
166 SET_EXPR_LOCATION (x, loc);
168 return x;
171 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
172 division and returns the quotient. Otherwise returns
173 NULL_TREE. */
175 tree
176 div_if_zero_remainder (const_tree arg1, const_tree arg2)
178 widest_int quo;
180 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
181 SIGNED, &quo))
182 return wide_int_to_tree (TREE_TYPE (arg1), quo);
184 return NULL_TREE;
187 /* This is nonzero if we should defer warnings about undefined
188 overflow. This facility exists because these warnings are a
189 special case. The code to estimate loop iterations does not want
190 to issue any warnings, since it works with expressions which do not
191 occur in user code. Various bits of cleanup code call fold(), but
192 only use the result if it has certain characteristics (e.g., is a
193 constant); that code only wants to issue a warning if the result is
194 used. */
196 static int fold_deferring_overflow_warnings;
198 /* If a warning about undefined overflow is deferred, this is the
199 warning. Note that this may cause us to turn two warnings into
200 one, but that is fine since it is sufficient to only give one
201 warning per expression. */
203 static const char* fold_deferred_overflow_warning;
205 /* If a warning about undefined overflow is deferred, this is the
206 level at which the warning should be emitted. */
208 static enum warn_strict_overflow_code fold_deferred_overflow_code;
210 /* Start deferring overflow warnings. We could use a stack here to
211 permit nested calls, but at present it is not necessary. */
213 void
214 fold_defer_overflow_warnings (void)
216 ++fold_deferring_overflow_warnings;
219 /* Stop deferring overflow warnings. If there is a pending warning,
220 and ISSUE is true, then issue the warning if appropriate. STMT is
221 the statement with which the warning should be associated (used for
222 location information); STMT may be NULL. CODE is the level of the
223 warning--a warn_strict_overflow_code value. This function will use
224 the smaller of CODE and the deferred code when deciding whether to
225 issue the warning. CODE may be zero to mean to always use the
226 deferred code. */
228 void
229 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
231 const char *warnmsg;
232 location_t locus;
234 gcc_assert (fold_deferring_overflow_warnings > 0);
235 --fold_deferring_overflow_warnings;
236 if (fold_deferring_overflow_warnings > 0)
238 if (fold_deferred_overflow_warning != NULL
239 && code != 0
240 && code < (int) fold_deferred_overflow_code)
241 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
242 return;
245 warnmsg = fold_deferred_overflow_warning;
246 fold_deferred_overflow_warning = NULL;
248 if (!issue || warnmsg == NULL)
249 return;
251 if (gimple_no_warning_p (stmt))
252 return;
254 /* Use the smallest code level when deciding to issue the
255 warning. */
256 if (code == 0 || code > (int) fold_deferred_overflow_code)
257 code = fold_deferred_overflow_code;
259 if (!issue_strict_overflow_warning (code))
260 return;
262 if (stmt == NULL)
263 locus = input_location;
264 else
265 locus = gimple_location (stmt);
266 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
269 /* Stop deferring overflow warnings, ignoring any deferred
270 warnings. */
272 void
273 fold_undefer_and_ignore_overflow_warnings (void)
275 fold_undefer_overflow_warnings (false, NULL, 0);
278 /* Whether we are deferring overflow warnings. */
280 bool
281 fold_deferring_overflow_warnings_p (void)
283 return fold_deferring_overflow_warnings > 0;
286 /* This is called when we fold something based on the fact that signed
287 overflow is undefined. */
289 void
290 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
292 if (fold_deferring_overflow_warnings > 0)
294 if (fold_deferred_overflow_warning == NULL
295 || wc < fold_deferred_overflow_code)
297 fold_deferred_overflow_warning = gmsgid;
298 fold_deferred_overflow_code = wc;
301 else if (issue_strict_overflow_warning (wc))
302 warning (OPT_Wstrict_overflow, gmsgid);
305 /* Return true if the built-in mathematical function specified by CODE
306 is odd, i.e. -f(x) == f(-x). */
308 bool
309 negate_mathfn_p (combined_fn fn)
311 switch (fn)
313 CASE_CFN_ASIN:
314 CASE_CFN_ASINH:
315 CASE_CFN_ATAN:
316 CASE_CFN_ATANH:
317 CASE_CFN_CASIN:
318 CASE_CFN_CASINH:
319 CASE_CFN_CATAN:
320 CASE_CFN_CATANH:
321 CASE_CFN_CBRT:
322 CASE_CFN_CPROJ:
323 CASE_CFN_CSIN:
324 CASE_CFN_CSINH:
325 CASE_CFN_CTAN:
326 CASE_CFN_CTANH:
327 CASE_CFN_ERF:
328 CASE_CFN_LLROUND:
329 CASE_CFN_LROUND:
330 CASE_CFN_ROUND:
331 CASE_CFN_ROUNDEVEN:
332 CASE_CFN_ROUNDEVEN_FN:
333 CASE_CFN_SIN:
334 CASE_CFN_SINH:
335 CASE_CFN_TAN:
336 CASE_CFN_TANH:
337 CASE_CFN_TRUNC:
338 return true;
340 CASE_CFN_LLRINT:
341 CASE_CFN_LRINT:
342 CASE_CFN_NEARBYINT:
343 CASE_CFN_RINT:
344 return !flag_rounding_math;
346 default:
347 break;
349 return false;
352 /* Check whether we may negate an integer constant T without causing
353 overflow. */
355 bool
356 may_negate_without_overflow_p (const_tree t)
358 tree type;
360 gcc_assert (TREE_CODE (t) == INTEGER_CST);
362 type = TREE_TYPE (t);
363 if (TYPE_UNSIGNED (type))
364 return false;
366 return !wi::only_sign_bit_p (wi::to_wide (t));
369 /* Determine whether an expression T can be cheaply negated using
370 the function negate_expr without introducing undefined overflow. */
372 static bool
373 negate_expr_p (tree t)
375 tree type;
377 if (t == 0)
378 return false;
380 type = TREE_TYPE (t);
382 STRIP_SIGN_NOPS (t);
383 switch (TREE_CODE (t))
385 case INTEGER_CST:
386 if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
387 return true;
389 /* Check that -CST will not overflow type. */
390 return may_negate_without_overflow_p (t);
391 case BIT_NOT_EXPR:
392 return (INTEGRAL_TYPE_P (type)
393 && TYPE_OVERFLOW_WRAPS (type));
395 case FIXED_CST:
396 return true;
398 case NEGATE_EXPR:
399 return !TYPE_OVERFLOW_SANITIZED (type);
401 case REAL_CST:
402 /* We want to canonicalize to positive real constants. Pretend
403 that only negative ones can be easily negated. */
404 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
406 case COMPLEX_CST:
407 return negate_expr_p (TREE_REALPART (t))
408 && negate_expr_p (TREE_IMAGPART (t));
410 case VECTOR_CST:
412 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
413 return true;
415 /* Steps don't prevent negation. */
416 unsigned int count = vector_cst_encoded_nelts (t);
417 for (unsigned int i = 0; i < count; ++i)
418 if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
419 return false;
421 return true;
424 case COMPLEX_EXPR:
425 return negate_expr_p (TREE_OPERAND (t, 0))
426 && negate_expr_p (TREE_OPERAND (t, 1));
428 case CONJ_EXPR:
429 return negate_expr_p (TREE_OPERAND (t, 0));
431 case PLUS_EXPR:
432 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
433 || HONOR_SIGNED_ZEROS (element_mode (type))
434 || (ANY_INTEGRAL_TYPE_P (type)
435 && ! TYPE_OVERFLOW_WRAPS (type)))
436 return false;
437 /* -(A + B) -> (-B) - A. */
438 if (negate_expr_p (TREE_OPERAND (t, 1)))
439 return true;
440 /* -(A + B) -> (-A) - B. */
441 return negate_expr_p (TREE_OPERAND (t, 0));
443 case MINUS_EXPR:
444 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
445 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
446 && !HONOR_SIGNED_ZEROS (element_mode (type))
447 && (! ANY_INTEGRAL_TYPE_P (type)
448 || TYPE_OVERFLOW_WRAPS (type));
450 case MULT_EXPR:
451 if (TYPE_UNSIGNED (type))
452 break;
453 /* INT_MIN/n * n doesn't overflow while negating one operand it does
454 if n is a (negative) power of two. */
455 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
456 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
457 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
458 && (wi::popcount
459 (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
460 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
461 && (wi::popcount
462 (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
463 break;
465 /* Fall through. */
467 case RDIV_EXPR:
468 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
469 return negate_expr_p (TREE_OPERAND (t, 1))
470 || negate_expr_p (TREE_OPERAND (t, 0));
471 break;
473 case TRUNC_DIV_EXPR:
474 case ROUND_DIV_EXPR:
475 case EXACT_DIV_EXPR:
476 if (TYPE_UNSIGNED (type))
477 break;
478 /* In general we can't negate A in A / B, because if A is INT_MIN and
479 B is not 1 we change the sign of the result. */
480 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
481 && negate_expr_p (TREE_OPERAND (t, 0)))
482 return true;
483 /* In general we can't negate B in A / B, because if A is INT_MIN and
484 B is 1, we may turn this into INT_MIN / -1 which is undefined
485 and actually traps on some architectures. */
486 if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
487 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
488 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
489 && ! integer_onep (TREE_OPERAND (t, 1))))
490 return negate_expr_p (TREE_OPERAND (t, 1));
491 break;
493 case NOP_EXPR:
494 /* Negate -((double)float) as (double)(-float). */
495 if (TREE_CODE (type) == REAL_TYPE)
497 tree tem = strip_float_extensions (t);
498 if (tem != t)
499 return negate_expr_p (tem);
501 break;
503 case CALL_EXPR:
504 /* Negate -f(x) as f(-x). */
505 if (negate_mathfn_p (get_call_combined_fn (t)))
506 return negate_expr_p (CALL_EXPR_ARG (t, 0));
507 break;
509 case RSHIFT_EXPR:
510 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
511 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
513 tree op1 = TREE_OPERAND (t, 1);
514 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
515 return true;
517 break;
519 default:
520 break;
522 return false;
525 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
526 simplification is possible.
527 If negate_expr_p would return true for T, NULL_TREE will never be
528 returned. */
530 static tree
531 fold_negate_expr_1 (location_t loc, tree t)
533 tree type = TREE_TYPE (t);
534 tree tem;
536 switch (TREE_CODE (t))
538 /* Convert - (~A) to A + 1. */
539 case BIT_NOT_EXPR:
540 if (INTEGRAL_TYPE_P (type))
541 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
542 build_one_cst (type));
543 break;
545 case INTEGER_CST:
546 tem = fold_negate_const (t, type);
547 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
548 || (ANY_INTEGRAL_TYPE_P (type)
549 && !TYPE_OVERFLOW_TRAPS (type)
550 && TYPE_OVERFLOW_WRAPS (type))
551 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
552 return tem;
553 break;
555 case POLY_INT_CST:
556 case REAL_CST:
557 case FIXED_CST:
558 tem = fold_negate_const (t, type);
559 return tem;
561 case COMPLEX_CST:
563 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
564 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
565 if (rpart && ipart)
566 return build_complex (type, rpart, ipart);
568 break;
570 case VECTOR_CST:
572 tree_vector_builder elts;
573 elts.new_unary_operation (type, t, true);
574 unsigned int count = elts.encoded_nelts ();
575 for (unsigned int i = 0; i < count; ++i)
577 tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
578 if (elt == NULL_TREE)
579 return NULL_TREE;
580 elts.quick_push (elt);
583 return elts.build ();
586 case COMPLEX_EXPR:
587 if (negate_expr_p (t))
588 return fold_build2_loc (loc, COMPLEX_EXPR, type,
589 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
590 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
591 break;
593 case CONJ_EXPR:
594 if (negate_expr_p (t))
595 return fold_build1_loc (loc, CONJ_EXPR, type,
596 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
597 break;
599 case NEGATE_EXPR:
600 if (!TYPE_OVERFLOW_SANITIZED (type))
601 return TREE_OPERAND (t, 0);
602 break;
604 case PLUS_EXPR:
605 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
606 && !HONOR_SIGNED_ZEROS (element_mode (type)))
608 /* -(A + B) -> (-B) - A. */
609 if (negate_expr_p (TREE_OPERAND (t, 1)))
611 tem = negate_expr (TREE_OPERAND (t, 1));
612 return fold_build2_loc (loc, MINUS_EXPR, type,
613 tem, TREE_OPERAND (t, 0));
616 /* -(A + B) -> (-A) - B. */
617 if (negate_expr_p (TREE_OPERAND (t, 0)))
619 tem = negate_expr (TREE_OPERAND (t, 0));
620 return fold_build2_loc (loc, MINUS_EXPR, type,
621 tem, TREE_OPERAND (t, 1));
624 break;
626 case MINUS_EXPR:
627 /* - (A - B) -> B - A */
628 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
629 && !HONOR_SIGNED_ZEROS (element_mode (type)))
630 return fold_build2_loc (loc, MINUS_EXPR, type,
631 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
632 break;
634 case MULT_EXPR:
635 if (TYPE_UNSIGNED (type))
636 break;
638 /* Fall through. */
640 case RDIV_EXPR:
641 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
643 tem = TREE_OPERAND (t, 1);
644 if (negate_expr_p (tem))
645 return fold_build2_loc (loc, TREE_CODE (t), type,
646 TREE_OPERAND (t, 0), negate_expr (tem));
647 tem = TREE_OPERAND (t, 0);
648 if (negate_expr_p (tem))
649 return fold_build2_loc (loc, TREE_CODE (t), type,
650 negate_expr (tem), TREE_OPERAND (t, 1));
652 break;
654 case TRUNC_DIV_EXPR:
655 case ROUND_DIV_EXPR:
656 case EXACT_DIV_EXPR:
657 if (TYPE_UNSIGNED (type))
658 break;
659 /* In general we can't negate A in A / B, because if A is INT_MIN and
660 B is not 1 we change the sign of the result. */
661 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
662 && negate_expr_p (TREE_OPERAND (t, 0)))
663 return fold_build2_loc (loc, TREE_CODE (t), type,
664 negate_expr (TREE_OPERAND (t, 0)),
665 TREE_OPERAND (t, 1));
666 /* In general we can't negate B in A / B, because if A is INT_MIN and
667 B is 1, we may turn this into INT_MIN / -1 which is undefined
668 and actually traps on some architectures. */
669 if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
670 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
671 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
672 && ! integer_onep (TREE_OPERAND (t, 1))))
673 && negate_expr_p (TREE_OPERAND (t, 1)))
674 return fold_build2_loc (loc, TREE_CODE (t), type,
675 TREE_OPERAND (t, 0),
676 negate_expr (TREE_OPERAND (t, 1)));
677 break;
679 case NOP_EXPR:
680 /* Convert -((double)float) into (double)(-float). */
681 if (TREE_CODE (type) == REAL_TYPE)
683 tem = strip_float_extensions (t);
684 if (tem != t && negate_expr_p (tem))
685 return fold_convert_loc (loc, type, negate_expr (tem));
687 break;
689 case CALL_EXPR:
690 /* Negate -f(x) as f(-x). */
691 if (negate_mathfn_p (get_call_combined_fn (t))
692 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
694 tree fndecl, arg;
696 fndecl = get_callee_fndecl (t);
697 arg = negate_expr (CALL_EXPR_ARG (t, 0));
698 return build_call_expr_loc (loc, fndecl, 1, arg);
700 break;
702 case RSHIFT_EXPR:
703 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
704 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
706 tree op1 = TREE_OPERAND (t, 1);
707 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
709 tree ntype = TYPE_UNSIGNED (type)
710 ? signed_type_for (type)
711 : unsigned_type_for (type);
712 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
713 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
714 return fold_convert_loc (loc, type, temp);
717 break;
719 default:
720 break;
723 return NULL_TREE;
726 /* A wrapper for fold_negate_expr_1. */
728 static tree
729 fold_negate_expr (location_t loc, tree t)
731 tree type = TREE_TYPE (t);
732 STRIP_SIGN_NOPS (t);
733 tree tem = fold_negate_expr_1 (loc, t);
734 if (tem == NULL_TREE)
735 return NULL_TREE;
736 return fold_convert_loc (loc, type, tem);
739 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
740 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
741 return NULL_TREE. */
743 static tree
744 negate_expr (tree t)
746 tree type, tem;
747 location_t loc;
749 if (t == NULL_TREE)
750 return NULL_TREE;
752 loc = EXPR_LOCATION (t);
753 type = TREE_TYPE (t);
754 STRIP_SIGN_NOPS (t);
756 tem = fold_negate_expr (loc, t);
757 if (!tem)
758 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
759 return fold_convert_loc (loc, type, tem);
762 /* Split a tree IN into a constant, literal and variable parts that could be
763 combined with CODE to make IN. "constant" means an expression with
764 TREE_CONSTANT but that isn't an actual constant. CODE must be a
765 commutative arithmetic operation. Store the constant part into *CONP,
766 the literal in *LITP and return the variable part. If a part isn't
767 present, set it to null. If the tree does not decompose in this way,
768 return the entire tree as the variable part and the other parts as null.
770 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
771 case, we negate an operand that was subtracted. Except if it is a
772 literal for which we use *MINUS_LITP instead.
774 If NEGATE_P is true, we are negating all of IN, again except a literal
775 for which we use *MINUS_LITP instead. If a variable part is of pointer
776 type, it is negated after converting to TYPE. This prevents us from
777 generating illegal MINUS pointer expression. LOC is the location of
778 the converted variable part.
780 If IN is itself a literal or constant, return it as appropriate.
782 Note that we do not guarantee that any of the three values will be the
783 same type as IN, but they will have the same signedness and mode. */
785 static tree
786 split_tree (tree in, tree type, enum tree_code code,
787 tree *minus_varp, tree *conp, tree *minus_conp,
788 tree *litp, tree *minus_litp, int negate_p)
790 tree var = 0;
791 *minus_varp = 0;
792 *conp = 0;
793 *minus_conp = 0;
794 *litp = 0;
795 *minus_litp = 0;
797 /* Strip any conversions that don't change the machine mode or signedness. */
798 STRIP_SIGN_NOPS (in);
800 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
801 || TREE_CODE (in) == FIXED_CST)
802 *litp = in;
803 else if (TREE_CODE (in) == code
804 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
805 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
806 /* We can associate addition and subtraction together (even
807 though the C standard doesn't say so) for integers because
808 the value is not affected. For reals, the value might be
809 affected, so we can't. */
810 && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
811 || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
812 || (code == MINUS_EXPR
813 && (TREE_CODE (in) == PLUS_EXPR
814 || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
816 tree op0 = TREE_OPERAND (in, 0);
817 tree op1 = TREE_OPERAND (in, 1);
818 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
819 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
821 /* First see if either of the operands is a literal, then a constant. */
822 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
823 || TREE_CODE (op0) == FIXED_CST)
824 *litp = op0, op0 = 0;
825 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
826 || TREE_CODE (op1) == FIXED_CST)
827 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
829 if (op0 != 0 && TREE_CONSTANT (op0))
830 *conp = op0, op0 = 0;
831 else if (op1 != 0 && TREE_CONSTANT (op1))
832 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
834 /* If we haven't dealt with either operand, this is not a case we can
835 decompose. Otherwise, VAR is either of the ones remaining, if any. */
836 if (op0 != 0 && op1 != 0)
837 var = in;
838 else if (op0 != 0)
839 var = op0;
840 else
841 var = op1, neg_var_p = neg1_p;
843 /* Now do any needed negations. */
844 if (neg_litp_p)
845 *minus_litp = *litp, *litp = 0;
846 if (neg_conp_p && *conp)
847 *minus_conp = *conp, *conp = 0;
848 if (neg_var_p && var)
849 *minus_varp = var, var = 0;
851 else if (TREE_CONSTANT (in))
852 *conp = in;
853 else if (TREE_CODE (in) == BIT_NOT_EXPR
854 && code == PLUS_EXPR)
856 /* -1 - X is folded to ~X, undo that here. Do _not_ do this
857 when IN is constant. */
858 *litp = build_minus_one_cst (type);
859 *minus_varp = TREE_OPERAND (in, 0);
861 else
862 var = in;
864 if (negate_p)
866 if (*litp)
867 *minus_litp = *litp, *litp = 0;
868 else if (*minus_litp)
869 *litp = *minus_litp, *minus_litp = 0;
870 if (*conp)
871 *minus_conp = *conp, *conp = 0;
872 else if (*minus_conp)
873 *conp = *minus_conp, *minus_conp = 0;
874 if (var)
875 *minus_varp = var, var = 0;
876 else if (*minus_varp)
877 var = *minus_varp, *minus_varp = 0;
880 if (*litp
881 && TREE_OVERFLOW_P (*litp))
882 *litp = drop_tree_overflow (*litp);
883 if (*minus_litp
884 && TREE_OVERFLOW_P (*minus_litp))
885 *minus_litp = drop_tree_overflow (*minus_litp);
887 return var;
890 /* Re-associate trees split by the above function. T1 and T2 are
891 either expressions to associate or null. Return the new
892 expression, if any. LOC is the location of the new expression. If
893 we build an operation, do it in TYPE and with CODE. */
895 static tree
896 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
898 if (t1 == 0)
900 gcc_assert (t2 == 0 || code != MINUS_EXPR);
901 return t2;
903 else if (t2 == 0)
904 return t1;
906 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
907 try to fold this since we will have infinite recursion. But do
908 deal with any NEGATE_EXPRs. */
909 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
910 || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
911 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
913 if (code == PLUS_EXPR)
915 if (TREE_CODE (t1) == NEGATE_EXPR)
916 return build2_loc (loc, MINUS_EXPR, type,
917 fold_convert_loc (loc, type, t2),
918 fold_convert_loc (loc, type,
919 TREE_OPERAND (t1, 0)));
920 else if (TREE_CODE (t2) == NEGATE_EXPR)
921 return build2_loc (loc, MINUS_EXPR, type,
922 fold_convert_loc (loc, type, t1),
923 fold_convert_loc (loc, type,
924 TREE_OPERAND (t2, 0)));
925 else if (integer_zerop (t2))
926 return fold_convert_loc (loc, type, t1);
928 else if (code == MINUS_EXPR)
930 if (integer_zerop (t2))
931 return fold_convert_loc (loc, type, t1);
934 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
935 fold_convert_loc (loc, type, t2));
938 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
939 fold_convert_loc (loc, type, t2));
942 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
943 for use in int_const_binop, size_binop and size_diffop. */
945 static bool
946 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
948 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
949 return false;
950 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
951 return false;
953 switch (code)
955 case LSHIFT_EXPR:
956 case RSHIFT_EXPR:
957 case LROTATE_EXPR:
958 case RROTATE_EXPR:
959 return true;
961 default:
962 break;
965 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
966 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
967 && TYPE_MODE (type1) == TYPE_MODE (type2);
970 /* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
971 a new constant in RES. Return FALSE if we don't know how to
972 evaluate CODE at compile-time. */
974 bool
975 wide_int_binop (wide_int &res,
976 enum tree_code code, const wide_int &arg1, const wide_int &arg2,
977 signop sign, wi::overflow_type *overflow)
979 wide_int tmp;
980 *overflow = wi::OVF_NONE;
981 switch (code)
983 case BIT_IOR_EXPR:
984 res = wi::bit_or (arg1, arg2);
985 break;
987 case BIT_XOR_EXPR:
988 res = wi::bit_xor (arg1, arg2);
989 break;
991 case BIT_AND_EXPR:
992 res = wi::bit_and (arg1, arg2);
993 break;
995 case RSHIFT_EXPR:
996 case LSHIFT_EXPR:
997 if (wi::neg_p (arg2))
999 tmp = -arg2;
1000 if (code == RSHIFT_EXPR)
1001 code = LSHIFT_EXPR;
1002 else
1003 code = RSHIFT_EXPR;
1005 else
1006 tmp = arg2;
1008 if (code == RSHIFT_EXPR)
1009 /* It's unclear from the C standard whether shifts can overflow.
1010 The following code ignores overflow; perhaps a C standard
1011 interpretation ruling is needed. */
1012 res = wi::rshift (arg1, tmp, sign);
1013 else
1014 res = wi::lshift (arg1, tmp);
1015 break;
1017 case RROTATE_EXPR:
1018 case LROTATE_EXPR:
1019 if (wi::neg_p (arg2))
1021 tmp = -arg2;
1022 if (code == RROTATE_EXPR)
1023 code = LROTATE_EXPR;
1024 else
1025 code = RROTATE_EXPR;
1027 else
1028 tmp = arg2;
1030 if (code == RROTATE_EXPR)
1031 res = wi::rrotate (arg1, tmp);
1032 else
1033 res = wi::lrotate (arg1, tmp);
1034 break;
1036 case PLUS_EXPR:
1037 res = wi::add (arg1, arg2, sign, overflow);
1038 break;
1040 case MINUS_EXPR:
1041 res = wi::sub (arg1, arg2, sign, overflow);
1042 break;
1044 case MULT_EXPR:
1045 res = wi::mul (arg1, arg2, sign, overflow);
1046 break;
1048 case MULT_HIGHPART_EXPR:
1049 res = wi::mul_high (arg1, arg2, sign);
1050 break;
1052 case TRUNC_DIV_EXPR:
1053 case EXACT_DIV_EXPR:
1054 if (arg2 == 0)
1055 return false;
1056 res = wi::div_trunc (arg1, arg2, sign, overflow);
1057 break;
1059 case FLOOR_DIV_EXPR:
1060 if (arg2 == 0)
1061 return false;
1062 res = wi::div_floor (arg1, arg2, sign, overflow);
1063 break;
1065 case CEIL_DIV_EXPR:
1066 if (arg2 == 0)
1067 return false;
1068 res = wi::div_ceil (arg1, arg2, sign, overflow);
1069 break;
1071 case ROUND_DIV_EXPR:
1072 if (arg2 == 0)
1073 return false;
1074 res = wi::div_round (arg1, arg2, sign, overflow);
1075 break;
1077 case TRUNC_MOD_EXPR:
1078 if (arg2 == 0)
1079 return false;
1080 res = wi::mod_trunc (arg1, arg2, sign, overflow);
1081 break;
1083 case FLOOR_MOD_EXPR:
1084 if (arg2 == 0)
1085 return false;
1086 res = wi::mod_floor (arg1, arg2, sign, overflow);
1087 break;
1089 case CEIL_MOD_EXPR:
1090 if (arg2 == 0)
1091 return false;
1092 res = wi::mod_ceil (arg1, arg2, sign, overflow);
1093 break;
1095 case ROUND_MOD_EXPR:
1096 if (arg2 == 0)
1097 return false;
1098 res = wi::mod_round (arg1, arg2, sign, overflow);
1099 break;
1101 case MIN_EXPR:
1102 res = wi::min (arg1, arg2, sign);
1103 break;
1105 case MAX_EXPR:
1106 res = wi::max (arg1, arg2, sign);
1107 break;
1109 default:
1110 return false;
1112 return true;
1115 /* Combine two poly int's ARG1 and ARG2 under operation CODE to
1116 produce a new constant in RES. Return FALSE if we don't know how
1117 to evaluate CODE at compile-time. */
1119 static bool
1120 poly_int_binop (poly_wide_int &res, enum tree_code code,
1121 const_tree arg1, const_tree arg2,
1122 signop sign, wi::overflow_type *overflow)
1124 gcc_assert (NUM_POLY_INT_COEFFS != 1);
1125 gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
1126 switch (code)
1128 case PLUS_EXPR:
1129 res = wi::add (wi::to_poly_wide (arg1),
1130 wi::to_poly_wide (arg2), sign, overflow);
1131 break;
1133 case MINUS_EXPR:
1134 res = wi::sub (wi::to_poly_wide (arg1),
1135 wi::to_poly_wide (arg2), sign, overflow);
1136 break;
1138 case MULT_EXPR:
1139 if (TREE_CODE (arg2) == INTEGER_CST)
1140 res = wi::mul (wi::to_poly_wide (arg1),
1141 wi::to_wide (arg2), sign, overflow);
1142 else if (TREE_CODE (arg1) == INTEGER_CST)
1143 res = wi::mul (wi::to_poly_wide (arg2),
1144 wi::to_wide (arg1), sign, overflow);
1145 else
1146 return NULL_TREE;
1147 break;
1149 case LSHIFT_EXPR:
1150 if (TREE_CODE (arg2) == INTEGER_CST)
1151 res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
1152 else
1153 return false;
1154 break;
1156 case BIT_IOR_EXPR:
1157 if (TREE_CODE (arg2) != INTEGER_CST
1158 || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
1159 &res))
1160 return false;
1161 break;
1163 default:
1164 return false;
1166 return true;
1169 /* Combine two integer constants ARG1 and ARG2 under operation CODE to
1170 produce a new constant. Return NULL_TREE if we don't know how to
1171 evaluate CODE at compile-time. */
1173 tree
1174 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
1175 int overflowable)
1177 poly_wide_int poly_res;
1178 tree type = TREE_TYPE (arg1);
1179 signop sign = TYPE_SIGN (type);
1180 wi::overflow_type overflow = wi::OVF_NONE;
1182 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1184 wide_int warg1 = wi::to_wide (arg1), res;
1185 wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
1186 if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
1187 return NULL_TREE;
1188 poly_res = res;
1190 else if (!poly_int_tree_p (arg1)
1191 || !poly_int_tree_p (arg2)
1192 || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
1193 return NULL_TREE;
1194 return force_fit_type (type, poly_res, overflowable,
1195 (((sign == SIGNED || overflowable == -1)
1196 && overflow)
1197 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
1200 /* Return true if binary operation OP distributes over addition in operand
1201 OPNO, with the other operand being held constant. OPNO counts from 1. */
1203 static bool
1204 distributes_over_addition_p (tree_code op, int opno)
1206 switch (op)
1208 case PLUS_EXPR:
1209 case MINUS_EXPR:
1210 case MULT_EXPR:
1211 return true;
1213 case LSHIFT_EXPR:
1214 return opno == 1;
1216 default:
1217 return false;
1221 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1222 constant. We assume ARG1 and ARG2 have the same data type, or at least
1223 are the same kind of constant and the same machine mode. Return zero if
1224 combining the constants is not allowed in the current operating mode. */
1226 static tree
1227 const_binop (enum tree_code code, tree arg1, tree arg2)
1229 /* Sanity check for the recursive cases. */
1230 if (!arg1 || !arg2)
1231 return NULL_TREE;
1233 STRIP_NOPS (arg1);
1234 STRIP_NOPS (arg2);
1236 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1238 if (code == POINTER_PLUS_EXPR)
1239 return int_const_binop (PLUS_EXPR,
1240 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1242 return int_const_binop (code, arg1, arg2);
1245 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1247 machine_mode mode;
1248 REAL_VALUE_TYPE d1;
1249 REAL_VALUE_TYPE d2;
1250 REAL_VALUE_TYPE value;
1251 REAL_VALUE_TYPE result;
1252 bool inexact;
1253 tree t, type;
1255 /* The following codes are handled by real_arithmetic. */
1256 switch (code)
1258 case PLUS_EXPR:
1259 case MINUS_EXPR:
1260 case MULT_EXPR:
1261 case RDIV_EXPR:
1262 case MIN_EXPR:
1263 case MAX_EXPR:
1264 break;
1266 default:
1267 return NULL_TREE;
1270 d1 = TREE_REAL_CST (arg1);
1271 d2 = TREE_REAL_CST (arg2);
1273 type = TREE_TYPE (arg1);
1274 mode = TYPE_MODE (type);
1276 /* Don't perform operation if we honor signaling NaNs and
1277 either operand is a signaling NaN. */
1278 if (HONOR_SNANS (mode)
1279 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1280 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1281 return NULL_TREE;
1283 /* Don't perform operation if it would raise a division
1284 by zero exception. */
1285 if (code == RDIV_EXPR
1286 && real_equal (&d2, &dconst0)
1287 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1288 return NULL_TREE;
1290 /* If either operand is a NaN, just return it. Otherwise, set up
1291 for floating-point trap; we return an overflow. */
1292 if (REAL_VALUE_ISNAN (d1))
1294 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1295 is off. */
1296 d1.signalling = 0;
1297 t = build_real (type, d1);
1298 return t;
1300 else if (REAL_VALUE_ISNAN (d2))
1302 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1303 is off. */
1304 d2.signalling = 0;
1305 t = build_real (type, d2);
1306 return t;
1309 inexact = real_arithmetic (&value, code, &d1, &d2);
1310 real_convert (&result, mode, &value);
1312 /* Don't constant fold this floating point operation if
1313 the result has overflowed and flag_trapping_math. */
1314 if (flag_trapping_math
1315 && MODE_HAS_INFINITIES (mode)
1316 && REAL_VALUE_ISINF (result)
1317 && !REAL_VALUE_ISINF (d1)
1318 && !REAL_VALUE_ISINF (d2))
1319 return NULL_TREE;
1321 /* Don't constant fold this floating point operation if the
1322 result may dependent upon the run-time rounding mode and
1323 flag_rounding_math is set, or if GCC's software emulation
1324 is unable to accurately represent the result. */
1325 if ((flag_rounding_math
1326 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1327 && (inexact || !real_identical (&result, &value)))
1328 return NULL_TREE;
1330 t = build_real (type, result);
1332 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1333 return t;
1336 if (TREE_CODE (arg1) == FIXED_CST)
1338 FIXED_VALUE_TYPE f1;
1339 FIXED_VALUE_TYPE f2;
1340 FIXED_VALUE_TYPE result;
1341 tree t, type;
1342 int sat_p;
1343 bool overflow_p;
1345 /* The following codes are handled by fixed_arithmetic. */
1346 switch (code)
1348 case PLUS_EXPR:
1349 case MINUS_EXPR:
1350 case MULT_EXPR:
1351 case TRUNC_DIV_EXPR:
1352 if (TREE_CODE (arg2) != FIXED_CST)
1353 return NULL_TREE;
1354 f2 = TREE_FIXED_CST (arg2);
1355 break;
1357 case LSHIFT_EXPR:
1358 case RSHIFT_EXPR:
1360 if (TREE_CODE (arg2) != INTEGER_CST)
1361 return NULL_TREE;
1362 wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
1363 f2.data.high = w2.elt (1);
1364 f2.data.low = w2.ulow ();
1365 f2.mode = SImode;
1367 break;
1369 default:
1370 return NULL_TREE;
1373 f1 = TREE_FIXED_CST (arg1);
1374 type = TREE_TYPE (arg1);
1375 sat_p = TYPE_SATURATING (type);
1376 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1377 t = build_fixed (type, result);
1378 /* Propagate overflow flags. */
1379 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1380 TREE_OVERFLOW (t) = 1;
1381 return t;
1384 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1386 tree type = TREE_TYPE (arg1);
1387 tree r1 = TREE_REALPART (arg1);
1388 tree i1 = TREE_IMAGPART (arg1);
1389 tree r2 = TREE_REALPART (arg2);
1390 tree i2 = TREE_IMAGPART (arg2);
1391 tree real, imag;
1393 switch (code)
1395 case PLUS_EXPR:
1396 case MINUS_EXPR:
1397 real = const_binop (code, r1, r2);
1398 imag = const_binop (code, i1, i2);
1399 break;
1401 case MULT_EXPR:
1402 if (COMPLEX_FLOAT_TYPE_P (type))
1403 return do_mpc_arg2 (arg1, arg2, type,
1404 /* do_nonfinite= */ folding_initializer,
1405 mpc_mul);
1407 real = const_binop (MINUS_EXPR,
1408 const_binop (MULT_EXPR, r1, r2),
1409 const_binop (MULT_EXPR, i1, i2));
1410 imag = const_binop (PLUS_EXPR,
1411 const_binop (MULT_EXPR, r1, i2),
1412 const_binop (MULT_EXPR, i1, r2));
1413 break;
1415 case RDIV_EXPR:
1416 if (COMPLEX_FLOAT_TYPE_P (type))
1417 return do_mpc_arg2 (arg1, arg2, type,
1418 /* do_nonfinite= */ folding_initializer,
1419 mpc_div);
1420 /* Fallthru. */
1421 case TRUNC_DIV_EXPR:
1422 case CEIL_DIV_EXPR:
1423 case FLOOR_DIV_EXPR:
1424 case ROUND_DIV_EXPR:
1425 if (flag_complex_method == 0)
1427 /* Keep this algorithm in sync with
1428 tree-complex.c:expand_complex_div_straight().
1430 Expand complex division to scalars, straightforward algorithm.
1431 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1432 t = br*br + bi*bi
1434 tree magsquared
1435 = const_binop (PLUS_EXPR,
1436 const_binop (MULT_EXPR, r2, r2),
1437 const_binop (MULT_EXPR, i2, i2));
1438 tree t1
1439 = const_binop (PLUS_EXPR,
1440 const_binop (MULT_EXPR, r1, r2),
1441 const_binop (MULT_EXPR, i1, i2));
1442 tree t2
1443 = const_binop (MINUS_EXPR,
1444 const_binop (MULT_EXPR, i1, r2),
1445 const_binop (MULT_EXPR, r1, i2));
1447 real = const_binop (code, t1, magsquared);
1448 imag = const_binop (code, t2, magsquared);
1450 else
1452 /* Keep this algorithm in sync with
1453 tree-complex.c:expand_complex_div_wide().
1455 Expand complex division to scalars, modified algorithm to minimize
1456 overflow with wide input ranges. */
1457 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1458 fold_abs_const (r2, TREE_TYPE (type)),
1459 fold_abs_const (i2, TREE_TYPE (type)));
1461 if (integer_nonzerop (compare))
1463 /* In the TRUE branch, we compute
1464 ratio = br/bi;
1465 div = (br * ratio) + bi;
1466 tr = (ar * ratio) + ai;
1467 ti = (ai * ratio) - ar;
1468 tr = tr / div;
1469 ti = ti / div; */
1470 tree ratio = const_binop (code, r2, i2);
1471 tree div = const_binop (PLUS_EXPR, i2,
1472 const_binop (MULT_EXPR, r2, ratio));
1473 real = const_binop (MULT_EXPR, r1, ratio);
1474 real = const_binop (PLUS_EXPR, real, i1);
1475 real = const_binop (code, real, div);
1477 imag = const_binop (MULT_EXPR, i1, ratio);
1478 imag = const_binop (MINUS_EXPR, imag, r1);
1479 imag = const_binop (code, imag, div);
1481 else
1483 /* In the FALSE branch, we compute
1484 ratio = d/c;
1485 divisor = (d * ratio) + c;
1486 tr = (b * ratio) + a;
1487 ti = b - (a * ratio);
1488 tr = tr / div;
1489 ti = ti / div; */
1490 tree ratio = const_binop (code, i2, r2);
1491 tree div = const_binop (PLUS_EXPR, r2,
1492 const_binop (MULT_EXPR, i2, ratio));
1494 real = const_binop (MULT_EXPR, i1, ratio);
1495 real = const_binop (PLUS_EXPR, real, r1);
1496 real = const_binop (code, real, div);
1498 imag = const_binop (MULT_EXPR, r1, ratio);
1499 imag = const_binop (MINUS_EXPR, i1, imag);
1500 imag = const_binop (code, imag, div);
1503 break;
1505 default:
1506 return NULL_TREE;
1509 if (real && imag)
1510 return build_complex (type, real, imag);
1513 if (TREE_CODE (arg1) == VECTOR_CST
1514 && TREE_CODE (arg2) == VECTOR_CST
1515 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
1516 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
1518 tree type = TREE_TYPE (arg1);
1519 bool step_ok_p;
1520 if (VECTOR_CST_STEPPED_P (arg1)
1521 && VECTOR_CST_STEPPED_P (arg2))
1522 /* We can operate directly on the encoding if:
1524 a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1525 implies
1526 (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1528 Addition and subtraction are the supported operators
1529 for which this is true. */
1530 step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
1531 else if (VECTOR_CST_STEPPED_P (arg1))
1532 /* We can operate directly on stepped encodings if:
1534 a3 - a2 == a2 - a1
1535 implies:
1536 (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1538 which is true if (x -> x op c) distributes over addition. */
1539 step_ok_p = distributes_over_addition_p (code, 1);
1540 else
1541 /* Similarly in reverse. */
1542 step_ok_p = distributes_over_addition_p (code, 2);
1543 tree_vector_builder elts;
1544 if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
1545 return NULL_TREE;
1546 unsigned int count = elts.encoded_nelts ();
1547 for (unsigned int i = 0; i < count; ++i)
1549 tree elem1 = VECTOR_CST_ELT (arg1, i);
1550 tree elem2 = VECTOR_CST_ELT (arg2, i);
1552 tree elt = const_binop (code, elem1, elem2);
1554 /* It is possible that const_binop cannot handle the given
1555 code and return NULL_TREE */
1556 if (elt == NULL_TREE)
1557 return NULL_TREE;
1558 elts.quick_push (elt);
1561 return elts.build ();
1564 /* Shifts allow a scalar offset for a vector. */
1565 if (TREE_CODE (arg1) == VECTOR_CST
1566 && TREE_CODE (arg2) == INTEGER_CST)
1568 tree type = TREE_TYPE (arg1);
1569 bool step_ok_p = distributes_over_addition_p (code, 1);
1570 tree_vector_builder elts;
1571 if (!elts.new_unary_operation (type, arg1, step_ok_p))
1572 return NULL_TREE;
1573 unsigned int count = elts.encoded_nelts ();
1574 for (unsigned int i = 0; i < count; ++i)
1576 tree elem1 = VECTOR_CST_ELT (arg1, i);
1578 tree elt = const_binop (code, elem1, arg2);
1580 /* It is possible that const_binop cannot handle the given
1581 code and return NULL_TREE. */
1582 if (elt == NULL_TREE)
1583 return NULL_TREE;
1584 elts.quick_push (elt);
1587 return elts.build ();
1589 return NULL_TREE;
1592 /* Overload that adds a TYPE parameter to be able to dispatch
1593 to fold_relational_const. */
1595 tree
1596 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1598 if (TREE_CODE_CLASS (code) == tcc_comparison)
1599 return fold_relational_const (code, type, arg1, arg2);
1601 /* ??? Until we make the const_binop worker take the type of the
1602 result as argument put those cases that need it here. */
1603 switch (code)
1605 case VEC_SERIES_EXPR:
1606 if (CONSTANT_CLASS_P (arg1)
1607 && CONSTANT_CLASS_P (arg2))
1608 return build_vec_series (type, arg1, arg2);
1609 return NULL_TREE;
1611 case COMPLEX_EXPR:
1612 if ((TREE_CODE (arg1) == REAL_CST
1613 && TREE_CODE (arg2) == REAL_CST)
1614 || (TREE_CODE (arg1) == INTEGER_CST
1615 && TREE_CODE (arg2) == INTEGER_CST))
1616 return build_complex (type, arg1, arg2);
1617 return NULL_TREE;
1619 case POINTER_DIFF_EXPR:
1620 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1622 poly_offset_int res = (wi::to_poly_offset (arg1)
1623 - wi::to_poly_offset (arg2));
1624 return force_fit_type (type, res, 1,
1625 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1627 return NULL_TREE;
1629 case VEC_PACK_TRUNC_EXPR:
1630 case VEC_PACK_FIX_TRUNC_EXPR:
1631 case VEC_PACK_FLOAT_EXPR:
1633 unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;
1635 if (TREE_CODE (arg1) != VECTOR_CST
1636 || TREE_CODE (arg2) != VECTOR_CST)
1637 return NULL_TREE;
1639 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1640 return NULL_TREE;
1642 out_nelts = in_nelts * 2;
1643 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1644 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1646 tree_vector_builder elts (type, out_nelts, 1);
1647 for (i = 0; i < out_nelts; i++)
1649 tree elt = (i < in_nelts
1650 ? VECTOR_CST_ELT (arg1, i)
1651 : VECTOR_CST_ELT (arg2, i - in_nelts));
1652 elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1653 ? NOP_EXPR
1654 : code == VEC_PACK_FLOAT_EXPR
1655 ? FLOAT_EXPR : FIX_TRUNC_EXPR,
1656 TREE_TYPE (type), elt);
1657 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1658 return NULL_TREE;
1659 elts.quick_push (elt);
1662 return elts.build ();
1665 case VEC_WIDEN_MULT_LO_EXPR:
1666 case VEC_WIDEN_MULT_HI_EXPR:
1667 case VEC_WIDEN_MULT_EVEN_EXPR:
1668 case VEC_WIDEN_MULT_ODD_EXPR:
1670 unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1672 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1673 return NULL_TREE;
1675 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1676 return NULL_TREE;
1677 out_nelts = in_nelts / 2;
1678 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1679 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1681 if (code == VEC_WIDEN_MULT_LO_EXPR)
1682 scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1683 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1684 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1685 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1686 scale = 1, ofs = 0;
1687 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1688 scale = 1, ofs = 1;
1690 tree_vector_builder elts (type, out_nelts, 1);
1691 for (out = 0; out < out_nelts; out++)
1693 unsigned int in = (out << scale) + ofs;
1694 tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1695 VECTOR_CST_ELT (arg1, in));
1696 tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1697 VECTOR_CST_ELT (arg2, in));
1699 if (t1 == NULL_TREE || t2 == NULL_TREE)
1700 return NULL_TREE;
1701 tree elt = const_binop (MULT_EXPR, t1, t2);
1702 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1703 return NULL_TREE;
1704 elts.quick_push (elt);
1707 return elts.build ();
1710 default:;
1713 if (TREE_CODE_CLASS (code) != tcc_binary)
1714 return NULL_TREE;
1716 /* Make sure type and arg0 have the same saturating flag. */
1717 gcc_checking_assert (TYPE_SATURATING (type)
1718 == TYPE_SATURATING (TREE_TYPE (arg1)));
1720 return const_binop (code, arg1, arg2);
1723 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1724 Return zero if computing the constants is not possible. */
1726 tree
1727 const_unop (enum tree_code code, tree type, tree arg0)
1729 /* Don't perform the operation, other than NEGATE and ABS, if
1730 flag_signaling_nans is on and the operand is a signaling NaN. */
1731 if (TREE_CODE (arg0) == REAL_CST
1732 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1733 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1734 && code != NEGATE_EXPR
1735 && code != ABS_EXPR
1736 && code != ABSU_EXPR)
1737 return NULL_TREE;
1739 switch (code)
1741 CASE_CONVERT:
1742 case FLOAT_EXPR:
1743 case FIX_TRUNC_EXPR:
1744 case FIXED_CONVERT_EXPR:
1745 return fold_convert_const (code, type, arg0);
1747 case ADDR_SPACE_CONVERT_EXPR:
1748 /* If the source address is 0, and the source address space
1749 cannot have a valid object at 0, fold to dest type null. */
1750 if (integer_zerop (arg0)
1751 && !(targetm.addr_space.zero_address_valid
1752 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1753 return fold_convert_const (code, type, arg0);
1754 break;
1756 case VIEW_CONVERT_EXPR:
1757 return fold_view_convert_expr (type, arg0);
1759 case NEGATE_EXPR:
1761 /* Can't call fold_negate_const directly here as that doesn't
1762 handle all cases and we might not be able to negate some
1763 constants. */
1764 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1765 if (tem && CONSTANT_CLASS_P (tem))
1766 return tem;
1767 break;
1770 case ABS_EXPR:
1771 case ABSU_EXPR:
1772 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1773 return fold_abs_const (arg0, type);
1774 break;
1776 case CONJ_EXPR:
1777 if (TREE_CODE (arg0) == COMPLEX_CST)
1779 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1780 TREE_TYPE (type));
1781 return build_complex (type, TREE_REALPART (arg0), ipart);
1783 break;
1785 case BIT_NOT_EXPR:
1786 if (TREE_CODE (arg0) == INTEGER_CST)
1787 return fold_not_const (arg0, type);
1788 else if (POLY_INT_CST_P (arg0))
1789 return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1790 /* Perform BIT_NOT_EXPR on each element individually. */
1791 else if (TREE_CODE (arg0) == VECTOR_CST)
1793 tree elem;
1795 /* This can cope with stepped encodings because ~x == -1 - x. */
1796 tree_vector_builder elements;
1797 elements.new_unary_operation (type, arg0, true);
1798 unsigned int i, count = elements.encoded_nelts ();
1799 for (i = 0; i < count; ++i)
1801 elem = VECTOR_CST_ELT (arg0, i);
1802 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1803 if (elem == NULL_TREE)
1804 break;
1805 elements.quick_push (elem);
1807 if (i == count)
1808 return elements.build ();
1810 break;
1812 case TRUTH_NOT_EXPR:
1813 if (TREE_CODE (arg0) == INTEGER_CST)
1814 return constant_boolean_node (integer_zerop (arg0), type);
1815 break;
1817 case REALPART_EXPR:
1818 if (TREE_CODE (arg0) == COMPLEX_CST)
1819 return fold_convert (type, TREE_REALPART (arg0));
1820 break;
1822 case IMAGPART_EXPR:
1823 if (TREE_CODE (arg0) == COMPLEX_CST)
1824 return fold_convert (type, TREE_IMAGPART (arg0));
1825 break;
1827 case VEC_UNPACK_LO_EXPR:
1828 case VEC_UNPACK_HI_EXPR:
1829 case VEC_UNPACK_FLOAT_LO_EXPR:
1830 case VEC_UNPACK_FLOAT_HI_EXPR:
1831 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1832 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
1834 unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1835 enum tree_code subcode;
1837 if (TREE_CODE (arg0) != VECTOR_CST)
1838 return NULL_TREE;
1840 if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
1841 return NULL_TREE;
1842 out_nelts = in_nelts / 2;
1843 gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1845 unsigned int offset = 0;
1846 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1847 || code == VEC_UNPACK_FLOAT_LO_EXPR
1848 || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
1849 offset = out_nelts;
1851 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1852 subcode = NOP_EXPR;
1853 else if (code == VEC_UNPACK_FLOAT_LO_EXPR
1854 || code == VEC_UNPACK_FLOAT_HI_EXPR)
1855 subcode = FLOAT_EXPR;
1856 else
1857 subcode = FIX_TRUNC_EXPR;
1859 tree_vector_builder elts (type, out_nelts, 1);
1860 for (i = 0; i < out_nelts; i++)
1862 tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1863 VECTOR_CST_ELT (arg0, i + offset));
1864 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1865 return NULL_TREE;
1866 elts.quick_push (elt);
1869 return elts.build ();
1872 case VEC_DUPLICATE_EXPR:
1873 if (CONSTANT_CLASS_P (arg0))
1874 return build_vector_from_val (type, arg0);
1875 return NULL_TREE;
1877 default:
1878 break;
1881 return NULL_TREE;
1884 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1885 indicates which particular sizetype to create. */
1887 tree
1888 size_int_kind (poly_int64 number, enum size_type_kind kind)
1890 return build_int_cst (sizetype_tab[(int) kind], number);
1893 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1894 is a tree code. The type of the result is taken from the operands.
1895 Both must be equivalent integer types, ala int_binop_types_match_p.
1896 If the operands are constant, so is the result. */
1898 tree
1899 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1901 tree type = TREE_TYPE (arg0);
1903 if (arg0 == error_mark_node || arg1 == error_mark_node)
1904 return error_mark_node;
1906 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1907 TREE_TYPE (arg1)));
1909 /* Handle the special case of two poly_int constants faster. */
1910 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1912 /* And some specific cases even faster than that. */
1913 if (code == PLUS_EXPR)
1915 if (integer_zerop (arg0)
1916 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1917 return arg1;
1918 if (integer_zerop (arg1)
1919 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1920 return arg0;
1922 else if (code == MINUS_EXPR)
1924 if (integer_zerop (arg1)
1925 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1926 return arg0;
1928 else if (code == MULT_EXPR)
1930 if (integer_onep (arg0)
1931 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1932 return arg1;
1935 /* Handle general case of two integer constants. For sizetype
1936 constant calculations we always want to know about overflow,
1937 even in the unsigned case. */
1938 tree res = int_const_binop (code, arg0, arg1, -1);
1939 if (res != NULL_TREE)
1940 return res;
1943 return fold_build2_loc (loc, code, type, arg0, arg1);
1946 /* Given two values, either both of sizetype or both of bitsizetype,
1947 compute the difference between the two values. Return the value
1948 in signed type corresponding to the type of the operands. */
1950 tree
1951 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1953 tree type = TREE_TYPE (arg0);
1954 tree ctype;
1956 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1957 TREE_TYPE (arg1)));
1959 /* If the type is already signed, just do the simple thing. */
1960 if (!TYPE_UNSIGNED (type))
1961 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1963 if (type == sizetype)
1964 ctype = ssizetype;
1965 else if (type == bitsizetype)
1966 ctype = sbitsizetype;
1967 else
1968 ctype = signed_type_for (type);
1970 /* If either operand is not a constant, do the conversions to the signed
1971 type and subtract. The hardware will do the right thing with any
1972 overflow in the subtraction. */
1973 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1974 return size_binop_loc (loc, MINUS_EXPR,
1975 fold_convert_loc (loc, ctype, arg0),
1976 fold_convert_loc (loc, ctype, arg1));
1978 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1979 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1980 overflow) and negate (which can't either). Special-case a result
1981 of zero while we're here. */
1982 if (tree_int_cst_equal (arg0, arg1))
1983 return build_int_cst (ctype, 0);
1984 else if (tree_int_cst_lt (arg1, arg0))
1985 return fold_convert_loc (loc, ctype,
1986 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1987 else
1988 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1989 fold_convert_loc (loc, ctype,
1990 size_binop_loc (loc,
1991 MINUS_EXPR,
1992 arg1, arg0)));
1995 /* A subroutine of fold_convert_const handling conversions of an
1996 INTEGER_CST to another integer type. */
1998 static tree
1999 fold_convert_const_int_from_int (tree type, const_tree arg1)
2001 /* Given an integer constant, make new constant with new type,
2002 appropriately sign-extended or truncated. Use widest_int
2003 so that any extension is done according ARG1's type. */
2004 return force_fit_type (type, wi::to_widest (arg1),
2005 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2006 TREE_OVERFLOW (arg1));
2009 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2010 to an integer type. */
2012 static tree
2013 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2015 bool overflow = false;
2016 tree t;
2018 /* The following code implements the floating point to integer
2019 conversion rules required by the Java Language Specification,
2020 that IEEE NaNs are mapped to zero and values that overflow
2021 the target precision saturate, i.e. values greater than
2022 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2023 are mapped to INT_MIN. These semantics are allowed by the
2024 C and C++ standards that simply state that the behavior of
2025 FP-to-integer conversion is unspecified upon overflow. */
2027 wide_int val;
2028 REAL_VALUE_TYPE r;
2029 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2031 switch (code)
2033 case FIX_TRUNC_EXPR:
2034 real_trunc (&r, VOIDmode, &x);
2035 break;
2037 default:
2038 gcc_unreachable ();
2041 /* If R is NaN, return zero and show we have an overflow. */
2042 if (REAL_VALUE_ISNAN (r))
2044 overflow = true;
2045 val = wi::zero (TYPE_PRECISION (type));
2048 /* See if R is less than the lower bound or greater than the
2049 upper bound. */
2051 if (! overflow)
2053 tree lt = TYPE_MIN_VALUE (type);
2054 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2055 if (real_less (&r, &l))
2057 overflow = true;
2058 val = wi::to_wide (lt);
2062 if (! overflow)
2064 tree ut = TYPE_MAX_VALUE (type);
2065 if (ut)
2067 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2068 if (real_less (&u, &r))
2070 overflow = true;
2071 val = wi::to_wide (ut);
2076 if (! overflow)
2077 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2079 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2080 return t;
2083 /* A subroutine of fold_convert_const handling conversions of a
2084 FIXED_CST to an integer type. */
2086 static tree
2087 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2089 tree t;
2090 double_int temp, temp_trunc;
2091 scalar_mode mode;
2093 /* Right shift FIXED_CST to temp by fbit. */
2094 temp = TREE_FIXED_CST (arg1).data;
2095 mode = TREE_FIXED_CST (arg1).mode;
2096 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2098 temp = temp.rshift (GET_MODE_FBIT (mode),
2099 HOST_BITS_PER_DOUBLE_INT,
2100 SIGNED_FIXED_POINT_MODE_P (mode));
2102 /* Left shift temp to temp_trunc by fbit. */
2103 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2104 HOST_BITS_PER_DOUBLE_INT,
2105 SIGNED_FIXED_POINT_MODE_P (mode));
2107 else
2109 temp = double_int_zero;
2110 temp_trunc = double_int_zero;
2113 /* If FIXED_CST is negative, we need to round the value toward 0.
2114 By checking if the fractional bits are not zero to add 1 to temp. */
2115 if (SIGNED_FIXED_POINT_MODE_P (mode)
2116 && temp_trunc.is_negative ()
2117 && TREE_FIXED_CST (arg1).data != temp_trunc)
2118 temp += double_int_one;
2120 /* Given a fixed-point constant, make new constant with new type,
2121 appropriately sign-extended or truncated. */
2122 t = force_fit_type (type, temp, -1,
2123 (temp.is_negative ()
2124 && (TYPE_UNSIGNED (type)
2125 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2126 | TREE_OVERFLOW (arg1));
2128 return t;
2131 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2132 to another floating point type. */
2134 static tree
2135 fold_convert_const_real_from_real (tree type, const_tree arg1)
2137 REAL_VALUE_TYPE value;
2138 tree t;
2140 /* Don't perform the operation if flag_signaling_nans is on
2141 and the operand is a signaling NaN. */
2142 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2143 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2144 return NULL_TREE;
2146 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2147 t = build_real (type, value);
2149 /* If converting an infinity or NAN to a representation that doesn't
2150 have one, set the overflow bit so that we can produce some kind of
2151 error message at the appropriate point if necessary. It's not the
2152 most user-friendly message, but it's better than nothing. */
2153 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2154 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2155 TREE_OVERFLOW (t) = 1;
2156 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2157 && !MODE_HAS_NANS (TYPE_MODE (type)))
2158 TREE_OVERFLOW (t) = 1;
2159 /* Regular overflow, conversion produced an infinity in a mode that
2160 can't represent them. */
2161 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2162 && REAL_VALUE_ISINF (value)
2163 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2164 TREE_OVERFLOW (t) = 1;
2165 else
2166 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2167 return t;
2170 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2171 to a floating point type. */
2173 static tree
2174 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2176 REAL_VALUE_TYPE value;
2177 tree t;
2179 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2180 &TREE_FIXED_CST (arg1));
2181 t = build_real (type, value);
2183 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2184 return t;
2187 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2188 to another fixed-point type. */
2190 static tree
2191 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2193 FIXED_VALUE_TYPE value;
2194 tree t;
2195 bool overflow_p;
2197 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2198 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2199 t = build_fixed (type, value);
2201 /* Propagate overflow flags. */
2202 if (overflow_p | TREE_OVERFLOW (arg1))
2203 TREE_OVERFLOW (t) = 1;
2204 return t;
2207 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2208 to a fixed-point type. */
2210 static tree
2211 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2213 FIXED_VALUE_TYPE value;
2214 tree t;
2215 bool overflow_p;
2216 double_int di;
2218 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2220 di.low = TREE_INT_CST_ELT (arg1, 0);
2221 if (TREE_INT_CST_NUNITS (arg1) == 1)
2222 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2223 else
2224 di.high = TREE_INT_CST_ELT (arg1, 1);
2226 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2227 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2228 TYPE_SATURATING (type));
2229 t = build_fixed (type, value);
2231 /* Propagate overflow flags. */
2232 if (overflow_p | TREE_OVERFLOW (arg1))
2233 TREE_OVERFLOW (t) = 1;
2234 return t;
2237 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2238 to a fixed-point type. */
2240 static tree
2241 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2243 FIXED_VALUE_TYPE value;
2244 tree t;
2245 bool overflow_p;
2247 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2248 &TREE_REAL_CST (arg1),
2249 TYPE_SATURATING (type));
2250 t = build_fixed (type, value);
2252 /* Propagate overflow flags. */
2253 if (overflow_p | TREE_OVERFLOW (arg1))
2254 TREE_OVERFLOW (t) = 1;
2255 return t;
2258 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2259 type TYPE. If no simplification can be done return NULL_TREE. */
2261 static tree
2262 fold_convert_const (enum tree_code code, tree type, tree arg1)
2264 tree arg_type = TREE_TYPE (arg1);
2265 if (arg_type == type)
2266 return arg1;
2268 /* We can't widen types, since the runtime value could overflow the
2269 original type before being extended to the new type. */
2270 if (POLY_INT_CST_P (arg1)
2271 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2272 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2273 return build_poly_int_cst (type,
2274 poly_wide_int::from (poly_int_cst_value (arg1),
2275 TYPE_PRECISION (type),
2276 TYPE_SIGN (arg_type)));
2278 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2279 || TREE_CODE (type) == OFFSET_TYPE)
2281 if (TREE_CODE (arg1) == INTEGER_CST)
2282 return fold_convert_const_int_from_int (type, arg1);
2283 else if (TREE_CODE (arg1) == REAL_CST)
2284 return fold_convert_const_int_from_real (code, type, arg1);
2285 else if (TREE_CODE (arg1) == FIXED_CST)
2286 return fold_convert_const_int_from_fixed (type, arg1);
2288 else if (TREE_CODE (type) == REAL_TYPE)
2290 if (TREE_CODE (arg1) == INTEGER_CST)
2291 return build_real_from_int_cst (type, arg1);
2292 else if (TREE_CODE (arg1) == REAL_CST)
2293 return fold_convert_const_real_from_real (type, arg1);
2294 else if (TREE_CODE (arg1) == FIXED_CST)
2295 return fold_convert_const_real_from_fixed (type, arg1);
2297 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2299 if (TREE_CODE (arg1) == FIXED_CST)
2300 return fold_convert_const_fixed_from_fixed (type, arg1);
2301 else if (TREE_CODE (arg1) == INTEGER_CST)
2302 return fold_convert_const_fixed_from_int (type, arg1);
2303 else if (TREE_CODE (arg1) == REAL_CST)
2304 return fold_convert_const_fixed_from_real (type, arg1);
2306 else if (TREE_CODE (type) == VECTOR_TYPE)
2308 if (TREE_CODE (arg1) == VECTOR_CST
2309 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2311 tree elttype = TREE_TYPE (type);
2312 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2313 /* We can't handle steps directly when extending, since the
2314 values need to wrap at the original precision first. */
2315 bool step_ok_p
2316 = (INTEGRAL_TYPE_P (elttype)
2317 && INTEGRAL_TYPE_P (arg1_elttype)
2318 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2319 tree_vector_builder v;
2320 if (!v.new_unary_operation (type, arg1, step_ok_p))
2321 return NULL_TREE;
2322 unsigned int len = v.encoded_nelts ();
2323 for (unsigned int i = 0; i < len; ++i)
2325 tree elt = VECTOR_CST_ELT (arg1, i);
2326 tree cvt = fold_convert_const (code, elttype, elt);
2327 if (cvt == NULL_TREE)
2328 return NULL_TREE;
2329 v.quick_push (cvt);
2331 return v.build ();
2334 return NULL_TREE;
2337 /* Construct a vector of zero elements of vector type TYPE. */
2339 static tree
2340 build_zero_vector (tree type)
2342 tree t;
2344 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2345 return build_vector_from_val (type, t);
2348 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2350 bool
2351 fold_convertible_p (const_tree type, const_tree arg)
2353 tree orig = TREE_TYPE (arg);
2355 if (type == orig)
2356 return true;
2358 if (TREE_CODE (arg) == ERROR_MARK
2359 || TREE_CODE (type) == ERROR_MARK
2360 || TREE_CODE (orig) == ERROR_MARK)
2361 return false;
2363 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2364 return true;
2366 switch (TREE_CODE (type))
2368 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2369 case POINTER_TYPE: case REFERENCE_TYPE:
2370 case OFFSET_TYPE:
2371 return (INTEGRAL_TYPE_P (orig)
2372 || (POINTER_TYPE_P (orig)
2373 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2374 || TREE_CODE (orig) == OFFSET_TYPE);
2376 case REAL_TYPE:
2377 case FIXED_POINT_TYPE:
2378 case VOID_TYPE:
2379 return TREE_CODE (type) == TREE_CODE (orig);
2381 case VECTOR_TYPE:
2382 return (VECTOR_TYPE_P (orig)
2383 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2384 TYPE_VECTOR_SUBPARTS (orig))
2385 && fold_convertible_p (TREE_TYPE (type), TREE_TYPE (orig)));
2387 default:
2388 return false;
2392 /* Convert expression ARG to type TYPE. Used by the middle-end for
2393 simple conversions in preference to calling the front-end's convert. */
2395 tree
2396 fold_convert_loc (location_t loc, tree type, tree arg)
2398 tree orig = TREE_TYPE (arg);
2399 tree tem;
2401 if (type == orig)
2402 return arg;
2404 if (TREE_CODE (arg) == ERROR_MARK
2405 || TREE_CODE (type) == ERROR_MARK
2406 || TREE_CODE (orig) == ERROR_MARK)
2407 return error_mark_node;
2409 switch (TREE_CODE (type))
2411 case POINTER_TYPE:
2412 case REFERENCE_TYPE:
2413 /* Handle conversions between pointers to different address spaces. */
2414 if (POINTER_TYPE_P (orig)
2415 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2416 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2417 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2418 /* fall through */
2420 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2421 case OFFSET_TYPE:
2422 if (TREE_CODE (arg) == INTEGER_CST)
2424 tem = fold_convert_const (NOP_EXPR, type, arg);
2425 if (tem != NULL_TREE)
2426 return tem;
2428 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2429 || TREE_CODE (orig) == OFFSET_TYPE)
2430 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2431 if (TREE_CODE (orig) == COMPLEX_TYPE)
2432 return fold_convert_loc (loc, type,
2433 fold_build1_loc (loc, REALPART_EXPR,
2434 TREE_TYPE (orig), arg));
2435 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2436 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2437 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2439 case REAL_TYPE:
2440 if (TREE_CODE (arg) == INTEGER_CST)
2442 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2443 if (tem != NULL_TREE)
2444 return tem;
2446 else if (TREE_CODE (arg) == REAL_CST)
2448 tem = fold_convert_const (NOP_EXPR, type, arg);
2449 if (tem != NULL_TREE)
2450 return tem;
2452 else if (TREE_CODE (arg) == FIXED_CST)
2454 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2455 if (tem != NULL_TREE)
2456 return tem;
2459 switch (TREE_CODE (orig))
2461 case INTEGER_TYPE:
2462 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2463 case POINTER_TYPE: case REFERENCE_TYPE:
2464 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2466 case REAL_TYPE:
2467 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2469 case FIXED_POINT_TYPE:
2470 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2472 case COMPLEX_TYPE:
2473 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2474 return fold_convert_loc (loc, type, tem);
2476 default:
2477 gcc_unreachable ();
2480 case FIXED_POINT_TYPE:
2481 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2482 || TREE_CODE (arg) == REAL_CST)
2484 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2485 if (tem != NULL_TREE)
2486 goto fold_convert_exit;
2489 switch (TREE_CODE (orig))
2491 case FIXED_POINT_TYPE:
2492 case INTEGER_TYPE:
2493 case ENUMERAL_TYPE:
2494 case BOOLEAN_TYPE:
2495 case REAL_TYPE:
2496 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2498 case COMPLEX_TYPE:
2499 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2500 return fold_convert_loc (loc, type, tem);
2502 default:
2503 gcc_unreachable ();
2506 case COMPLEX_TYPE:
2507 switch (TREE_CODE (orig))
2509 case INTEGER_TYPE:
2510 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2511 case POINTER_TYPE: case REFERENCE_TYPE:
2512 case REAL_TYPE:
2513 case FIXED_POINT_TYPE:
2514 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2515 fold_convert_loc (loc, TREE_TYPE (type), arg),
2516 fold_convert_loc (loc, TREE_TYPE (type),
2517 integer_zero_node));
2518 case COMPLEX_TYPE:
2520 tree rpart, ipart;
2522 if (TREE_CODE (arg) == COMPLEX_EXPR)
2524 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2525 TREE_OPERAND (arg, 0));
2526 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2527 TREE_OPERAND (arg, 1));
2528 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2531 arg = save_expr (arg);
2532 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2533 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2534 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2535 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2536 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2539 default:
2540 gcc_unreachable ();
2543 case VECTOR_TYPE:
2544 if (integer_zerop (arg))
2545 return build_zero_vector (type);
2546 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2547 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2548 || TREE_CODE (orig) == VECTOR_TYPE);
2549 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2551 case VOID_TYPE:
2552 tem = fold_ignored_result (arg);
2553 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2555 default:
2556 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2557 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2558 gcc_unreachable ();
2560 fold_convert_exit:
2561 protected_set_expr_location_unshare (tem, loc);
2562 return tem;
2565 /* Return false if expr can be assumed not to be an lvalue, true
2566 otherwise. */
2568 static bool
2569 maybe_lvalue_p (const_tree x)
2571 /* We only need to wrap lvalue tree codes. */
2572 switch (TREE_CODE (x))
2574 case VAR_DECL:
2575 case PARM_DECL:
2576 case RESULT_DECL:
2577 case LABEL_DECL:
2578 case FUNCTION_DECL:
2579 case SSA_NAME:
2581 case COMPONENT_REF:
2582 case MEM_REF:
2583 case INDIRECT_REF:
2584 case ARRAY_REF:
2585 case ARRAY_RANGE_REF:
2586 case BIT_FIELD_REF:
2587 case OBJ_TYPE_REF:
2589 case REALPART_EXPR:
2590 case IMAGPART_EXPR:
2591 case PREINCREMENT_EXPR:
2592 case PREDECREMENT_EXPR:
2593 case SAVE_EXPR:
2594 case TRY_CATCH_EXPR:
2595 case WITH_CLEANUP_EXPR:
2596 case COMPOUND_EXPR:
2597 case MODIFY_EXPR:
2598 case TARGET_EXPR:
2599 case COND_EXPR:
2600 case BIND_EXPR:
2601 case VIEW_CONVERT_EXPR:
2602 break;
2604 default:
2605 /* Assume the worst for front-end tree codes. */
2606 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2607 break;
2608 return false;
2611 return true;
2614 /* Return an expr equal to X but certainly not valid as an lvalue. */
2616 tree
2617 non_lvalue_loc (location_t loc, tree x)
2619 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2620 us. */
2621 if (in_gimple_form)
2622 return x;
2624 if (! maybe_lvalue_p (x))
2625 return x;
2626 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2629 /* When pedantic, return an expr equal to X but certainly not valid as a
2630 pedantic lvalue. Otherwise, return X. */
2632 static tree
2633 pedantic_non_lvalue_loc (location_t loc, tree x)
2635 return protected_set_expr_location_unshare (x, loc);
2638 /* Given a tree comparison code, return the code that is the logical inverse.
2639 It is generally not safe to do this for floating-point comparisons, except
2640 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2641 ERROR_MARK in this case. */
2643 enum tree_code
2644 invert_tree_comparison (enum tree_code code, bool honor_nans)
2646 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2647 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2648 return ERROR_MARK;
2650 switch (code)
2652 case EQ_EXPR:
2653 return NE_EXPR;
2654 case NE_EXPR:
2655 return EQ_EXPR;
2656 case GT_EXPR:
2657 return honor_nans ? UNLE_EXPR : LE_EXPR;
2658 case GE_EXPR:
2659 return honor_nans ? UNLT_EXPR : LT_EXPR;
2660 case LT_EXPR:
2661 return honor_nans ? UNGE_EXPR : GE_EXPR;
2662 case LE_EXPR:
2663 return honor_nans ? UNGT_EXPR : GT_EXPR;
2664 case LTGT_EXPR:
2665 return UNEQ_EXPR;
2666 case UNEQ_EXPR:
2667 return LTGT_EXPR;
2668 case UNGT_EXPR:
2669 return LE_EXPR;
2670 case UNGE_EXPR:
2671 return LT_EXPR;
2672 case UNLT_EXPR:
2673 return GE_EXPR;
2674 case UNLE_EXPR:
2675 return GT_EXPR;
2676 case ORDERED_EXPR:
2677 return UNORDERED_EXPR;
2678 case UNORDERED_EXPR:
2679 return ORDERED_EXPR;
2680 default:
2681 gcc_unreachable ();
2685 /* Similar, but return the comparison that results if the operands are
2686 swapped. This is safe for floating-point. */
2688 enum tree_code
2689 swap_tree_comparison (enum tree_code code)
2691 switch (code)
2693 case EQ_EXPR:
2694 case NE_EXPR:
2695 case ORDERED_EXPR:
2696 case UNORDERED_EXPR:
2697 case LTGT_EXPR:
2698 case UNEQ_EXPR:
2699 return code;
2700 case GT_EXPR:
2701 return LT_EXPR;
2702 case GE_EXPR:
2703 return LE_EXPR;
2704 case LT_EXPR:
2705 return GT_EXPR;
2706 case LE_EXPR:
2707 return GE_EXPR;
2708 case UNGT_EXPR:
2709 return UNLT_EXPR;
2710 case UNGE_EXPR:
2711 return UNLE_EXPR;
2712 case UNLT_EXPR:
2713 return UNGT_EXPR;
2714 case UNLE_EXPR:
2715 return UNGE_EXPR;
2716 default:
2717 gcc_unreachable ();
2722 /* Convert a comparison tree code from an enum tree_code representation
2723 into a compcode bit-based encoding. This function is the inverse of
2724 compcode_to_comparison. */
2726 static enum comparison_code
2727 comparison_to_compcode (enum tree_code code)
2729 switch (code)
2731 case LT_EXPR:
2732 return COMPCODE_LT;
2733 case EQ_EXPR:
2734 return COMPCODE_EQ;
2735 case LE_EXPR:
2736 return COMPCODE_LE;
2737 case GT_EXPR:
2738 return COMPCODE_GT;
2739 case NE_EXPR:
2740 return COMPCODE_NE;
2741 case GE_EXPR:
2742 return COMPCODE_GE;
2743 case ORDERED_EXPR:
2744 return COMPCODE_ORD;
2745 case UNORDERED_EXPR:
2746 return COMPCODE_UNORD;
2747 case UNLT_EXPR:
2748 return COMPCODE_UNLT;
2749 case UNEQ_EXPR:
2750 return COMPCODE_UNEQ;
2751 case UNLE_EXPR:
2752 return COMPCODE_UNLE;
2753 case UNGT_EXPR:
2754 return COMPCODE_UNGT;
2755 case LTGT_EXPR:
2756 return COMPCODE_LTGT;
2757 case UNGE_EXPR:
2758 return COMPCODE_UNGE;
2759 default:
2760 gcc_unreachable ();
2764 /* Convert a compcode bit-based encoding of a comparison operator back
2765 to GCC's enum tree_code representation. This function is the
2766 inverse of comparison_to_compcode. */
2768 static enum tree_code
2769 compcode_to_comparison (enum comparison_code code)
2771 switch (code)
2773 case COMPCODE_LT:
2774 return LT_EXPR;
2775 case COMPCODE_EQ:
2776 return EQ_EXPR;
2777 case COMPCODE_LE:
2778 return LE_EXPR;
2779 case COMPCODE_GT:
2780 return GT_EXPR;
2781 case COMPCODE_NE:
2782 return NE_EXPR;
2783 case COMPCODE_GE:
2784 return GE_EXPR;
2785 case COMPCODE_ORD:
2786 return ORDERED_EXPR;
2787 case COMPCODE_UNORD:
2788 return UNORDERED_EXPR;
2789 case COMPCODE_UNLT:
2790 return UNLT_EXPR;
2791 case COMPCODE_UNEQ:
2792 return UNEQ_EXPR;
2793 case COMPCODE_UNLE:
2794 return UNLE_EXPR;
2795 case COMPCODE_UNGT:
2796 return UNGT_EXPR;
2797 case COMPCODE_LTGT:
2798 return LTGT_EXPR;
2799 case COMPCODE_UNGE:
2800 return UNGE_EXPR;
2801 default:
2802 gcc_unreachable ();
2806 /* Return true if COND1 tests the opposite condition of COND2. */
2808 bool
2809 inverse_conditions_p (const_tree cond1, const_tree cond2)
2811 return (COMPARISON_CLASS_P (cond1)
2812 && COMPARISON_CLASS_P (cond2)
2813 && (invert_tree_comparison
2814 (TREE_CODE (cond1),
2815 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2816 && operand_equal_p (TREE_OPERAND (cond1, 0),
2817 TREE_OPERAND (cond2, 0), 0)
2818 && operand_equal_p (TREE_OPERAND (cond1, 1),
2819 TREE_OPERAND (cond2, 1), 0));
2822 /* Return a tree for the comparison which is the combination of
2823 doing the AND or OR (depending on CODE) of the two operations LCODE
2824 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2825 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2826 if this makes the transformation invalid. */
2828 tree
2829 combine_comparisons (location_t loc,
2830 enum tree_code code, enum tree_code lcode,
2831 enum tree_code rcode, tree truth_type,
2832 tree ll_arg, tree lr_arg)
2834 bool honor_nans = HONOR_NANS (ll_arg);
2835 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2836 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2837 int compcode;
2839 switch (code)
2841 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2842 compcode = lcompcode & rcompcode;
2843 break;
2845 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2846 compcode = lcompcode | rcompcode;
2847 break;
2849 default:
2850 return NULL_TREE;
2853 if (!honor_nans)
2855 /* Eliminate unordered comparisons, as well as LTGT and ORD
2856 which are not used unless the mode has NaNs. */
2857 compcode &= ~COMPCODE_UNORD;
2858 if (compcode == COMPCODE_LTGT)
2859 compcode = COMPCODE_NE;
2860 else if (compcode == COMPCODE_ORD)
2861 compcode = COMPCODE_TRUE;
2863 else if (flag_trapping_math)
2865 /* Check that the original operation and the optimized ones will trap
2866 under the same condition. */
2867 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2868 && (lcompcode != COMPCODE_EQ)
2869 && (lcompcode != COMPCODE_ORD);
2870 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2871 && (rcompcode != COMPCODE_EQ)
2872 && (rcompcode != COMPCODE_ORD);
2873 bool trap = (compcode & COMPCODE_UNORD) == 0
2874 && (compcode != COMPCODE_EQ)
2875 && (compcode != COMPCODE_ORD);
2877 /* In a short-circuited boolean expression the LHS might be
2878 such that the RHS, if evaluated, will never trap. For
2879 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2880 if neither x nor y is NaN. (This is a mixed blessing: for
2881 example, the expression above will never trap, hence
2882 optimizing it to x < y would be invalid). */
2883 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2884 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2885 rtrap = false;
2887 /* If the comparison was short-circuited, and only the RHS
2888 trapped, we may now generate a spurious trap. */
2889 if (rtrap && !ltrap
2890 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2891 return NULL_TREE;
2893 /* If we changed the conditions that cause a trap, we lose. */
2894 if ((ltrap || rtrap) != trap)
2895 return NULL_TREE;
2898 if (compcode == COMPCODE_TRUE)
2899 return constant_boolean_node (true, truth_type);
2900 else if (compcode == COMPCODE_FALSE)
2901 return constant_boolean_node (false, truth_type);
2902 else
2904 enum tree_code tcode;
2906 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2907 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2911 /* Return nonzero if two operands (typically of the same tree node)
2912 are necessarily equal. FLAGS modifies behavior as follows:
2914 If OEP_ONLY_CONST is set, only return nonzero for constants.
2915 This function tests whether the operands are indistinguishable;
2916 it does not test whether they are equal using C's == operation.
2917 The distinction is important for IEEE floating point, because
2918 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2919 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2921 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2922 even though it may hold multiple values during a function.
2923 This is because a GCC tree node guarantees that nothing else is
2924 executed between the evaluation of its "operands" (which may often
2925 be evaluated in arbitrary order). Hence if the operands themselves
2926 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2927 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2928 unset means assuming isochronic (or instantaneous) tree equivalence.
2929 Unless comparing arbitrary expression trees, such as from different
2930 statements, this flag can usually be left unset.
2932 If OEP_PURE_SAME is set, then pure functions with identical arguments
2933 are considered the same. It is used when the caller has other ways
2934 to ensure that global memory is unchanged in between.
2936 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2937 not values of expressions.
2939 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2940 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2942 If OEP_BITWISE is set, then require the values to be bitwise identical
2943 rather than simply numerically equal. Do not take advantage of things
2944 like math-related flags or undefined behavior; only return true for
2945 values that are provably bitwise identical in all circumstances.
2947 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2948 any operand with side effect. This is unnecesarily conservative in the
2949 case we know that arg0 and arg1 are in disjoint code paths (such as in
2950 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2951 addresses with TREE_CONSTANT flag set so we know that &var == &var
2952 even if var is volatile. */
2954 bool
2955 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
2956 unsigned int flags)
2958 bool r;
2959 if (verify_hash_value (arg0, arg1, flags, &r))
2960 return r;
2962 STRIP_ANY_LOCATION_WRAPPER (arg0);
2963 STRIP_ANY_LOCATION_WRAPPER (arg1);
2965 /* If either is ERROR_MARK, they aren't equal. */
2966 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2967 || TREE_TYPE (arg0) == error_mark_node
2968 || TREE_TYPE (arg1) == error_mark_node)
2969 return false;
2971 /* Similar, if either does not have a type (like a template id),
2972 they aren't equal. */
2973 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2974 return false;
2976 /* Bitwise identity makes no sense if the values have different layouts. */
2977 if ((flags & OEP_BITWISE)
2978 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
2979 return false;
2981 /* We cannot consider pointers to different address space equal. */
2982 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2983 && POINTER_TYPE_P (TREE_TYPE (arg1))
2984 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2985 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2986 return false;
2988 /* Check equality of integer constants before bailing out due to
2989 precision differences. */
2990 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2992 /* Address of INTEGER_CST is not defined; check that we did not forget
2993 to drop the OEP_ADDRESS_OF flags. */
2994 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2995 return tree_int_cst_equal (arg0, arg1);
2998 if (!(flags & OEP_ADDRESS_OF))
3000 /* If both types don't have the same signedness, then we can't consider
3001 them equal. We must check this before the STRIP_NOPS calls
3002 because they may change the signedness of the arguments. As pointers
3003 strictly don't have a signedness, require either two pointers or
3004 two non-pointers as well. */
3005 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3006 || POINTER_TYPE_P (TREE_TYPE (arg0))
3007 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3008 return false;
3010 /* If both types don't have the same precision, then it is not safe
3011 to strip NOPs. */
3012 if (element_precision (TREE_TYPE (arg0))
3013 != element_precision (TREE_TYPE (arg1)))
3014 return false;
3016 STRIP_NOPS (arg0);
3017 STRIP_NOPS (arg1);
3019 #if 0
3020 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
3021 sanity check once the issue is solved. */
3022 else
3023 /* Addresses of conversions and SSA_NAMEs (and many other things)
3024 are not defined. Check that we did not forget to drop the
3025 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3026 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3027 && TREE_CODE (arg0) != SSA_NAME);
3028 #endif
3030 /* In case both args are comparisons but with different comparison
3031 code, try to swap the comparison operands of one arg to produce
3032 a match and compare that variant. */
3033 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3034 && COMPARISON_CLASS_P (arg0)
3035 && COMPARISON_CLASS_P (arg1))
3037 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3039 if (TREE_CODE (arg0) == swap_code)
3040 return operand_equal_p (TREE_OPERAND (arg0, 0),
3041 TREE_OPERAND (arg1, 1), flags)
3042 && operand_equal_p (TREE_OPERAND (arg0, 1),
3043 TREE_OPERAND (arg1, 0), flags);
3046 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3048 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3049 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3051 else if (flags & OEP_ADDRESS_OF)
3053 /* If we are interested in comparing addresses ignore
3054 MEM_REF wrappings of the base that can appear just for
3055 TBAA reasons. */
3056 if (TREE_CODE (arg0) == MEM_REF
3057 && DECL_P (arg1)
3058 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3059 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3060 && integer_zerop (TREE_OPERAND (arg0, 1)))
3061 return true;
3062 else if (TREE_CODE (arg1) == MEM_REF
3063 && DECL_P (arg0)
3064 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3065 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3066 && integer_zerop (TREE_OPERAND (arg1, 1)))
3067 return true;
3068 return false;
3070 else
3071 return false;
3074 /* When not checking adddresses, this is needed for conversions and for
3075 COMPONENT_REF. Might as well play it safe and always test this. */
3076 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3077 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3078 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3079 && !(flags & OEP_ADDRESS_OF)))
3080 return false;
3082 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3083 We don't care about side effects in that case because the SAVE_EXPR
3084 takes care of that for us. In all other cases, two expressions are
3085 equal if they have no side effects. If we have two identical
3086 expressions with side effects that should be treated the same due
3087 to the only side effects being identical SAVE_EXPR's, that will
3088 be detected in the recursive calls below.
3089 If we are taking an invariant address of two identical objects
3090 they are necessarily equal as well. */
3091 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3092 && (TREE_CODE (arg0) == SAVE_EXPR
3093 || (flags & OEP_MATCH_SIDE_EFFECTS)
3094 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3095 return true;
3097 /* Next handle constant cases, those for which we can return 1 even
3098 if ONLY_CONST is set. */
3099 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3100 switch (TREE_CODE (arg0))
3102 case INTEGER_CST:
3103 return tree_int_cst_equal (arg0, arg1);
3105 case FIXED_CST:
3106 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3107 TREE_FIXED_CST (arg1));
3109 case REAL_CST:
3110 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3111 return true;
3113 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3115 /* If we do not distinguish between signed and unsigned zero,
3116 consider them equal. */
3117 if (real_zerop (arg0) && real_zerop (arg1))
3118 return true;
3120 return false;
3122 case VECTOR_CST:
3124 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3125 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3126 return false;
3128 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3129 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3130 return false;
3132 unsigned int count = vector_cst_encoded_nelts (arg0);
3133 for (unsigned int i = 0; i < count; ++i)
3134 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3135 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3136 return false;
3137 return true;
3140 case COMPLEX_CST:
3141 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3142 flags)
3143 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3144 flags));
3146 case STRING_CST:
3147 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3148 && ! memcmp (TREE_STRING_POINTER (arg0),
3149 TREE_STRING_POINTER (arg1),
3150 TREE_STRING_LENGTH (arg0)));
3152 case ADDR_EXPR:
3153 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3154 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3155 flags | OEP_ADDRESS_OF
3156 | OEP_MATCH_SIDE_EFFECTS);
3157 case CONSTRUCTOR:
3158 /* In GIMPLE empty constructors are allowed in initializers of
3159 aggregates. */
3160 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3161 default:
3162 break;
3165 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3166 two instances of undefined behavior will give identical results. */
3167 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3168 return false;
3170 /* Define macros to test an operand from arg0 and arg1 for equality and a
3171 variant that allows null and views null as being different from any
3172 non-null value. In the latter case, if either is null, the both
3173 must be; otherwise, do the normal comparison. */
3174 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3175 TREE_OPERAND (arg1, N), flags)
3177 #define OP_SAME_WITH_NULL(N) \
3178 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3179 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3181 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3183 case tcc_unary:
3184 /* Two conversions are equal only if signedness and modes match. */
3185 switch (TREE_CODE (arg0))
3187 CASE_CONVERT:
3188 case FIX_TRUNC_EXPR:
3189 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3190 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3191 return false;
3192 break;
3193 default:
3194 break;
3197 return OP_SAME (0);
3200 case tcc_comparison:
3201 case tcc_binary:
3202 if (OP_SAME (0) && OP_SAME (1))
3203 return true;
3205 /* For commutative ops, allow the other order. */
3206 return (commutative_tree_code (TREE_CODE (arg0))
3207 && operand_equal_p (TREE_OPERAND (arg0, 0),
3208 TREE_OPERAND (arg1, 1), flags)
3209 && operand_equal_p (TREE_OPERAND (arg0, 1),
3210 TREE_OPERAND (arg1, 0), flags));
3212 case tcc_reference:
3213 /* If either of the pointer (or reference) expressions we are
3214 dereferencing contain a side effect, these cannot be equal,
3215 but their addresses can be. */
3216 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3217 && (TREE_SIDE_EFFECTS (arg0)
3218 || TREE_SIDE_EFFECTS (arg1)))
3219 return false;
3221 switch (TREE_CODE (arg0))
3223 case INDIRECT_REF:
3224 if (!(flags & OEP_ADDRESS_OF))
3226 if (TYPE_ALIGN (TREE_TYPE (arg0))
3227 != TYPE_ALIGN (TREE_TYPE (arg1)))
3228 return false;
3229 /* Verify that the access types are compatible. */
3230 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3231 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3232 return false;
3234 flags &= ~OEP_ADDRESS_OF;
3235 return OP_SAME (0);
3237 case IMAGPART_EXPR:
3238 /* Require the same offset. */
3239 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3240 TYPE_SIZE (TREE_TYPE (arg1)),
3241 flags & ~OEP_ADDRESS_OF))
3242 return false;
3244 /* Fallthru. */
3245 case REALPART_EXPR:
3246 case VIEW_CONVERT_EXPR:
3247 return OP_SAME (0);
3249 case TARGET_MEM_REF:
3250 case MEM_REF:
3251 if (!(flags & OEP_ADDRESS_OF))
3253 /* Require equal access sizes */
3254 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3255 && (!TYPE_SIZE (TREE_TYPE (arg0))
3256 || !TYPE_SIZE (TREE_TYPE (arg1))
3257 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3258 TYPE_SIZE (TREE_TYPE (arg1)),
3259 flags)))
3260 return false;
3261 /* Verify that access happens in similar types. */
3262 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3263 return false;
3264 /* Verify that accesses are TBAA compatible. */
3265 if (!alias_ptr_types_compatible_p
3266 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3267 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3268 || (MR_DEPENDENCE_CLIQUE (arg0)
3269 != MR_DEPENDENCE_CLIQUE (arg1))
3270 || (MR_DEPENDENCE_BASE (arg0)
3271 != MR_DEPENDENCE_BASE (arg1)))
3272 return false;
3273 /* Verify that alignment is compatible. */
3274 if (TYPE_ALIGN (TREE_TYPE (arg0))
3275 != TYPE_ALIGN (TREE_TYPE (arg1)))
3276 return false;
3278 flags &= ~OEP_ADDRESS_OF;
3279 return (OP_SAME (0) && OP_SAME (1)
3280 /* TARGET_MEM_REF require equal extra operands. */
3281 && (TREE_CODE (arg0) != TARGET_MEM_REF
3282 || (OP_SAME_WITH_NULL (2)
3283 && OP_SAME_WITH_NULL (3)
3284 && OP_SAME_WITH_NULL (4))));
3286 case ARRAY_REF:
3287 case ARRAY_RANGE_REF:
3288 if (!OP_SAME (0))
3289 return false;
3290 flags &= ~OEP_ADDRESS_OF;
3291 /* Compare the array index by value if it is constant first as we
3292 may have different types but same value here. */
3293 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3294 TREE_OPERAND (arg1, 1))
3295 || OP_SAME (1))
3296 && OP_SAME_WITH_NULL (2)
3297 && OP_SAME_WITH_NULL (3)
3298 /* Compare low bound and element size as with OEP_ADDRESS_OF
3299 we have to account for the offset of the ref. */
3300 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3301 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3302 || (operand_equal_p (array_ref_low_bound
3303 (CONST_CAST_TREE (arg0)),
3304 array_ref_low_bound
3305 (CONST_CAST_TREE (arg1)), flags)
3306 && operand_equal_p (array_ref_element_size
3307 (CONST_CAST_TREE (arg0)),
3308 array_ref_element_size
3309 (CONST_CAST_TREE (arg1)),
3310 flags))));
3312 case COMPONENT_REF:
3313 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3314 may be NULL when we're called to compare MEM_EXPRs. */
3315 if (!OP_SAME_WITH_NULL (0))
3316 return false;
3317 /* Most of time we only need to compare FIELD_DECLs for equality.
3318 However when determining address look into actual offsets.
3319 These may match for unions and unshared record types. */
3320 flags &= ~OEP_ADDRESS_OF;
3321 if (!OP_SAME (1))
3323 if (flags & OEP_ADDRESS_OF)
3325 if (TREE_OPERAND (arg0, 2)
3326 || TREE_OPERAND (arg1, 2))
3327 return OP_SAME_WITH_NULL (2);
3328 tree field0 = TREE_OPERAND (arg0, 1);
3329 tree field1 = TREE_OPERAND (arg1, 1);
3331 if (!operand_equal_p (DECL_FIELD_OFFSET (field0),
3332 DECL_FIELD_OFFSET (field1), flags)
3333 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3334 DECL_FIELD_BIT_OFFSET (field1),
3335 flags))
3336 return false;
3338 else
3339 return false;
3341 return OP_SAME_WITH_NULL (2);
3343 case BIT_FIELD_REF:
3344 if (!OP_SAME (0))
3345 return false;
3346 flags &= ~OEP_ADDRESS_OF;
3347 return OP_SAME (1) && OP_SAME (2);
3349 /* Virtual table call. */
3350 case OBJ_TYPE_REF:
3352 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3353 OBJ_TYPE_REF_EXPR (arg1), flags))
3354 return false;
3355 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3356 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3357 return false;
3358 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3359 OBJ_TYPE_REF_OBJECT (arg1), flags))
3360 return false;
3361 if (!types_same_for_odr (obj_type_ref_class (arg0),
3362 obj_type_ref_class (arg1)))
3363 return false;
3364 return true;
3367 default:
3368 return false;
3371 case tcc_expression:
3372 switch (TREE_CODE (arg0))
3374 case ADDR_EXPR:
3375 /* Be sure we pass right ADDRESS_OF flag. */
3376 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3377 return operand_equal_p (TREE_OPERAND (arg0, 0),
3378 TREE_OPERAND (arg1, 0),
3379 flags | OEP_ADDRESS_OF);
3381 case TRUTH_NOT_EXPR:
3382 return OP_SAME (0);
3384 case TRUTH_ANDIF_EXPR:
3385 case TRUTH_ORIF_EXPR:
3386 return OP_SAME (0) && OP_SAME (1);
3388 case WIDEN_MULT_PLUS_EXPR:
3389 case WIDEN_MULT_MINUS_EXPR:
3390 if (!OP_SAME (2))
3391 return false;
3392 /* The multiplcation operands are commutative. */
3393 /* FALLTHRU */
3395 case TRUTH_AND_EXPR:
3396 case TRUTH_OR_EXPR:
3397 case TRUTH_XOR_EXPR:
3398 if (OP_SAME (0) && OP_SAME (1))
3399 return true;
3401 /* Otherwise take into account this is a commutative operation. */
3402 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3403 TREE_OPERAND (arg1, 1), flags)
3404 && operand_equal_p (TREE_OPERAND (arg0, 1),
3405 TREE_OPERAND (arg1, 0), flags));
3407 case COND_EXPR:
3408 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3409 return false;
3410 flags &= ~OEP_ADDRESS_OF;
3411 return OP_SAME (0);
3413 case BIT_INSERT_EXPR:
3414 /* BIT_INSERT_EXPR has an implict operand as the type precision
3415 of op1. Need to check to make sure they are the same. */
3416 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3417 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3418 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3419 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3420 return false;
3421 /* FALLTHRU */
3423 case VEC_COND_EXPR:
3424 case DOT_PROD_EXPR:
3425 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3427 case MODIFY_EXPR:
3428 case INIT_EXPR:
3429 case COMPOUND_EXPR:
3430 case PREDECREMENT_EXPR:
3431 case PREINCREMENT_EXPR:
3432 case POSTDECREMENT_EXPR:
3433 case POSTINCREMENT_EXPR:
3434 if (flags & OEP_LEXICOGRAPHIC)
3435 return OP_SAME (0) && OP_SAME (1);
3436 return false;
3438 case CLEANUP_POINT_EXPR:
3439 case EXPR_STMT:
3440 case SAVE_EXPR:
3441 if (flags & OEP_LEXICOGRAPHIC)
3442 return OP_SAME (0);
3443 return false;
3445 default:
3446 return false;
3449 case tcc_vl_exp:
3450 switch (TREE_CODE (arg0))
3452 case CALL_EXPR:
3453 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3454 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3455 /* If not both CALL_EXPRs are either internal or normal function
3456 functions, then they are not equal. */
3457 return false;
3458 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3460 /* If the CALL_EXPRs call different internal functions, then they
3461 are not equal. */
3462 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3463 return false;
3465 else
3467 /* If the CALL_EXPRs call different functions, then they are not
3468 equal. */
3469 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3470 flags))
3471 return false;
3474 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3476 unsigned int cef = call_expr_flags (arg0);
3477 if (flags & OEP_PURE_SAME)
3478 cef &= ECF_CONST | ECF_PURE;
3479 else
3480 cef &= ECF_CONST;
3481 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3482 return false;
3485 /* Now see if all the arguments are the same. */
3487 const_call_expr_arg_iterator iter0, iter1;
3488 const_tree a0, a1;
3489 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3490 a1 = first_const_call_expr_arg (arg1, &iter1);
3491 a0 && a1;
3492 a0 = next_const_call_expr_arg (&iter0),
3493 a1 = next_const_call_expr_arg (&iter1))
3494 if (! operand_equal_p (a0, a1, flags))
3495 return false;
3497 /* If we get here and both argument lists are exhausted
3498 then the CALL_EXPRs are equal. */
3499 return ! (a0 || a1);
3501 default:
3502 return false;
3505 case tcc_declaration:
3506 /* Consider __builtin_sqrt equal to sqrt. */
3507 return (TREE_CODE (arg0) == FUNCTION_DECL
3508 && fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3509 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3510 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3511 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3513 case tcc_exceptional:
3514 if (TREE_CODE (arg0) == CONSTRUCTOR)
3516 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3517 return false;
3519 /* In GIMPLE constructors are used only to build vectors from
3520 elements. Individual elements in the constructor must be
3521 indexed in increasing order and form an initial sequence.
3523 We make no effort to compare constructors in generic.
3524 (see sem_variable::equals in ipa-icf which can do so for
3525 constants). */
3526 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3527 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3528 return false;
3530 /* Be sure that vectors constructed have the same representation.
3531 We only tested element precision and modes to match.
3532 Vectors may be BLKmode and thus also check that the number of
3533 parts match. */
3534 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3535 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3536 return false;
3538 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3539 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3540 unsigned int len = vec_safe_length (v0);
3542 if (len != vec_safe_length (v1))
3543 return false;
3545 for (unsigned int i = 0; i < len; i++)
3547 constructor_elt *c0 = &(*v0)[i];
3548 constructor_elt *c1 = &(*v1)[i];
3550 if (!operand_equal_p (c0->value, c1->value, flags)
3551 /* In GIMPLE the indexes can be either NULL or matching i.
3552 Double check this so we won't get false
3553 positives for GENERIC. */
3554 || (c0->index
3555 && (TREE_CODE (c0->index) != INTEGER_CST
3556 || compare_tree_int (c0->index, i)))
3557 || (c1->index
3558 && (TREE_CODE (c1->index) != INTEGER_CST
3559 || compare_tree_int (c1->index, i))))
3560 return false;
3562 return true;
3564 else if (TREE_CODE (arg0) == STATEMENT_LIST
3565 && (flags & OEP_LEXICOGRAPHIC))
3567 /* Compare the STATEMENT_LISTs. */
3568 tree_stmt_iterator tsi1, tsi2;
3569 tree body1 = CONST_CAST_TREE (arg0);
3570 tree body2 = CONST_CAST_TREE (arg1);
3571 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3572 tsi_next (&tsi1), tsi_next (&tsi2))
3574 /* The lists don't have the same number of statements. */
3575 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3576 return false;
3577 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3578 return true;
3579 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3580 flags & (OEP_LEXICOGRAPHIC
3581 | OEP_NO_HASH_CHECK)))
3582 return false;
3585 return false;
3587 case tcc_statement:
3588 switch (TREE_CODE (arg0))
3590 case RETURN_EXPR:
3591 if (flags & OEP_LEXICOGRAPHIC)
3592 return OP_SAME_WITH_NULL (0);
3593 return false;
3594 case DEBUG_BEGIN_STMT:
3595 if (flags & OEP_LEXICOGRAPHIC)
3596 return true;
3597 return false;
3598 default:
3599 return false;
3602 default:
3603 return false;
3606 #undef OP_SAME
3607 #undef OP_SAME_WITH_NULL
3610 /* Generate a hash value for an expression. This can be used iteratively
3611 by passing a previous result as the HSTATE argument. */
3613 void
3614 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3615 unsigned int flags)
3617 int i;
3618 enum tree_code code;
3619 enum tree_code_class tclass;
3621 if (t == NULL_TREE || t == error_mark_node)
3623 hstate.merge_hash (0);
3624 return;
3627 STRIP_ANY_LOCATION_WRAPPER (t);
3629 if (!(flags & OEP_ADDRESS_OF))
3630 STRIP_NOPS (t);
3632 code = TREE_CODE (t);
3634 switch (code)
3636 /* Alas, constants aren't shared, so we can't rely on pointer
3637 identity. */
3638 case VOID_CST:
3639 hstate.merge_hash (0);
3640 return;
3641 case INTEGER_CST:
3642 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3643 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3644 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3645 return;
3646 case REAL_CST:
3648 unsigned int val2;
3649 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3650 val2 = rvc_zero;
3651 else
3652 val2 = real_hash (TREE_REAL_CST_PTR (t));
3653 hstate.merge_hash (val2);
3654 return;
3656 case FIXED_CST:
3658 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3659 hstate.merge_hash (val2);
3660 return;
3662 case STRING_CST:
3663 hstate.add ((const void *) TREE_STRING_POINTER (t),
3664 TREE_STRING_LENGTH (t));
3665 return;
3666 case COMPLEX_CST:
3667 hash_operand (TREE_REALPART (t), hstate, flags);
3668 hash_operand (TREE_IMAGPART (t), hstate, flags);
3669 return;
3670 case VECTOR_CST:
3672 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3673 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3674 unsigned int count = vector_cst_encoded_nelts (t);
3675 for (unsigned int i = 0; i < count; ++i)
3676 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3677 return;
3679 case SSA_NAME:
3680 /* We can just compare by pointer. */
3681 hstate.add_hwi (SSA_NAME_VERSION (t));
3682 return;
3683 case PLACEHOLDER_EXPR:
3684 /* The node itself doesn't matter. */
3685 return;
3686 case BLOCK:
3687 case OMP_CLAUSE:
3688 /* Ignore. */
3689 return;
3690 case TREE_LIST:
3691 /* A list of expressions, for a CALL_EXPR or as the elements of a
3692 VECTOR_CST. */
3693 for (; t; t = TREE_CHAIN (t))
3694 hash_operand (TREE_VALUE (t), hstate, flags);
3695 return;
3696 case CONSTRUCTOR:
3698 unsigned HOST_WIDE_INT idx;
3699 tree field, value;
3700 flags &= ~OEP_ADDRESS_OF;
3701 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3702 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3704 /* In GIMPLE the indexes can be either NULL or matching i. */
3705 if (field == NULL_TREE)
3706 field = bitsize_int (idx);
3707 hash_operand (field, hstate, flags);
3708 hash_operand (value, hstate, flags);
3710 return;
3712 case STATEMENT_LIST:
3714 tree_stmt_iterator i;
3715 for (i = tsi_start (CONST_CAST_TREE (t));
3716 !tsi_end_p (i); tsi_next (&i))
3717 hash_operand (tsi_stmt (i), hstate, flags);
3718 return;
3720 case TREE_VEC:
3721 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3722 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3723 return;
3724 case IDENTIFIER_NODE:
3725 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3726 return;
3727 case FUNCTION_DECL:
3728 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3729 Otherwise nodes that compare equal according to operand_equal_p might
3730 get different hash codes. However, don't do this for machine specific
3731 or front end builtins, since the function code is overloaded in those
3732 cases. */
3733 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3734 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3736 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3737 code = TREE_CODE (t);
3739 /* FALL THROUGH */
3740 default:
3741 if (POLY_INT_CST_P (t))
3743 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3744 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3745 return;
3747 tclass = TREE_CODE_CLASS (code);
3749 if (tclass == tcc_declaration)
3751 /* DECL's have a unique ID */
3752 hstate.add_hwi (DECL_UID (t));
3754 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3756 /* For comparisons that can be swapped, use the lower
3757 tree code. */
3758 enum tree_code ccode = swap_tree_comparison (code);
3759 if (code < ccode)
3760 ccode = code;
3761 hstate.add_object (ccode);
3762 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3763 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3765 else if (CONVERT_EXPR_CODE_P (code))
3767 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3768 operand_equal_p. */
3769 enum tree_code ccode = NOP_EXPR;
3770 hstate.add_object (ccode);
3772 /* Don't hash the type, that can lead to having nodes which
3773 compare equal according to operand_equal_p, but which
3774 have different hash codes. Make sure to include signedness
3775 in the hash computation. */
3776 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3777 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3779 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3780 else if (code == MEM_REF
3781 && (flags & OEP_ADDRESS_OF) != 0
3782 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3783 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3784 && integer_zerop (TREE_OPERAND (t, 1)))
3785 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3786 hstate, flags);
3787 /* Don't ICE on FE specific trees, or their arguments etc.
3788 during operand_equal_p hash verification. */
3789 else if (!IS_EXPR_CODE_CLASS (tclass))
3790 gcc_assert (flags & OEP_HASH_CHECK);
3791 else
3793 unsigned int sflags = flags;
3795 hstate.add_object (code);
3797 switch (code)
3799 case ADDR_EXPR:
3800 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3801 flags |= OEP_ADDRESS_OF;
3802 sflags = flags;
3803 break;
3805 case INDIRECT_REF:
3806 case MEM_REF:
3807 case TARGET_MEM_REF:
3808 flags &= ~OEP_ADDRESS_OF;
3809 sflags = flags;
3810 break;
3812 case COMPONENT_REF:
3813 if (sflags & OEP_ADDRESS_OF)
3815 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3816 if (TREE_OPERAND (t, 2))
3817 hash_operand (TREE_OPERAND (t, 2), hstate,
3818 flags & ~OEP_ADDRESS_OF);
3819 else
3821 tree field = TREE_OPERAND (t, 1);
3822 hash_operand (DECL_FIELD_OFFSET (field),
3823 hstate, flags & ~OEP_ADDRESS_OF);
3824 hash_operand (DECL_FIELD_BIT_OFFSET (field),
3825 hstate, flags & ~OEP_ADDRESS_OF);
3827 return;
3829 break;
3830 case ARRAY_REF:
3831 case ARRAY_RANGE_REF:
3832 case BIT_FIELD_REF:
3833 sflags &= ~OEP_ADDRESS_OF;
3834 break;
3836 case COND_EXPR:
3837 flags &= ~OEP_ADDRESS_OF;
3838 break;
3840 case WIDEN_MULT_PLUS_EXPR:
3841 case WIDEN_MULT_MINUS_EXPR:
3843 /* The multiplication operands are commutative. */
3844 inchash::hash one, two;
3845 hash_operand (TREE_OPERAND (t, 0), one, flags);
3846 hash_operand (TREE_OPERAND (t, 1), two, flags);
3847 hstate.add_commutative (one, two);
3848 hash_operand (TREE_OPERAND (t, 2), two, flags);
3849 return;
3852 case CALL_EXPR:
3853 if (CALL_EXPR_FN (t) == NULL_TREE)
3854 hstate.add_int (CALL_EXPR_IFN (t));
3855 break;
3857 case TARGET_EXPR:
3858 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3859 Usually different TARGET_EXPRs just should use
3860 different temporaries in their slots. */
3861 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3862 return;
3864 /* Virtual table call. */
3865 case OBJ_TYPE_REF:
3866 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3867 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3868 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3869 return;
3870 default:
3871 break;
3874 /* Don't hash the type, that can lead to having nodes which
3875 compare equal according to operand_equal_p, but which
3876 have different hash codes. */
3877 if (code == NON_LVALUE_EXPR)
3879 /* Make sure to include signness in the hash computation. */
3880 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3881 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3884 else if (commutative_tree_code (code))
3886 /* It's a commutative expression. We want to hash it the same
3887 however it appears. We do this by first hashing both operands
3888 and then rehashing based on the order of their independent
3889 hashes. */
3890 inchash::hash one, two;
3891 hash_operand (TREE_OPERAND (t, 0), one, flags);
3892 hash_operand (TREE_OPERAND (t, 1), two, flags);
3893 hstate.add_commutative (one, two);
3895 else
3896 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
3897 hash_operand (TREE_OPERAND (t, i), hstate,
3898 i == 0 ? flags : sflags);
3900 return;
3904 bool
3905 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
3906 unsigned int flags, bool *ret)
3908 /* When checking, verify at the outermost operand_equal_p call that
3909 if operand_equal_p returns non-zero then ARG0 and ARG1 has the same
3910 hash value. */
3911 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
3913 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
3915 if (arg0 != arg1)
3917 inchash::hash hstate0 (0), hstate1 (0);
3918 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
3919 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
3920 hashval_t h0 = hstate0.end ();
3921 hashval_t h1 = hstate1.end ();
3922 gcc_assert (h0 == h1);
3924 *ret = true;
3926 else
3927 *ret = false;
3929 return true;
3932 return false;
3936 static operand_compare default_compare_instance;
3938 /* Conveinece wrapper around operand_compare class because usually we do
3939 not need to play with the valueizer. */
3941 bool
3942 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3944 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
3947 namespace inchash
3950 /* Generate a hash value for an expression. This can be used iteratively
3951 by passing a previous result as the HSTATE argument.
3953 This function is intended to produce the same hash for expressions which
3954 would compare equal using operand_equal_p. */
3955 void
3956 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
3958 default_compare_instance.hash_operand (t, hstate, flags);
3963 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3964 with a different signedness or a narrower precision. */
3966 static bool
3967 operand_equal_for_comparison_p (tree arg0, tree arg1)
3969 if (operand_equal_p (arg0, arg1, 0))
3970 return true;
3972 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3973 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3974 return false;
3976 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3977 and see if the inner values are the same. This removes any
3978 signedness comparison, which doesn't matter here. */
3979 tree op0 = arg0;
3980 tree op1 = arg1;
3981 STRIP_NOPS (op0);
3982 STRIP_NOPS (op1);
3983 if (operand_equal_p (op0, op1, 0))
3984 return true;
3986 /* Discard a single widening conversion from ARG1 and see if the inner
3987 value is the same as ARG0. */
3988 if (CONVERT_EXPR_P (arg1)
3989 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3990 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3991 < TYPE_PRECISION (TREE_TYPE (arg1))
3992 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3993 return true;
3995 return false;
3998 /* See if ARG is an expression that is either a comparison or is performing
3999 arithmetic on comparisons. The comparisons must only be comparing
4000 two different values, which will be stored in *CVAL1 and *CVAL2; if
4001 they are nonzero it means that some operands have already been found.
4002 No variables may be used anywhere else in the expression except in the
4003 comparisons.
4005 If this is true, return 1. Otherwise, return zero. */
4007 static bool
4008 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4010 enum tree_code code = TREE_CODE (arg);
4011 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4013 /* We can handle some of the tcc_expression cases here. */
4014 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4015 tclass = tcc_unary;
4016 else if (tclass == tcc_expression
4017 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4018 || code == COMPOUND_EXPR))
4019 tclass = tcc_binary;
4021 switch (tclass)
4023 case tcc_unary:
4024 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4026 case tcc_binary:
4027 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4028 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4030 case tcc_constant:
4031 return true;
4033 case tcc_expression:
4034 if (code == COND_EXPR)
4035 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4036 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4037 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4038 return false;
4040 case tcc_comparison:
4041 /* First see if we can handle the first operand, then the second. For
4042 the second operand, we know *CVAL1 can't be zero. It must be that
4043 one side of the comparison is each of the values; test for the
4044 case where this isn't true by failing if the two operands
4045 are the same. */
4047 if (operand_equal_p (TREE_OPERAND (arg, 0),
4048 TREE_OPERAND (arg, 1), 0))
4049 return false;
4051 if (*cval1 == 0)
4052 *cval1 = TREE_OPERAND (arg, 0);
4053 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4055 else if (*cval2 == 0)
4056 *cval2 = TREE_OPERAND (arg, 0);
4057 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4059 else
4060 return false;
4062 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4064 else if (*cval2 == 0)
4065 *cval2 = TREE_OPERAND (arg, 1);
4066 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4068 else
4069 return false;
4071 return true;
4073 default:
4074 return false;
4078 /* ARG is a tree that is known to contain just arithmetic operations and
4079 comparisons. Evaluate the operations in the tree substituting NEW0 for
4080 any occurrence of OLD0 as an operand of a comparison and likewise for
4081 NEW1 and OLD1. */
4083 static tree
4084 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4085 tree old1, tree new1)
4087 tree type = TREE_TYPE (arg);
4088 enum tree_code code = TREE_CODE (arg);
4089 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4091 /* We can handle some of the tcc_expression cases here. */
4092 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4093 tclass = tcc_unary;
4094 else if (tclass == tcc_expression
4095 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4096 tclass = tcc_binary;
4098 switch (tclass)
4100 case tcc_unary:
4101 return fold_build1_loc (loc, code, type,
4102 eval_subst (loc, TREE_OPERAND (arg, 0),
4103 old0, new0, old1, new1));
4105 case tcc_binary:
4106 return fold_build2_loc (loc, code, type,
4107 eval_subst (loc, TREE_OPERAND (arg, 0),
4108 old0, new0, old1, new1),
4109 eval_subst (loc, TREE_OPERAND (arg, 1),
4110 old0, new0, old1, new1));
4112 case tcc_expression:
4113 switch (code)
4115 case SAVE_EXPR:
4116 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4117 old1, new1);
4119 case COMPOUND_EXPR:
4120 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4121 old1, new1);
4123 case COND_EXPR:
4124 return fold_build3_loc (loc, code, type,
4125 eval_subst (loc, TREE_OPERAND (arg, 0),
4126 old0, new0, old1, new1),
4127 eval_subst (loc, TREE_OPERAND (arg, 1),
4128 old0, new0, old1, new1),
4129 eval_subst (loc, TREE_OPERAND (arg, 2),
4130 old0, new0, old1, new1));
4131 default:
4132 break;
4134 /* Fall through - ??? */
4136 case tcc_comparison:
4138 tree arg0 = TREE_OPERAND (arg, 0);
4139 tree arg1 = TREE_OPERAND (arg, 1);
4141 /* We need to check both for exact equality and tree equality. The
4142 former will be true if the operand has a side-effect. In that
4143 case, we know the operand occurred exactly once. */
4145 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4146 arg0 = new0;
4147 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4148 arg0 = new1;
4150 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4151 arg1 = new0;
4152 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4153 arg1 = new1;
4155 return fold_build2_loc (loc, code, type, arg0, arg1);
4158 default:
4159 return arg;
4163 /* Return a tree for the case when the result of an expression is RESULT
4164 converted to TYPE and OMITTED was previously an operand of the expression
4165 but is now not needed (e.g., we folded OMITTED * 0).
4167 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4168 the conversion of RESULT to TYPE. */
4170 tree
4171 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4173 tree t = fold_convert_loc (loc, type, result);
4175 /* If the resulting operand is an empty statement, just return the omitted
4176 statement casted to void. */
4177 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4178 return build1_loc (loc, NOP_EXPR, void_type_node,
4179 fold_ignored_result (omitted));
4181 if (TREE_SIDE_EFFECTS (omitted))
4182 return build2_loc (loc, COMPOUND_EXPR, type,
4183 fold_ignored_result (omitted), t);
4185 return non_lvalue_loc (loc, t);
4188 /* Return a tree for the case when the result of an expression is RESULT
4189 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4190 of the expression but are now not needed.
4192 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4193 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4194 evaluated before OMITTED2. Otherwise, if neither has side effects,
4195 just do the conversion of RESULT to TYPE. */
4197 tree
4198 omit_two_operands_loc (location_t loc, tree type, tree result,
4199 tree omitted1, tree omitted2)
4201 tree t = fold_convert_loc (loc, type, result);
4203 if (TREE_SIDE_EFFECTS (omitted2))
4204 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4205 if (TREE_SIDE_EFFECTS (omitted1))
4206 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4208 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4212 /* Return a simplified tree node for the truth-negation of ARG. This
4213 never alters ARG itself. We assume that ARG is an operation that
4214 returns a truth value (0 or 1).
4216 FIXME: one would think we would fold the result, but it causes
4217 problems with the dominator optimizer. */
4219 static tree
4220 fold_truth_not_expr (location_t loc, tree arg)
4222 tree type = TREE_TYPE (arg);
4223 enum tree_code code = TREE_CODE (arg);
4224 location_t loc1, loc2;
4226 /* If this is a comparison, we can simply invert it, except for
4227 floating-point non-equality comparisons, in which case we just
4228 enclose a TRUTH_NOT_EXPR around what we have. */
4230 if (TREE_CODE_CLASS (code) == tcc_comparison)
4232 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4233 if (FLOAT_TYPE_P (op_type)
4234 && flag_trapping_math
4235 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4236 && code != NE_EXPR && code != EQ_EXPR)
4237 return NULL_TREE;
4239 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4240 if (code == ERROR_MARK)
4241 return NULL_TREE;
4243 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4244 TREE_OPERAND (arg, 1));
4245 if (TREE_NO_WARNING (arg))
4246 TREE_NO_WARNING (ret) = 1;
4247 return ret;
4250 switch (code)
4252 case INTEGER_CST:
4253 return constant_boolean_node (integer_zerop (arg), type);
4255 case TRUTH_AND_EXPR:
4256 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4257 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4258 return build2_loc (loc, TRUTH_OR_EXPR, type,
4259 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4260 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4262 case TRUTH_OR_EXPR:
4263 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4264 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4265 return build2_loc (loc, TRUTH_AND_EXPR, type,
4266 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4267 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4269 case TRUTH_XOR_EXPR:
4270 /* Here we can invert either operand. We invert the first operand
4271 unless the second operand is a TRUTH_NOT_EXPR in which case our
4272 result is the XOR of the first operand with the inside of the
4273 negation of the second operand. */
4275 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4276 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4277 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4278 else
4279 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4280 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4281 TREE_OPERAND (arg, 1));
4283 case TRUTH_ANDIF_EXPR:
4284 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4285 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4286 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4287 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4288 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4290 case TRUTH_ORIF_EXPR:
4291 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4292 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4293 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4294 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4295 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4297 case TRUTH_NOT_EXPR:
4298 return TREE_OPERAND (arg, 0);
4300 case COND_EXPR:
4302 tree arg1 = TREE_OPERAND (arg, 1);
4303 tree arg2 = TREE_OPERAND (arg, 2);
4305 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4306 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4308 /* A COND_EXPR may have a throw as one operand, which
4309 then has void type. Just leave void operands
4310 as they are. */
4311 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4312 VOID_TYPE_P (TREE_TYPE (arg1))
4313 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4314 VOID_TYPE_P (TREE_TYPE (arg2))
4315 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4318 case COMPOUND_EXPR:
4319 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4320 return build2_loc (loc, COMPOUND_EXPR, type,
4321 TREE_OPERAND (arg, 0),
4322 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4324 case NON_LVALUE_EXPR:
4325 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4326 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4328 CASE_CONVERT:
4329 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4330 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4332 /* fall through */
4334 case FLOAT_EXPR:
4335 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4336 return build1_loc (loc, TREE_CODE (arg), type,
4337 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4339 case BIT_AND_EXPR:
4340 if (!integer_onep (TREE_OPERAND (arg, 1)))
4341 return NULL_TREE;
4342 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4344 case SAVE_EXPR:
4345 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4347 case CLEANUP_POINT_EXPR:
4348 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4349 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4350 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4352 default:
4353 return NULL_TREE;
4357 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4358 assume that ARG is an operation that returns a truth value (0 or 1
4359 for scalars, 0 or -1 for vectors). Return the folded expression if
4360 folding is successful. Otherwise, return NULL_TREE. */
4362 static tree
4363 fold_invert_truthvalue (location_t loc, tree arg)
4365 tree type = TREE_TYPE (arg);
4366 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4367 ? BIT_NOT_EXPR
4368 : TRUTH_NOT_EXPR,
4369 type, arg);
4372 /* Return a simplified tree node for the truth-negation of ARG. This
4373 never alters ARG itself. We assume that ARG is an operation that
4374 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4376 tree
4377 invert_truthvalue_loc (location_t loc, tree arg)
4379 if (TREE_CODE (arg) == ERROR_MARK)
4380 return arg;
4382 tree type = TREE_TYPE (arg);
4383 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4384 ? BIT_NOT_EXPR
4385 : TRUTH_NOT_EXPR,
4386 type, arg);
4389 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4390 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4391 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4392 is the original memory reference used to preserve the alias set of
4393 the access. */
4395 static tree
4396 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4397 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4398 int unsignedp, int reversep)
4400 tree result, bftype;
4402 /* Attempt not to lose the access path if possible. */
4403 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4405 tree ninner = TREE_OPERAND (orig_inner, 0);
4406 machine_mode nmode;
4407 poly_int64 nbitsize, nbitpos;
4408 tree noffset;
4409 int nunsignedp, nreversep, nvolatilep = 0;
4410 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4411 &noffset, &nmode, &nunsignedp,
4412 &nreversep, &nvolatilep);
4413 if (base == inner
4414 && noffset == NULL_TREE
4415 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4416 && !reversep
4417 && !nreversep
4418 && !nvolatilep)
4420 inner = ninner;
4421 bitpos -= nbitpos;
4425 alias_set_type iset = get_alias_set (orig_inner);
4426 if (iset == 0 && get_alias_set (inner) != iset)
4427 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4428 build_fold_addr_expr (inner),
4429 build_int_cst (ptr_type_node, 0));
4431 if (known_eq (bitpos, 0) && !reversep)
4433 tree size = TYPE_SIZE (TREE_TYPE (inner));
4434 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4435 || POINTER_TYPE_P (TREE_TYPE (inner)))
4436 && tree_fits_shwi_p (size)
4437 && tree_to_shwi (size) == bitsize)
4438 return fold_convert_loc (loc, type, inner);
4441 bftype = type;
4442 if (TYPE_PRECISION (bftype) != bitsize
4443 || TYPE_UNSIGNED (bftype) == !unsignedp)
4444 bftype = build_nonstandard_integer_type (bitsize, 0);
4446 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4447 bitsize_int (bitsize), bitsize_int (bitpos));
4448 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4450 if (bftype != type)
4451 result = fold_convert_loc (loc, type, result);
4453 return result;
4456 /* Optimize a bit-field compare.
4458 There are two cases: First is a compare against a constant and the
4459 second is a comparison of two items where the fields are at the same
4460 bit position relative to the start of a chunk (byte, halfword, word)
4461 large enough to contain it. In these cases we can avoid the shift
4462 implicit in bitfield extractions.
4464 For constants, we emit a compare of the shifted constant with the
4465 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4466 compared. For two fields at the same position, we do the ANDs with the
4467 similar mask and compare the result of the ANDs.
4469 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4470 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4471 are the left and right operands of the comparison, respectively.
4473 If the optimization described above can be done, we return the resulting
4474 tree. Otherwise we return zero. */
4476 static tree
4477 optimize_bit_field_compare (location_t loc, enum tree_code code,
4478 tree compare_type, tree lhs, tree rhs)
4480 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4481 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4482 tree type = TREE_TYPE (lhs);
4483 tree unsigned_type;
4484 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4485 machine_mode lmode, rmode;
4486 scalar_int_mode nmode;
4487 int lunsignedp, runsignedp;
4488 int lreversep, rreversep;
4489 int lvolatilep = 0, rvolatilep = 0;
4490 tree linner, rinner = NULL_TREE;
4491 tree mask;
4492 tree offset;
4494 /* Get all the information about the extractions being done. If the bit size
4495 is the same as the size of the underlying object, we aren't doing an
4496 extraction at all and so can do nothing. We also don't want to
4497 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4498 then will no longer be able to replace it. */
4499 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4500 &lunsignedp, &lreversep, &lvolatilep);
4501 if (linner == lhs
4502 || !known_size_p (plbitsize)
4503 || !plbitsize.is_constant (&lbitsize)
4504 || !plbitpos.is_constant (&lbitpos)
4505 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4506 || offset != 0
4507 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4508 || lvolatilep)
4509 return 0;
4511 if (const_p)
4512 rreversep = lreversep;
4513 else
4515 /* If this is not a constant, we can only do something if bit positions,
4516 sizes, signedness and storage order are the same. */
4517 rinner
4518 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4519 &runsignedp, &rreversep, &rvolatilep);
4521 if (rinner == rhs
4522 || maybe_ne (lbitpos, rbitpos)
4523 || maybe_ne (lbitsize, rbitsize)
4524 || lunsignedp != runsignedp
4525 || lreversep != rreversep
4526 || offset != 0
4527 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4528 || rvolatilep)
4529 return 0;
4532 /* Honor the C++ memory model and mimic what RTL expansion does. */
4533 poly_uint64 bitstart = 0;
4534 poly_uint64 bitend = 0;
4535 if (TREE_CODE (lhs) == COMPONENT_REF)
4537 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4538 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4539 return 0;
4542 /* See if we can find a mode to refer to this field. We should be able to,
4543 but fail if we can't. */
4544 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4545 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4546 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4547 TYPE_ALIGN (TREE_TYPE (rinner))),
4548 BITS_PER_WORD, false, &nmode))
4549 return 0;
4551 /* Set signed and unsigned types of the precision of this mode for the
4552 shifts below. */
4553 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4555 /* Compute the bit position and size for the new reference and our offset
4556 within it. If the new reference is the same size as the original, we
4557 won't optimize anything, so return zero. */
4558 nbitsize = GET_MODE_BITSIZE (nmode);
4559 nbitpos = lbitpos & ~ (nbitsize - 1);
4560 lbitpos -= nbitpos;
4561 if (nbitsize == lbitsize)
4562 return 0;
4564 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4565 lbitpos = nbitsize - lbitsize - lbitpos;
4567 /* Make the mask to be used against the extracted field. */
4568 mask = build_int_cst_type (unsigned_type, -1);
4569 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4570 mask = const_binop (RSHIFT_EXPR, mask,
4571 size_int (nbitsize - lbitsize - lbitpos));
4573 if (! const_p)
4575 if (nbitpos < 0)
4576 return 0;
4578 /* If not comparing with constant, just rework the comparison
4579 and return. */
4580 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4581 nbitsize, nbitpos, 1, lreversep);
4582 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4583 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4584 nbitsize, nbitpos, 1, rreversep);
4585 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4586 return fold_build2_loc (loc, code, compare_type, t1, t2);
4589 /* Otherwise, we are handling the constant case. See if the constant is too
4590 big for the field. Warn and return a tree for 0 (false) if so. We do
4591 this not only for its own sake, but to avoid having to test for this
4592 error case below. If we didn't, we might generate wrong code.
4594 For unsigned fields, the constant shifted right by the field length should
4595 be all zero. For signed fields, the high-order bits should agree with
4596 the sign bit. */
4598 if (lunsignedp)
4600 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4602 warning (0, "comparison is always %d due to width of bit-field",
4603 code == NE_EXPR);
4604 return constant_boolean_node (code == NE_EXPR, compare_type);
4607 else
4609 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4610 if (tem != 0 && tem != -1)
4612 warning (0, "comparison is always %d due to width of bit-field",
4613 code == NE_EXPR);
4614 return constant_boolean_node (code == NE_EXPR, compare_type);
4618 if (nbitpos < 0)
4619 return 0;
4621 /* Single-bit compares should always be against zero. */
4622 if (lbitsize == 1 && ! integer_zerop (rhs))
4624 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4625 rhs = build_int_cst (type, 0);
4628 /* Make a new bitfield reference, shift the constant over the
4629 appropriate number of bits and mask it with the computed mask
4630 (in case this was a signed field). If we changed it, make a new one. */
4631 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4632 nbitsize, nbitpos, 1, lreversep);
4634 rhs = const_binop (BIT_AND_EXPR,
4635 const_binop (LSHIFT_EXPR,
4636 fold_convert_loc (loc, unsigned_type, rhs),
4637 size_int (lbitpos)),
4638 mask);
4640 lhs = build2_loc (loc, code, compare_type,
4641 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4642 return lhs;
4645 /* Subroutine for fold_truth_andor_1: decode a field reference.
4647 If EXP is a comparison reference, we return the innermost reference.
4649 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4650 set to the starting bit number.
4652 If the innermost field can be completely contained in a mode-sized
4653 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4655 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
4656 otherwise it is not changed.
4658 *PUNSIGNEDP is set to the signedness of the field.
4660 *PREVERSEP is set to the storage order of the field.
4662 *PMASK is set to the mask used. This is either contained in a
4663 BIT_AND_EXPR or derived from the width of the field.
4665 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4667 Return 0 if this is not a component reference or is one that we can't
4668 do anything with. */
4670 static tree
4671 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4672 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4673 int *punsignedp, int *preversep, int *pvolatilep,
4674 tree *pmask, tree *pand_mask)
4676 tree exp = *exp_;
4677 tree outer_type = 0;
4678 tree and_mask = 0;
4679 tree mask, inner, offset;
4680 tree unsigned_type;
4681 unsigned int precision;
4683 /* All the optimizations using this function assume integer fields.
4684 There are problems with FP fields since the type_for_size call
4685 below can fail for, e.g., XFmode. */
4686 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4687 return NULL_TREE;
4689 /* We are interested in the bare arrangement of bits, so strip everything
4690 that doesn't affect the machine mode. However, record the type of the
4691 outermost expression if it may matter below. */
4692 if (CONVERT_EXPR_P (exp)
4693 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4694 outer_type = TREE_TYPE (exp);
4695 STRIP_NOPS (exp);
4697 if (TREE_CODE (exp) == BIT_AND_EXPR)
4699 and_mask = TREE_OPERAND (exp, 1);
4700 exp = TREE_OPERAND (exp, 0);
4701 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4702 if (TREE_CODE (and_mask) != INTEGER_CST)
4703 return NULL_TREE;
4706 poly_int64 poly_bitsize, poly_bitpos;
4707 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4708 pmode, punsignedp, preversep, pvolatilep);
4709 if ((inner == exp && and_mask == 0)
4710 || !poly_bitsize.is_constant (pbitsize)
4711 || !poly_bitpos.is_constant (pbitpos)
4712 || *pbitsize < 0
4713 || offset != 0
4714 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4715 /* Reject out-of-bound accesses (PR79731). */
4716 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4717 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4718 *pbitpos + *pbitsize) < 0))
4719 return NULL_TREE;
4721 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4722 if (unsigned_type == NULL_TREE)
4723 return NULL_TREE;
4725 *exp_ = exp;
4727 /* If the number of bits in the reference is the same as the bitsize of
4728 the outer type, then the outer type gives the signedness. Otherwise
4729 (in case of a small bitfield) the signedness is unchanged. */
4730 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4731 *punsignedp = TYPE_UNSIGNED (outer_type);
4733 /* Compute the mask to access the bitfield. */
4734 precision = TYPE_PRECISION (unsigned_type);
4736 mask = build_int_cst_type (unsigned_type, -1);
4738 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4739 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4741 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4742 if (and_mask != 0)
4743 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4744 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4746 *pmask = mask;
4747 *pand_mask = and_mask;
4748 return inner;
4751 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4752 bit positions and MASK is SIGNED. */
4754 static bool
4755 all_ones_mask_p (const_tree mask, unsigned int size)
4757 tree type = TREE_TYPE (mask);
4758 unsigned int precision = TYPE_PRECISION (type);
4760 /* If this function returns true when the type of the mask is
4761 UNSIGNED, then there will be errors. In particular see
4762 gcc.c-torture/execute/990326-1.c. There does not appear to be
4763 any documentation paper trail as to why this is so. But the pre
4764 wide-int worked with that restriction and it has been preserved
4765 here. */
4766 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4767 return false;
4769 return wi::mask (size, false, precision) == wi::to_wide (mask);
4772 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4773 represents the sign bit of EXP's type. If EXP represents a sign
4774 or zero extension, also test VAL against the unextended type.
4775 The return value is the (sub)expression whose sign bit is VAL,
4776 or NULL_TREE otherwise. */
4778 tree
4779 sign_bit_p (tree exp, const_tree val)
4781 int width;
4782 tree t;
4784 /* Tree EXP must have an integral type. */
4785 t = TREE_TYPE (exp);
4786 if (! INTEGRAL_TYPE_P (t))
4787 return NULL_TREE;
4789 /* Tree VAL must be an integer constant. */
4790 if (TREE_CODE (val) != INTEGER_CST
4791 || TREE_OVERFLOW (val))
4792 return NULL_TREE;
4794 width = TYPE_PRECISION (t);
4795 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4796 return exp;
4798 /* Handle extension from a narrower type. */
4799 if (TREE_CODE (exp) == NOP_EXPR
4800 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4801 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4803 return NULL_TREE;
4806 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4807 to be evaluated unconditionally. */
4809 static bool
4810 simple_operand_p (const_tree exp)
4812 /* Strip any conversions that don't change the machine mode. */
4813 STRIP_NOPS (exp);
4815 return (CONSTANT_CLASS_P (exp)
4816 || TREE_CODE (exp) == SSA_NAME
4817 || (DECL_P (exp)
4818 && ! TREE_ADDRESSABLE (exp)
4819 && ! TREE_THIS_VOLATILE (exp)
4820 && ! DECL_NONLOCAL (exp)
4821 /* Don't regard global variables as simple. They may be
4822 allocated in ways unknown to the compiler (shared memory,
4823 #pragma weak, etc). */
4824 && ! TREE_PUBLIC (exp)
4825 && ! DECL_EXTERNAL (exp)
4826 /* Weakrefs are not safe to be read, since they can be NULL.
4827 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4828 have DECL_WEAK flag set. */
4829 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4830 /* Loading a static variable is unduly expensive, but global
4831 registers aren't expensive. */
4832 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4835 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4836 to be evaluated unconditionally.
4837 I addition to simple_operand_p, we assume that comparisons, conversions,
4838 and logic-not operations are simple, if their operands are simple, too. */
4840 static bool
4841 simple_operand_p_2 (tree exp)
4843 enum tree_code code;
4845 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4846 return false;
4848 while (CONVERT_EXPR_P (exp))
4849 exp = TREE_OPERAND (exp, 0);
4851 code = TREE_CODE (exp);
4853 if (TREE_CODE_CLASS (code) == tcc_comparison)
4854 return (simple_operand_p (TREE_OPERAND (exp, 0))
4855 && simple_operand_p (TREE_OPERAND (exp, 1)));
4857 if (code == TRUTH_NOT_EXPR)
4858 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4860 return simple_operand_p (exp);
4864 /* The following functions are subroutines to fold_range_test and allow it to
4865 try to change a logical combination of comparisons into a range test.
4867 For example, both
4868 X == 2 || X == 3 || X == 4 || X == 5
4870 X >= 2 && X <= 5
4871 are converted to
4872 (unsigned) (X - 2) <= 3
4874 We describe each set of comparisons as being either inside or outside
4875 a range, using a variable named like IN_P, and then describe the
4876 range with a lower and upper bound. If one of the bounds is omitted,
4877 it represents either the highest or lowest value of the type.
4879 In the comments below, we represent a range by two numbers in brackets
4880 preceded by a "+" to designate being inside that range, or a "-" to
4881 designate being outside that range, so the condition can be inverted by
4882 flipping the prefix. An omitted bound is represented by a "-". For
4883 example, "- [-, 10]" means being outside the range starting at the lowest
4884 possible value and ending at 10, in other words, being greater than 10.
4885 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4886 always false.
4888 We set up things so that the missing bounds are handled in a consistent
4889 manner so neither a missing bound nor "true" and "false" need to be
4890 handled using a special case. */
4892 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4893 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4894 and UPPER1_P are nonzero if the respective argument is an upper bound
4895 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4896 must be specified for a comparison. ARG1 will be converted to ARG0's
4897 type if both are specified. */
4899 static tree
4900 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4901 tree arg1, int upper1_p)
4903 tree tem;
4904 int result;
4905 int sgn0, sgn1;
4907 /* If neither arg represents infinity, do the normal operation.
4908 Else, if not a comparison, return infinity. Else handle the special
4909 comparison rules. Note that most of the cases below won't occur, but
4910 are handled for consistency. */
4912 if (arg0 != 0 && arg1 != 0)
4914 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4915 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4916 STRIP_NOPS (tem);
4917 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4920 if (TREE_CODE_CLASS (code) != tcc_comparison)
4921 return 0;
4923 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4924 for neither. In real maths, we cannot assume open ended ranges are
4925 the same. But, this is computer arithmetic, where numbers are finite.
4926 We can therefore make the transformation of any unbounded range with
4927 the value Z, Z being greater than any representable number. This permits
4928 us to treat unbounded ranges as equal. */
4929 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4930 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4931 switch (code)
4933 case EQ_EXPR:
4934 result = sgn0 == sgn1;
4935 break;
4936 case NE_EXPR:
4937 result = sgn0 != sgn1;
4938 break;
4939 case LT_EXPR:
4940 result = sgn0 < sgn1;
4941 break;
4942 case LE_EXPR:
4943 result = sgn0 <= sgn1;
4944 break;
4945 case GT_EXPR:
4946 result = sgn0 > sgn1;
4947 break;
4948 case GE_EXPR:
4949 result = sgn0 >= sgn1;
4950 break;
4951 default:
4952 gcc_unreachable ();
4955 return constant_boolean_node (result, type);
4958 /* Helper routine for make_range. Perform one step for it, return
4959 new expression if the loop should continue or NULL_TREE if it should
4960 stop. */
4962 tree
4963 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4964 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4965 bool *strict_overflow_p)
4967 tree arg0_type = TREE_TYPE (arg0);
4968 tree n_low, n_high, low = *p_low, high = *p_high;
4969 int in_p = *p_in_p, n_in_p;
4971 switch (code)
4973 case TRUTH_NOT_EXPR:
4974 /* We can only do something if the range is testing for zero. */
4975 if (low == NULL_TREE || high == NULL_TREE
4976 || ! integer_zerop (low) || ! integer_zerop (high))
4977 return NULL_TREE;
4978 *p_in_p = ! in_p;
4979 return arg0;
4981 case EQ_EXPR: case NE_EXPR:
4982 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4983 /* We can only do something if the range is testing for zero
4984 and if the second operand is an integer constant. Note that
4985 saying something is "in" the range we make is done by
4986 complementing IN_P since it will set in the initial case of
4987 being not equal to zero; "out" is leaving it alone. */
4988 if (low == NULL_TREE || high == NULL_TREE
4989 || ! integer_zerop (low) || ! integer_zerop (high)
4990 || TREE_CODE (arg1) != INTEGER_CST)
4991 return NULL_TREE;
4993 switch (code)
4995 case NE_EXPR: /* - [c, c] */
4996 low = high = arg1;
4997 break;
4998 case EQ_EXPR: /* + [c, c] */
4999 in_p = ! in_p, low = high = arg1;
5000 break;
5001 case GT_EXPR: /* - [-, c] */
5002 low = 0, high = arg1;
5003 break;
5004 case GE_EXPR: /* + [c, -] */
5005 in_p = ! in_p, low = arg1, high = 0;
5006 break;
5007 case LT_EXPR: /* - [c, -] */
5008 low = arg1, high = 0;
5009 break;
5010 case LE_EXPR: /* + [-, c] */
5011 in_p = ! in_p, low = 0, high = arg1;
5012 break;
5013 default:
5014 gcc_unreachable ();
5017 /* If this is an unsigned comparison, we also know that EXP is
5018 greater than or equal to zero. We base the range tests we make
5019 on that fact, so we record it here so we can parse existing
5020 range tests. We test arg0_type since often the return type
5021 of, e.g. EQ_EXPR, is boolean. */
5022 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5024 if (! merge_ranges (&n_in_p, &n_low, &n_high,
5025 in_p, low, high, 1,
5026 build_int_cst (arg0_type, 0),
5027 NULL_TREE))
5028 return NULL_TREE;
5030 in_p = n_in_p, low = n_low, high = n_high;
5032 /* If the high bound is missing, but we have a nonzero low
5033 bound, reverse the range so it goes from zero to the low bound
5034 minus 1. */
5035 if (high == 0 && low && ! integer_zerop (low))
5037 in_p = ! in_p;
5038 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5039 build_int_cst (TREE_TYPE (low), 1), 0);
5040 low = build_int_cst (arg0_type, 0);
5044 *p_low = low;
5045 *p_high = high;
5046 *p_in_p = in_p;
5047 return arg0;
5049 case NEGATE_EXPR:
5050 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5051 low and high are non-NULL, then normalize will DTRT. */
5052 if (!TYPE_UNSIGNED (arg0_type)
5053 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5055 if (low == NULL_TREE)
5056 low = TYPE_MIN_VALUE (arg0_type);
5057 if (high == NULL_TREE)
5058 high = TYPE_MAX_VALUE (arg0_type);
5061 /* (-x) IN [a,b] -> x in [-b, -a] */
5062 n_low = range_binop (MINUS_EXPR, exp_type,
5063 build_int_cst (exp_type, 0),
5064 0, high, 1);
5065 n_high = range_binop (MINUS_EXPR, exp_type,
5066 build_int_cst (exp_type, 0),
5067 0, low, 0);
5068 if (n_high != 0 && TREE_OVERFLOW (n_high))
5069 return NULL_TREE;
5070 goto normalize;
5072 case BIT_NOT_EXPR:
5073 /* ~ X -> -X - 1 */
5074 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5075 build_int_cst (exp_type, 1));
5077 case PLUS_EXPR:
5078 case MINUS_EXPR:
5079 if (TREE_CODE (arg1) != INTEGER_CST)
5080 return NULL_TREE;
5082 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5083 move a constant to the other side. */
5084 if (!TYPE_UNSIGNED (arg0_type)
5085 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5086 return NULL_TREE;
5088 /* If EXP is signed, any overflow in the computation is undefined,
5089 so we don't worry about it so long as our computations on
5090 the bounds don't overflow. For unsigned, overflow is defined
5091 and this is exactly the right thing. */
5092 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5093 arg0_type, low, 0, arg1, 0);
5094 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5095 arg0_type, high, 1, arg1, 0);
5096 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5097 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5098 return NULL_TREE;
5100 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5101 *strict_overflow_p = true;
5103 normalize:
5104 /* Check for an unsigned range which has wrapped around the maximum
5105 value thus making n_high < n_low, and normalize it. */
5106 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5108 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5109 build_int_cst (TREE_TYPE (n_high), 1), 0);
5110 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5111 build_int_cst (TREE_TYPE (n_low), 1), 0);
5113 /* If the range is of the form +/- [ x+1, x ], we won't
5114 be able to normalize it. But then, it represents the
5115 whole range or the empty set, so make it
5116 +/- [ -, - ]. */
5117 if (tree_int_cst_equal (n_low, low)
5118 && tree_int_cst_equal (n_high, high))
5119 low = high = 0;
5120 else
5121 in_p = ! in_p;
5123 else
5124 low = n_low, high = n_high;
5126 *p_low = low;
5127 *p_high = high;
5128 *p_in_p = in_p;
5129 return arg0;
5131 CASE_CONVERT:
5132 case NON_LVALUE_EXPR:
5133 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5134 return NULL_TREE;
5136 if (! INTEGRAL_TYPE_P (arg0_type)
5137 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5138 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5139 return NULL_TREE;
5141 n_low = low, n_high = high;
5143 if (n_low != 0)
5144 n_low = fold_convert_loc (loc, arg0_type, n_low);
5146 if (n_high != 0)
5147 n_high = fold_convert_loc (loc, arg0_type, n_high);
5149 /* If we're converting arg0 from an unsigned type, to exp,
5150 a signed type, we will be doing the comparison as unsigned.
5151 The tests above have already verified that LOW and HIGH
5152 are both positive.
5154 So we have to ensure that we will handle large unsigned
5155 values the same way that the current signed bounds treat
5156 negative values. */
5158 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5160 tree high_positive;
5161 tree equiv_type;
5162 /* For fixed-point modes, we need to pass the saturating flag
5163 as the 2nd parameter. */
5164 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5165 equiv_type
5166 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5167 TYPE_SATURATING (arg0_type));
5168 else
5169 equiv_type
5170 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5172 /* A range without an upper bound is, naturally, unbounded.
5173 Since convert would have cropped a very large value, use
5174 the max value for the destination type. */
5175 high_positive
5176 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5177 : TYPE_MAX_VALUE (arg0_type);
5179 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5180 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5181 fold_convert_loc (loc, arg0_type,
5182 high_positive),
5183 build_int_cst (arg0_type, 1));
5185 /* If the low bound is specified, "and" the range with the
5186 range for which the original unsigned value will be
5187 positive. */
5188 if (low != 0)
5190 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5191 1, fold_convert_loc (loc, arg0_type,
5192 integer_zero_node),
5193 high_positive))
5194 return NULL_TREE;
5196 in_p = (n_in_p == in_p);
5198 else
5200 /* Otherwise, "or" the range with the range of the input
5201 that will be interpreted as negative. */
5202 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5203 1, fold_convert_loc (loc, arg0_type,
5204 integer_zero_node),
5205 high_positive))
5206 return NULL_TREE;
5208 in_p = (in_p != n_in_p);
5212 *p_low = n_low;
5213 *p_high = n_high;
5214 *p_in_p = in_p;
5215 return arg0;
5217 default:
5218 return NULL_TREE;
5222 /* Given EXP, a logical expression, set the range it is testing into
5223 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5224 actually being tested. *PLOW and *PHIGH will be made of the same
5225 type as the returned expression. If EXP is not a comparison, we
5226 will most likely not be returning a useful value and range. Set
5227 *STRICT_OVERFLOW_P to true if the return value is only valid
5228 because signed overflow is undefined; otherwise, do not change
5229 *STRICT_OVERFLOW_P. */
5231 tree
5232 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5233 bool *strict_overflow_p)
5235 enum tree_code code;
5236 tree arg0, arg1 = NULL_TREE;
5237 tree exp_type, nexp;
5238 int in_p;
5239 tree low, high;
5240 location_t loc = EXPR_LOCATION (exp);
5242 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5243 and see if we can refine the range. Some of the cases below may not
5244 happen, but it doesn't seem worth worrying about this. We "continue"
5245 the outer loop when we've changed something; otherwise we "break"
5246 the switch, which will "break" the while. */
5248 in_p = 0;
5249 low = high = build_int_cst (TREE_TYPE (exp), 0);
5251 while (1)
5253 code = TREE_CODE (exp);
5254 exp_type = TREE_TYPE (exp);
5255 arg0 = NULL_TREE;
5257 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5259 if (TREE_OPERAND_LENGTH (exp) > 0)
5260 arg0 = TREE_OPERAND (exp, 0);
5261 if (TREE_CODE_CLASS (code) == tcc_binary
5262 || TREE_CODE_CLASS (code) == tcc_comparison
5263 || (TREE_CODE_CLASS (code) == tcc_expression
5264 && TREE_OPERAND_LENGTH (exp) > 1))
5265 arg1 = TREE_OPERAND (exp, 1);
5267 if (arg0 == NULL_TREE)
5268 break;
5270 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5271 &high, &in_p, strict_overflow_p);
5272 if (nexp == NULL_TREE)
5273 break;
5274 exp = nexp;
5277 /* If EXP is a constant, we can evaluate whether this is true or false. */
5278 if (TREE_CODE (exp) == INTEGER_CST)
5280 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5281 exp, 0, low, 0))
5282 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5283 exp, 1, high, 1)));
5284 low = high = 0;
5285 exp = 0;
5288 *pin_p = in_p, *plow = low, *phigh = high;
5289 return exp;
5292 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
5293 a bitwise check i.e. when
5294 LOW == 0xXX...X00...0
5295 HIGH == 0xXX...X11...1
5296 Return corresponding mask in MASK and stem in VALUE. */
5298 static bool
5299 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5300 tree *value)
5302 if (TREE_CODE (low) != INTEGER_CST
5303 || TREE_CODE (high) != INTEGER_CST)
5304 return false;
5306 unsigned prec = TYPE_PRECISION (type);
5307 wide_int lo = wi::to_wide (low, prec);
5308 wide_int hi = wi::to_wide (high, prec);
5310 wide_int end_mask = lo ^ hi;
5311 if ((end_mask & (end_mask + 1)) != 0
5312 || (lo & end_mask) != 0)
5313 return false;
5315 wide_int stem_mask = ~end_mask;
5316 wide_int stem = lo & stem_mask;
5317 if (stem != (hi & stem_mask))
5318 return false;
5320 *mask = wide_int_to_tree (type, stem_mask);
5321 *value = wide_int_to_tree (type, stem);
5323 return true;
5326 /* Helper routine for build_range_check and match.pd. Return the type to
5327 perform the check or NULL if it shouldn't be optimized. */
5329 tree
5330 range_check_type (tree etype)
5332 /* First make sure that arithmetics in this type is valid, then make sure
5333 that it wraps around. */
5334 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5335 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5337 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5339 tree utype, minv, maxv;
5341 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5342 for the type in question, as we rely on this here. */
5343 utype = unsigned_type_for (etype);
5344 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5345 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5346 build_int_cst (TREE_TYPE (maxv), 1), 1);
5347 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5349 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5350 minv, 1, maxv, 1)))
5351 etype = utype;
5352 else
5353 return NULL_TREE;
5355 else if (POINTER_TYPE_P (etype))
5356 etype = unsigned_type_for (etype);
5357 return etype;
5360 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5361 type, TYPE, return an expression to test if EXP is in (or out of, depending
5362 on IN_P) the range. Return 0 if the test couldn't be created. */
5364 tree
5365 build_range_check (location_t loc, tree type, tree exp, int in_p,
5366 tree low, tree high)
5368 tree etype = TREE_TYPE (exp), mask, value;
5370 /* Disable this optimization for function pointer expressions
5371 on targets that require function pointer canonicalization. */
5372 if (targetm.have_canonicalize_funcptr_for_compare ()
5373 && POINTER_TYPE_P (etype)
5374 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5375 return NULL_TREE;
5377 if (! in_p)
5379 value = build_range_check (loc, type, exp, 1, low, high);
5380 if (value != 0)
5381 return invert_truthvalue_loc (loc, value);
5383 return 0;
5386 if (low == 0 && high == 0)
5387 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5389 if (low == 0)
5390 return fold_build2_loc (loc, LE_EXPR, type, exp,
5391 fold_convert_loc (loc, etype, high));
5393 if (high == 0)
5394 return fold_build2_loc (loc, GE_EXPR, type, exp,
5395 fold_convert_loc (loc, etype, low));
5397 if (operand_equal_p (low, high, 0))
5398 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5399 fold_convert_loc (loc, etype, low));
5401 if (TREE_CODE (exp) == BIT_AND_EXPR
5402 && maskable_range_p (low, high, etype, &mask, &value))
5403 return fold_build2_loc (loc, EQ_EXPR, type,
5404 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5405 exp, mask),
5406 value);
5408 if (integer_zerop (low))
5410 if (! TYPE_UNSIGNED (etype))
5412 etype = unsigned_type_for (etype);
5413 high = fold_convert_loc (loc, etype, high);
5414 exp = fold_convert_loc (loc, etype, exp);
5416 return build_range_check (loc, type, exp, 1, 0, high);
5419 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5420 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5422 int prec = TYPE_PRECISION (etype);
5424 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5426 if (TYPE_UNSIGNED (etype))
5428 tree signed_etype = signed_type_for (etype);
5429 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5430 etype
5431 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5432 else
5433 etype = signed_etype;
5434 exp = fold_convert_loc (loc, etype, exp);
5436 return fold_build2_loc (loc, GT_EXPR, type, exp,
5437 build_int_cst (etype, 0));
5441 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5442 This requires wrap-around arithmetics for the type of the expression. */
5443 etype = range_check_type (etype);
5444 if (etype == NULL_TREE)
5445 return NULL_TREE;
5447 high = fold_convert_loc (loc, etype, high);
5448 low = fold_convert_loc (loc, etype, low);
5449 exp = fold_convert_loc (loc, etype, exp);
5451 value = const_binop (MINUS_EXPR, high, low);
5453 if (value != 0 && !TREE_OVERFLOW (value))
5454 return build_range_check (loc, type,
5455 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5456 1, build_int_cst (etype, 0), value);
5458 return 0;
5461 /* Return the predecessor of VAL in its type, handling the infinite case. */
5463 static tree
5464 range_predecessor (tree val)
5466 tree type = TREE_TYPE (val);
5468 if (INTEGRAL_TYPE_P (type)
5469 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5470 return 0;
5471 else
5472 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5473 build_int_cst (TREE_TYPE (val), 1), 0);
5476 /* Return the successor of VAL in its type, handling the infinite case. */
5478 static tree
5479 range_successor (tree val)
5481 tree type = TREE_TYPE (val);
5483 if (INTEGRAL_TYPE_P (type)
5484 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5485 return 0;
5486 else
5487 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5488 build_int_cst (TREE_TYPE (val), 1), 0);
5491 /* Given two ranges, see if we can merge them into one. Return 1 if we
5492 can, 0 if we can't. Set the output range into the specified parameters. */
5494 bool
5495 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5496 tree high0, int in1_p, tree low1, tree high1)
5498 int no_overlap;
5499 int subset;
5500 int temp;
5501 tree tem;
5502 int in_p;
5503 tree low, high;
5504 int lowequal = ((low0 == 0 && low1 == 0)
5505 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5506 low0, 0, low1, 0)));
5507 int highequal = ((high0 == 0 && high1 == 0)
5508 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5509 high0, 1, high1, 1)));
5511 /* Make range 0 be the range that starts first, or ends last if they
5512 start at the same value. Swap them if it isn't. */
5513 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5514 low0, 0, low1, 0))
5515 || (lowequal
5516 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5517 high1, 1, high0, 1))))
5519 temp = in0_p, in0_p = in1_p, in1_p = temp;
5520 tem = low0, low0 = low1, low1 = tem;
5521 tem = high0, high0 = high1, high1 = tem;
5524 /* If the second range is != high1 where high1 is the type maximum of
5525 the type, try first merging with < high1 range. */
5526 if (low1
5527 && high1
5528 && TREE_CODE (low1) == INTEGER_CST
5529 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5530 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5531 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5532 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5533 && operand_equal_p (low1, high1, 0))
5535 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5536 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5537 !in1_p, NULL_TREE, range_predecessor (low1)))
5538 return true;
5539 /* Similarly for the second range != low1 where low1 is the type minimum
5540 of the type, try first merging with > low1 range. */
5541 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5542 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5543 !in1_p, range_successor (low1), NULL_TREE))
5544 return true;
5547 /* Now flag two cases, whether the ranges are disjoint or whether the
5548 second range is totally subsumed in the first. Note that the tests
5549 below are simplified by the ones above. */
5550 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5551 high0, 1, low1, 0));
5552 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5553 high1, 1, high0, 1));
5555 /* We now have four cases, depending on whether we are including or
5556 excluding the two ranges. */
5557 if (in0_p && in1_p)
5559 /* If they don't overlap, the result is false. If the second range
5560 is a subset it is the result. Otherwise, the range is from the start
5561 of the second to the end of the first. */
5562 if (no_overlap)
5563 in_p = 0, low = high = 0;
5564 else if (subset)
5565 in_p = 1, low = low1, high = high1;
5566 else
5567 in_p = 1, low = low1, high = high0;
5570 else if (in0_p && ! in1_p)
5572 /* If they don't overlap, the result is the first range. If they are
5573 equal, the result is false. If the second range is a subset of the
5574 first, and the ranges begin at the same place, we go from just after
5575 the end of the second range to the end of the first. If the second
5576 range is not a subset of the first, or if it is a subset and both
5577 ranges end at the same place, the range starts at the start of the
5578 first range and ends just before the second range.
5579 Otherwise, we can't describe this as a single range. */
5580 if (no_overlap)
5581 in_p = 1, low = low0, high = high0;
5582 else if (lowequal && highequal)
5583 in_p = 0, low = high = 0;
5584 else if (subset && lowequal)
5586 low = range_successor (high1);
5587 high = high0;
5588 in_p = 1;
5589 if (low == 0)
5591 /* We are in the weird situation where high0 > high1 but
5592 high1 has no successor. Punt. */
5593 return 0;
5596 else if (! subset || highequal)
5598 low = low0;
5599 high = range_predecessor (low1);
5600 in_p = 1;
5601 if (high == 0)
5603 /* low0 < low1 but low1 has no predecessor. Punt. */
5604 return 0;
5607 else
5608 return 0;
5611 else if (! in0_p && in1_p)
5613 /* If they don't overlap, the result is the second range. If the second
5614 is a subset of the first, the result is false. Otherwise,
5615 the range starts just after the first range and ends at the
5616 end of the second. */
5617 if (no_overlap)
5618 in_p = 1, low = low1, high = high1;
5619 else if (subset || highequal)
5620 in_p = 0, low = high = 0;
5621 else
5623 low = range_successor (high0);
5624 high = high1;
5625 in_p = 1;
5626 if (low == 0)
5628 /* high1 > high0 but high0 has no successor. Punt. */
5629 return 0;
5634 else
5636 /* The case where we are excluding both ranges. Here the complex case
5637 is if they don't overlap. In that case, the only time we have a
5638 range is if they are adjacent. If the second is a subset of the
5639 first, the result is the first. Otherwise, the range to exclude
5640 starts at the beginning of the first range and ends at the end of the
5641 second. */
5642 if (no_overlap)
5644 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5645 range_successor (high0),
5646 1, low1, 0)))
5647 in_p = 0, low = low0, high = high1;
5648 else
5650 /* Canonicalize - [min, x] into - [-, x]. */
5651 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5652 switch (TREE_CODE (TREE_TYPE (low0)))
5654 case ENUMERAL_TYPE:
5655 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5656 GET_MODE_BITSIZE
5657 (TYPE_MODE (TREE_TYPE (low0)))))
5658 break;
5659 /* FALLTHROUGH */
5660 case INTEGER_TYPE:
5661 if (tree_int_cst_equal (low0,
5662 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5663 low0 = 0;
5664 break;
5665 case POINTER_TYPE:
5666 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5667 && integer_zerop (low0))
5668 low0 = 0;
5669 break;
5670 default:
5671 break;
5674 /* Canonicalize - [x, max] into - [x, -]. */
5675 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5676 switch (TREE_CODE (TREE_TYPE (high1)))
5678 case ENUMERAL_TYPE:
5679 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5680 GET_MODE_BITSIZE
5681 (TYPE_MODE (TREE_TYPE (high1)))))
5682 break;
5683 /* FALLTHROUGH */
5684 case INTEGER_TYPE:
5685 if (tree_int_cst_equal (high1,
5686 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5687 high1 = 0;
5688 break;
5689 case POINTER_TYPE:
5690 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5691 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5692 high1, 1,
5693 build_int_cst (TREE_TYPE (high1), 1),
5694 1)))
5695 high1 = 0;
5696 break;
5697 default:
5698 break;
5701 /* The ranges might be also adjacent between the maximum and
5702 minimum values of the given type. For
5703 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5704 return + [x + 1, y - 1]. */
5705 if (low0 == 0 && high1 == 0)
5707 low = range_successor (high0);
5708 high = range_predecessor (low1);
5709 if (low == 0 || high == 0)
5710 return 0;
5712 in_p = 1;
5714 else
5715 return 0;
5718 else if (subset)
5719 in_p = 0, low = low0, high = high0;
5720 else
5721 in_p = 0, low = low0, high = high1;
5724 *pin_p = in_p, *plow = low, *phigh = high;
5725 return 1;
5729 /* Subroutine of fold, looking inside expressions of the form
5730 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5731 of the COND_EXPR. This function is being used also to optimize
5732 A op B ? C : A, by reversing the comparison first.
5734 Return a folded expression whose code is not a COND_EXPR
5735 anymore, or NULL_TREE if no folding opportunity is found. */
5737 static tree
5738 fold_cond_expr_with_comparison (location_t loc, tree type,
5739 tree arg0, tree arg1, tree arg2)
5741 enum tree_code comp_code = TREE_CODE (arg0);
5742 tree arg00 = TREE_OPERAND (arg0, 0);
5743 tree arg01 = TREE_OPERAND (arg0, 1);
5744 tree arg1_type = TREE_TYPE (arg1);
5745 tree tem;
5747 STRIP_NOPS (arg1);
5748 STRIP_NOPS (arg2);
5750 /* If we have A op 0 ? A : -A, consider applying the following
5751 transformations:
5753 A == 0? A : -A same as -A
5754 A != 0? A : -A same as A
5755 A >= 0? A : -A same as abs (A)
5756 A > 0? A : -A same as abs (A)
5757 A <= 0? A : -A same as -abs (A)
5758 A < 0? A : -A same as -abs (A)
5760 None of these transformations work for modes with signed
5761 zeros. If A is +/-0, the first two transformations will
5762 change the sign of the result (from +0 to -0, or vice
5763 versa). The last four will fix the sign of the result,
5764 even though the original expressions could be positive or
5765 negative, depending on the sign of A.
5767 Note that all these transformations are correct if A is
5768 NaN, since the two alternatives (A and -A) are also NaNs. */
5769 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5770 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5771 ? real_zerop (arg01)
5772 : integer_zerop (arg01))
5773 && ((TREE_CODE (arg2) == NEGATE_EXPR
5774 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5775 /* In the case that A is of the form X-Y, '-A' (arg2) may
5776 have already been folded to Y-X, check for that. */
5777 || (TREE_CODE (arg1) == MINUS_EXPR
5778 && TREE_CODE (arg2) == MINUS_EXPR
5779 && operand_equal_p (TREE_OPERAND (arg1, 0),
5780 TREE_OPERAND (arg2, 1), 0)
5781 && operand_equal_p (TREE_OPERAND (arg1, 1),
5782 TREE_OPERAND (arg2, 0), 0))))
5783 switch (comp_code)
5785 case EQ_EXPR:
5786 case UNEQ_EXPR:
5787 tem = fold_convert_loc (loc, arg1_type, arg1);
5788 return fold_convert_loc (loc, type, negate_expr (tem));
5789 case NE_EXPR:
5790 case LTGT_EXPR:
5791 return fold_convert_loc (loc, type, arg1);
5792 case UNGE_EXPR:
5793 case UNGT_EXPR:
5794 if (flag_trapping_math)
5795 break;
5796 /* Fall through. */
5797 case GE_EXPR:
5798 case GT_EXPR:
5799 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5800 break;
5801 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5802 return fold_convert_loc (loc, type, tem);
5803 case UNLE_EXPR:
5804 case UNLT_EXPR:
5805 if (flag_trapping_math)
5806 break;
5807 /* FALLTHRU */
5808 case LE_EXPR:
5809 case LT_EXPR:
5810 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5811 break;
5812 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5813 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
5815 /* A <= 0 ? A : -A for A INT_MIN is valid, but -abs(INT_MIN)
5816 is not, invokes UB both in abs and in the negation of it.
5817 So, use ABSU_EXPR instead. */
5818 tree utype = unsigned_type_for (TREE_TYPE (arg1));
5819 tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
5820 tem = negate_expr (tem);
5821 return fold_convert_loc (loc, type, tem);
5823 else
5825 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5826 return negate_expr (fold_convert_loc (loc, type, tem));
5828 default:
5829 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5830 break;
5833 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5834 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5835 both transformations are correct when A is NaN: A != 0
5836 is then true, and A == 0 is false. */
5838 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5839 && integer_zerop (arg01) && integer_zerop (arg2))
5841 if (comp_code == NE_EXPR)
5842 return fold_convert_loc (loc, type, arg1);
5843 else if (comp_code == EQ_EXPR)
5844 return build_zero_cst (type);
5847 /* Try some transformations of A op B ? A : B.
5849 A == B? A : B same as B
5850 A != B? A : B same as A
5851 A >= B? A : B same as max (A, B)
5852 A > B? A : B same as max (B, A)
5853 A <= B? A : B same as min (A, B)
5854 A < B? A : B same as min (B, A)
5856 As above, these transformations don't work in the presence
5857 of signed zeros. For example, if A and B are zeros of
5858 opposite sign, the first two transformations will change
5859 the sign of the result. In the last four, the original
5860 expressions give different results for (A=+0, B=-0) and
5861 (A=-0, B=+0), but the transformed expressions do not.
5863 The first two transformations are correct if either A or B
5864 is a NaN. In the first transformation, the condition will
5865 be false, and B will indeed be chosen. In the case of the
5866 second transformation, the condition A != B will be true,
5867 and A will be chosen.
5869 The conversions to max() and min() are not correct if B is
5870 a number and A is not. The conditions in the original
5871 expressions will be false, so all four give B. The min()
5872 and max() versions would give a NaN instead. */
5873 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5874 && operand_equal_for_comparison_p (arg01, arg2)
5875 /* Avoid these transformations if the COND_EXPR may be used
5876 as an lvalue in the C++ front-end. PR c++/19199. */
5877 && (in_gimple_form
5878 || VECTOR_TYPE_P (type)
5879 || (! lang_GNU_CXX ()
5880 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5881 || ! maybe_lvalue_p (arg1)
5882 || ! maybe_lvalue_p (arg2)))
5884 tree comp_op0 = arg00;
5885 tree comp_op1 = arg01;
5886 tree comp_type = TREE_TYPE (comp_op0);
5888 switch (comp_code)
5890 case EQ_EXPR:
5891 return fold_convert_loc (loc, type, arg2);
5892 case NE_EXPR:
5893 return fold_convert_loc (loc, type, arg1);
5894 case LE_EXPR:
5895 case LT_EXPR:
5896 case UNLE_EXPR:
5897 case UNLT_EXPR:
5898 /* In C++ a ?: expression can be an lvalue, so put the
5899 operand which will be used if they are equal first
5900 so that we can convert this back to the
5901 corresponding COND_EXPR. */
5902 if (!HONOR_NANS (arg1))
5904 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5905 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5906 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5907 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5908 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5909 comp_op1, comp_op0);
5910 return fold_convert_loc (loc, type, tem);
5912 break;
5913 case GE_EXPR:
5914 case GT_EXPR:
5915 case UNGE_EXPR:
5916 case UNGT_EXPR:
5917 if (!HONOR_NANS (arg1))
5919 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5920 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5921 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5922 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5923 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5924 comp_op1, comp_op0);
5925 return fold_convert_loc (loc, type, tem);
5927 break;
5928 case UNEQ_EXPR:
5929 if (!HONOR_NANS (arg1))
5930 return fold_convert_loc (loc, type, arg2);
5931 break;
5932 case LTGT_EXPR:
5933 if (!HONOR_NANS (arg1))
5934 return fold_convert_loc (loc, type, arg1);
5935 break;
5936 default:
5937 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5938 break;
5942 return NULL_TREE;
5947 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5948 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5949 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5950 false) >= 2)
5951 #endif
5953 /* EXP is some logical combination of boolean tests. See if we can
5954 merge it into some range test. Return the new tree if so. */
5956 static tree
5957 fold_range_test (location_t loc, enum tree_code code, tree type,
5958 tree op0, tree op1)
5960 int or_op = (code == TRUTH_ORIF_EXPR
5961 || code == TRUTH_OR_EXPR);
5962 int in0_p, in1_p, in_p;
5963 tree low0, low1, low, high0, high1, high;
5964 bool strict_overflow_p = false;
5965 tree tem, lhs, rhs;
5966 const char * const warnmsg = G_("assuming signed overflow does not occur "
5967 "when simplifying range test");
5969 if (!INTEGRAL_TYPE_P (type))
5970 return 0;
5972 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5973 /* If op0 is known true or false and this is a short-circuiting
5974 operation we must not merge with op1 since that makes side-effects
5975 unconditional. So special-case this. */
5976 if (!lhs
5977 && ((code == TRUTH_ORIF_EXPR && in0_p)
5978 || (code == TRUTH_ANDIF_EXPR && !in0_p)))
5979 return op0;
5980 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5982 /* If this is an OR operation, invert both sides; we will invert
5983 again at the end. */
5984 if (or_op)
5985 in0_p = ! in0_p, in1_p = ! in1_p;
5987 /* If both expressions are the same, if we can merge the ranges, and we
5988 can build the range test, return it or it inverted. If one of the
5989 ranges is always true or always false, consider it to be the same
5990 expression as the other. */
5991 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5992 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5993 in1_p, low1, high1)
5994 && (tem = (build_range_check (loc, type,
5995 lhs != 0 ? lhs
5996 : rhs != 0 ? rhs : integer_zero_node,
5997 in_p, low, high))) != 0)
5999 if (strict_overflow_p)
6000 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
6001 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6004 /* On machines where the branch cost is expensive, if this is a
6005 short-circuited branch and the underlying object on both sides
6006 is the same, make a non-short-circuit operation. */
6007 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6008 if (param_logical_op_non_short_circuit != -1)
6009 logical_op_non_short_circuit
6010 = param_logical_op_non_short_circuit;
6011 if (logical_op_non_short_circuit
6012 && !flag_sanitize_coverage
6013 && lhs != 0 && rhs != 0
6014 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6015 && operand_equal_p (lhs, rhs, 0))
6017 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6018 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6019 which cases we can't do this. */
6020 if (simple_operand_p (lhs))
6021 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6022 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6023 type, op0, op1);
6025 else if (!lang_hooks.decls.global_bindings_p ()
6026 && !CONTAINS_PLACEHOLDER_P (lhs))
6028 tree common = save_expr (lhs);
6030 if ((lhs = build_range_check (loc, type, common,
6031 or_op ? ! in0_p : in0_p,
6032 low0, high0)) != 0
6033 && (rhs = build_range_check (loc, type, common,
6034 or_op ? ! in1_p : in1_p,
6035 low1, high1)) != 0)
6037 if (strict_overflow_p)
6038 fold_overflow_warning (warnmsg,
6039 WARN_STRICT_OVERFLOW_COMPARISON);
6040 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6041 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6042 type, lhs, rhs);
6047 return 0;
6050 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6051 bit value. Arrange things so the extra bits will be set to zero if and
6052 only if C is signed-extended to its full width. If MASK is nonzero,
6053 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6055 static tree
6056 unextend (tree c, int p, int unsignedp, tree mask)
6058 tree type = TREE_TYPE (c);
6059 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6060 tree temp;
6062 if (p == modesize || unsignedp)
6063 return c;
6065 /* We work by getting just the sign bit into the low-order bit, then
6066 into the high-order bit, then sign-extend. We then XOR that value
6067 with C. */
6068 temp = build_int_cst (TREE_TYPE (c),
6069 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6071 /* We must use a signed type in order to get an arithmetic right shift.
6072 However, we must also avoid introducing accidental overflows, so that
6073 a subsequent call to integer_zerop will work. Hence we must
6074 do the type conversion here. At this point, the constant is either
6075 zero or one, and the conversion to a signed type can never overflow.
6076 We could get an overflow if this conversion is done anywhere else. */
6077 if (TYPE_UNSIGNED (type))
6078 temp = fold_convert (signed_type_for (type), temp);
6080 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6081 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6082 if (mask != 0)
6083 temp = const_binop (BIT_AND_EXPR, temp,
6084 fold_convert (TREE_TYPE (c), mask));
6085 /* If necessary, convert the type back to match the type of C. */
6086 if (TYPE_UNSIGNED (type))
6087 temp = fold_convert (type, temp);
6089 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
6092 /* For an expression that has the form
6093 (A && B) || ~B
6095 (A || B) && ~B,
6096 we can drop one of the inner expressions and simplify to
6097 A || ~B
6099 A && ~B
6100 LOC is the location of the resulting expression. OP is the inner
6101 logical operation; the left-hand side in the examples above, while CMPOP
6102 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6103 removing a condition that guards another, as in
6104 (A != NULL && A->...) || A == NULL
6105 which we must not transform. If RHS_ONLY is true, only eliminate the
6106 right-most operand of the inner logical operation. */
6108 static tree
6109 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6110 bool rhs_only)
6112 tree type = TREE_TYPE (cmpop);
6113 enum tree_code code = TREE_CODE (cmpop);
6114 enum tree_code truthop_code = TREE_CODE (op);
6115 tree lhs = TREE_OPERAND (op, 0);
6116 tree rhs = TREE_OPERAND (op, 1);
6117 tree orig_lhs = lhs, orig_rhs = rhs;
6118 enum tree_code rhs_code = TREE_CODE (rhs);
6119 enum tree_code lhs_code = TREE_CODE (lhs);
6120 enum tree_code inv_code;
6122 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6123 return NULL_TREE;
6125 if (TREE_CODE_CLASS (code) != tcc_comparison)
6126 return NULL_TREE;
6128 if (rhs_code == truthop_code)
6130 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6131 if (newrhs != NULL_TREE)
6133 rhs = newrhs;
6134 rhs_code = TREE_CODE (rhs);
6137 if (lhs_code == truthop_code && !rhs_only)
6139 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6140 if (newlhs != NULL_TREE)
6142 lhs = newlhs;
6143 lhs_code = TREE_CODE (lhs);
6147 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6148 if (inv_code == rhs_code
6149 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6150 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6151 return lhs;
6152 if (!rhs_only && inv_code == lhs_code
6153 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6154 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6155 return rhs;
6156 if (rhs != orig_rhs || lhs != orig_lhs)
6157 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6158 lhs, rhs);
6159 return NULL_TREE;
6162 /* Find ways of folding logical expressions of LHS and RHS:
6163 Try to merge two comparisons to the same innermost item.
6164 Look for range tests like "ch >= '0' && ch <= '9'".
6165 Look for combinations of simple terms on machines with expensive branches
6166 and evaluate the RHS unconditionally.
6168 For example, if we have p->a == 2 && p->b == 4 and we can make an
6169 object large enough to span both A and B, we can do this with a comparison
6170 against the object ANDed with the a mask.
6172 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6173 operations to do this with one comparison.
6175 We check for both normal comparisons and the BIT_AND_EXPRs made this by
6176 function and the one above.
6178 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6179 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6181 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6182 two operands.
6184 We return the simplified tree or 0 if no optimization is possible. */
6186 static tree
6187 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6188 tree lhs, tree rhs)
6190 /* If this is the "or" of two comparisons, we can do something if
6191 the comparisons are NE_EXPR. If this is the "and", we can do something
6192 if the comparisons are EQ_EXPR. I.e.,
6193 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6195 WANTED_CODE is this operation code. For single bit fields, we can
6196 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6197 comparison for one-bit fields. */
6199 enum tree_code wanted_code;
6200 enum tree_code lcode, rcode;
6201 tree ll_arg, lr_arg, rl_arg, rr_arg;
6202 tree ll_inner, lr_inner, rl_inner, rr_inner;
6203 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6204 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6205 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6206 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6207 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6208 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6209 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6210 scalar_int_mode lnmode, rnmode;
6211 tree ll_mask, lr_mask, rl_mask, rr_mask;
6212 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6213 tree l_const, r_const;
6214 tree lntype, rntype, result;
6215 HOST_WIDE_INT first_bit, end_bit;
6216 int volatilep;
6218 /* Start by getting the comparison codes. Fail if anything is volatile.
6219 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6220 it were surrounded with a NE_EXPR. */
6222 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6223 return 0;
6225 lcode = TREE_CODE (lhs);
6226 rcode = TREE_CODE (rhs);
6228 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6230 lhs = build2 (NE_EXPR, truth_type, lhs,
6231 build_int_cst (TREE_TYPE (lhs), 0));
6232 lcode = NE_EXPR;
6235 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6237 rhs = build2 (NE_EXPR, truth_type, rhs,
6238 build_int_cst (TREE_TYPE (rhs), 0));
6239 rcode = NE_EXPR;
6242 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6243 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6244 return 0;
6246 ll_arg = TREE_OPERAND (lhs, 0);
6247 lr_arg = TREE_OPERAND (lhs, 1);
6248 rl_arg = TREE_OPERAND (rhs, 0);
6249 rr_arg = TREE_OPERAND (rhs, 1);
6251 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6252 if (simple_operand_p (ll_arg)
6253 && simple_operand_p (lr_arg))
6255 if (operand_equal_p (ll_arg, rl_arg, 0)
6256 && operand_equal_p (lr_arg, rr_arg, 0))
6258 result = combine_comparisons (loc, code, lcode, rcode,
6259 truth_type, ll_arg, lr_arg);
6260 if (result)
6261 return result;
6263 else if (operand_equal_p (ll_arg, rr_arg, 0)
6264 && operand_equal_p (lr_arg, rl_arg, 0))
6266 result = combine_comparisons (loc, code, lcode,
6267 swap_tree_comparison (rcode),
6268 truth_type, ll_arg, lr_arg);
6269 if (result)
6270 return result;
6274 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6275 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6277 /* If the RHS can be evaluated unconditionally and its operands are
6278 simple, it wins to evaluate the RHS unconditionally on machines
6279 with expensive branches. In this case, this isn't a comparison
6280 that can be merged. */
6282 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6283 false) >= 2
6284 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6285 && simple_operand_p (rl_arg)
6286 && simple_operand_p (rr_arg))
6288 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6289 if (code == TRUTH_OR_EXPR
6290 && lcode == NE_EXPR && integer_zerop (lr_arg)
6291 && rcode == NE_EXPR && integer_zerop (rr_arg)
6292 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6293 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6294 return build2_loc (loc, NE_EXPR, truth_type,
6295 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6296 ll_arg, rl_arg),
6297 build_int_cst (TREE_TYPE (ll_arg), 0));
6299 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6300 if (code == TRUTH_AND_EXPR
6301 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6302 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6303 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6304 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6305 return build2_loc (loc, EQ_EXPR, truth_type,
6306 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6307 ll_arg, rl_arg),
6308 build_int_cst (TREE_TYPE (ll_arg), 0));
6311 /* See if the comparisons can be merged. Then get all the parameters for
6312 each side. */
6314 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6315 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6316 return 0;
6318 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6319 volatilep = 0;
6320 ll_inner = decode_field_reference (loc, &ll_arg,
6321 &ll_bitsize, &ll_bitpos, &ll_mode,
6322 &ll_unsignedp, &ll_reversep, &volatilep,
6323 &ll_mask, &ll_and_mask);
6324 lr_inner = decode_field_reference (loc, &lr_arg,
6325 &lr_bitsize, &lr_bitpos, &lr_mode,
6326 &lr_unsignedp, &lr_reversep, &volatilep,
6327 &lr_mask, &lr_and_mask);
6328 rl_inner = decode_field_reference (loc, &rl_arg,
6329 &rl_bitsize, &rl_bitpos, &rl_mode,
6330 &rl_unsignedp, &rl_reversep, &volatilep,
6331 &rl_mask, &rl_and_mask);
6332 rr_inner = decode_field_reference (loc, &rr_arg,
6333 &rr_bitsize, &rr_bitpos, &rr_mode,
6334 &rr_unsignedp, &rr_reversep, &volatilep,
6335 &rr_mask, &rr_and_mask);
6337 /* It must be true that the inner operation on the lhs of each
6338 comparison must be the same if we are to be able to do anything.
6339 Then see if we have constants. If not, the same must be true for
6340 the rhs's. */
6341 if (volatilep
6342 || ll_reversep != rl_reversep
6343 || ll_inner == 0 || rl_inner == 0
6344 || ! operand_equal_p (ll_inner, rl_inner, 0))
6345 return 0;
6347 if (TREE_CODE (lr_arg) == INTEGER_CST
6348 && TREE_CODE (rr_arg) == INTEGER_CST)
6350 l_const = lr_arg, r_const = rr_arg;
6351 lr_reversep = ll_reversep;
6353 else if (lr_reversep != rr_reversep
6354 || lr_inner == 0 || rr_inner == 0
6355 || ! operand_equal_p (lr_inner, rr_inner, 0))
6356 return 0;
6357 else
6358 l_const = r_const = 0;
6360 /* If either comparison code is not correct for our logical operation,
6361 fail. However, we can convert a one-bit comparison against zero into
6362 the opposite comparison against that bit being set in the field. */
6364 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6365 if (lcode != wanted_code)
6367 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6369 /* Make the left operand unsigned, since we are only interested
6370 in the value of one bit. Otherwise we are doing the wrong
6371 thing below. */
6372 ll_unsignedp = 1;
6373 l_const = ll_mask;
6375 else
6376 return 0;
6379 /* This is analogous to the code for l_const above. */
6380 if (rcode != wanted_code)
6382 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6384 rl_unsignedp = 1;
6385 r_const = rl_mask;
6387 else
6388 return 0;
6391 /* See if we can find a mode that contains both fields being compared on
6392 the left. If we can't, fail. Otherwise, update all constants and masks
6393 to be relative to a field of that size. */
6394 first_bit = MIN (ll_bitpos, rl_bitpos);
6395 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6396 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6397 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6398 volatilep, &lnmode))
6399 return 0;
6401 lnbitsize = GET_MODE_BITSIZE (lnmode);
6402 lnbitpos = first_bit & ~ (lnbitsize - 1);
6403 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6404 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6406 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6408 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6409 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6412 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6413 size_int (xll_bitpos));
6414 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6415 size_int (xrl_bitpos));
6417 if (l_const)
6419 l_const = fold_convert_loc (loc, lntype, l_const);
6420 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6421 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6422 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6423 fold_build1_loc (loc, BIT_NOT_EXPR,
6424 lntype, ll_mask))))
6426 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6428 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6431 if (r_const)
6433 r_const = fold_convert_loc (loc, lntype, r_const);
6434 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6435 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6436 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6437 fold_build1_loc (loc, BIT_NOT_EXPR,
6438 lntype, rl_mask))))
6440 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6442 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6446 /* If the right sides are not constant, do the same for it. Also,
6447 disallow this optimization if a size, signedness or storage order
6448 mismatch occurs between the left and right sides. */
6449 if (l_const == 0)
6451 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6452 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6453 || ll_reversep != lr_reversep
6454 /* Make sure the two fields on the right
6455 correspond to the left without being swapped. */
6456 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6457 return 0;
6459 first_bit = MIN (lr_bitpos, rr_bitpos);
6460 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6461 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6462 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6463 volatilep, &rnmode))
6464 return 0;
6466 rnbitsize = GET_MODE_BITSIZE (rnmode);
6467 rnbitpos = first_bit & ~ (rnbitsize - 1);
6468 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6469 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6471 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6473 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6474 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6477 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6478 rntype, lr_mask),
6479 size_int (xlr_bitpos));
6480 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6481 rntype, rr_mask),
6482 size_int (xrr_bitpos));
6484 /* Make a mask that corresponds to both fields being compared.
6485 Do this for both items being compared. If the operands are the
6486 same size and the bits being compared are in the same position
6487 then we can do this by masking both and comparing the masked
6488 results. */
6489 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6490 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6491 if (lnbitsize == rnbitsize
6492 && xll_bitpos == xlr_bitpos
6493 && lnbitpos >= 0
6494 && rnbitpos >= 0)
6496 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6497 lntype, lnbitsize, lnbitpos,
6498 ll_unsignedp || rl_unsignedp, ll_reversep);
6499 if (! all_ones_mask_p (ll_mask, lnbitsize))
6500 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6502 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6503 rntype, rnbitsize, rnbitpos,
6504 lr_unsignedp || rr_unsignedp, lr_reversep);
6505 if (! all_ones_mask_p (lr_mask, rnbitsize))
6506 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6508 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6511 /* There is still another way we can do something: If both pairs of
6512 fields being compared are adjacent, we may be able to make a wider
6513 field containing them both.
6515 Note that we still must mask the lhs/rhs expressions. Furthermore,
6516 the mask must be shifted to account for the shift done by
6517 make_bit_field_ref. */
6518 if (((ll_bitsize + ll_bitpos == rl_bitpos
6519 && lr_bitsize + lr_bitpos == rr_bitpos)
6520 || (ll_bitpos == rl_bitpos + rl_bitsize
6521 && lr_bitpos == rr_bitpos + rr_bitsize))
6522 && ll_bitpos >= 0
6523 && rl_bitpos >= 0
6524 && lr_bitpos >= 0
6525 && rr_bitpos >= 0)
6527 tree type;
6529 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6530 ll_bitsize + rl_bitsize,
6531 MIN (ll_bitpos, rl_bitpos),
6532 ll_unsignedp, ll_reversep);
6533 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6534 lr_bitsize + rr_bitsize,
6535 MIN (lr_bitpos, rr_bitpos),
6536 lr_unsignedp, lr_reversep);
6538 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6539 size_int (MIN (xll_bitpos, xrl_bitpos)));
6540 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6541 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6543 /* Convert to the smaller type before masking out unwanted bits. */
6544 type = lntype;
6545 if (lntype != rntype)
6547 if (lnbitsize > rnbitsize)
6549 lhs = fold_convert_loc (loc, rntype, lhs);
6550 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6551 type = rntype;
6553 else if (lnbitsize < rnbitsize)
6555 rhs = fold_convert_loc (loc, lntype, rhs);
6556 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6557 type = lntype;
6561 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6562 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6564 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6565 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6567 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6570 return 0;
6573 /* Handle the case of comparisons with constants. If there is something in
6574 common between the masks, those bits of the constants must be the same.
6575 If not, the condition is always false. Test for this to avoid generating
6576 incorrect code below. */
6577 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6578 if (! integer_zerop (result)
6579 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6580 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6582 if (wanted_code == NE_EXPR)
6584 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6585 return constant_boolean_node (true, truth_type);
6587 else
6589 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6590 return constant_boolean_node (false, truth_type);
6594 if (lnbitpos < 0)
6595 return 0;
6597 /* Construct the expression we will return. First get the component
6598 reference we will make. Unless the mask is all ones the width of
6599 that field, perform the mask operation. Then compare with the
6600 merged constant. */
6601 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6602 lntype, lnbitsize, lnbitpos,
6603 ll_unsignedp || rl_unsignedp, ll_reversep);
6605 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6606 if (! all_ones_mask_p (ll_mask, lnbitsize))
6607 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6609 return build2_loc (loc, wanted_code, truth_type, result,
6610 const_binop (BIT_IOR_EXPR, l_const, r_const));
6613 /* T is an integer expression that is being multiplied, divided, or taken a
6614 modulus (CODE says which and what kind of divide or modulus) by a
6615 constant C. See if we can eliminate that operation by folding it with
6616 other operations already in T. WIDE_TYPE, if non-null, is a type that
6617 should be used for the computation if wider than our type.
6619 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6620 (X * 2) + (Y * 4). We must, however, be assured that either the original
6621 expression would not overflow or that overflow is undefined for the type
6622 in the language in question.
6624 If we return a non-null expression, it is an equivalent form of the
6625 original computation, but need not be in the original type.
6627 We set *STRICT_OVERFLOW_P to true if the return values depends on
6628 signed overflow being undefined. Otherwise we do not change
6629 *STRICT_OVERFLOW_P. */
6631 static tree
6632 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6633 bool *strict_overflow_p)
6635 /* To avoid exponential search depth, refuse to allow recursion past
6636 three levels. Beyond that (1) it's highly unlikely that we'll find
6637 something interesting and (2) we've probably processed it before
6638 when we built the inner expression. */
6640 static int depth;
6641 tree ret;
6643 if (depth > 3)
6644 return NULL;
6646 depth++;
6647 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6648 depth--;
6650 return ret;
6653 static tree
6654 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6655 bool *strict_overflow_p)
6657 tree type = TREE_TYPE (t);
6658 enum tree_code tcode = TREE_CODE (t);
6659 tree ctype = (wide_type != 0
6660 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6661 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6662 ? wide_type : type);
6663 tree t1, t2;
6664 int same_p = tcode == code;
6665 tree op0 = NULL_TREE, op1 = NULL_TREE;
6666 bool sub_strict_overflow_p;
6668 /* Don't deal with constants of zero here; they confuse the code below. */
6669 if (integer_zerop (c))
6670 return NULL_TREE;
6672 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6673 op0 = TREE_OPERAND (t, 0);
6675 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6676 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6678 /* Note that we need not handle conditional operations here since fold
6679 already handles those cases. So just do arithmetic here. */
6680 switch (tcode)
6682 case INTEGER_CST:
6683 /* For a constant, we can always simplify if we are a multiply
6684 or (for divide and modulus) if it is a multiple of our constant. */
6685 if (code == MULT_EXPR
6686 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6687 TYPE_SIGN (type)))
6689 tree tem = const_binop (code, fold_convert (ctype, t),
6690 fold_convert (ctype, c));
6691 /* If the multiplication overflowed, we lost information on it.
6692 See PR68142 and PR69845. */
6693 if (TREE_OVERFLOW (tem))
6694 return NULL_TREE;
6695 return tem;
6697 break;
6699 CASE_CONVERT: case NON_LVALUE_EXPR:
6700 /* If op0 is an expression ... */
6701 if ((COMPARISON_CLASS_P (op0)
6702 || UNARY_CLASS_P (op0)
6703 || BINARY_CLASS_P (op0)
6704 || VL_EXP_CLASS_P (op0)
6705 || EXPRESSION_CLASS_P (op0))
6706 /* ... and has wrapping overflow, and its type is smaller
6707 than ctype, then we cannot pass through as widening. */
6708 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6709 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6710 && (TYPE_PRECISION (ctype)
6711 > TYPE_PRECISION (TREE_TYPE (op0))))
6712 /* ... or this is a truncation (t is narrower than op0),
6713 then we cannot pass through this narrowing. */
6714 || (TYPE_PRECISION (type)
6715 < TYPE_PRECISION (TREE_TYPE (op0)))
6716 /* ... or signedness changes for division or modulus,
6717 then we cannot pass through this conversion. */
6718 || (code != MULT_EXPR
6719 && (TYPE_UNSIGNED (ctype)
6720 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6721 /* ... or has undefined overflow while the converted to
6722 type has not, we cannot do the operation in the inner type
6723 as that would introduce undefined overflow. */
6724 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6725 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6726 && !TYPE_OVERFLOW_UNDEFINED (type))))
6727 break;
6729 /* Pass the constant down and see if we can make a simplification. If
6730 we can, replace this expression with the inner simplification for
6731 possible later conversion to our or some other type. */
6732 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6733 && TREE_CODE (t2) == INTEGER_CST
6734 && !TREE_OVERFLOW (t2)
6735 && (t1 = extract_muldiv (op0, t2, code,
6736 code == MULT_EXPR ? ctype : NULL_TREE,
6737 strict_overflow_p)) != 0)
6738 return t1;
6739 break;
6741 case ABS_EXPR:
6742 /* If widening the type changes it from signed to unsigned, then we
6743 must avoid building ABS_EXPR itself as unsigned. */
6744 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6746 tree cstype = (*signed_type_for) (ctype);
6747 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6748 != 0)
6750 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6751 return fold_convert (ctype, t1);
6753 break;
6755 /* If the constant is negative, we cannot simplify this. */
6756 if (tree_int_cst_sgn (c) == -1)
6757 break;
6758 /* FALLTHROUGH */
6759 case NEGATE_EXPR:
6760 /* For division and modulus, type can't be unsigned, as e.g.
6761 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6762 For signed types, even with wrapping overflow, this is fine. */
6763 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6764 break;
6765 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6766 != 0)
6767 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6768 break;
6770 case MIN_EXPR: case MAX_EXPR:
6771 /* If widening the type changes the signedness, then we can't perform
6772 this optimization as that changes the result. */
6773 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6774 break;
6776 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6777 sub_strict_overflow_p = false;
6778 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6779 &sub_strict_overflow_p)) != 0
6780 && (t2 = extract_muldiv (op1, c, code, wide_type,
6781 &sub_strict_overflow_p)) != 0)
6783 if (tree_int_cst_sgn (c) < 0)
6784 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6785 if (sub_strict_overflow_p)
6786 *strict_overflow_p = true;
6787 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6788 fold_convert (ctype, t2));
6790 break;
6792 case LSHIFT_EXPR: case RSHIFT_EXPR:
6793 /* If the second operand is constant, this is a multiplication
6794 or floor division, by a power of two, so we can treat it that
6795 way unless the multiplier or divisor overflows. Signed
6796 left-shift overflow is implementation-defined rather than
6797 undefined in C90, so do not convert signed left shift into
6798 multiplication. */
6799 if (TREE_CODE (op1) == INTEGER_CST
6800 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6801 /* const_binop may not detect overflow correctly,
6802 so check for it explicitly here. */
6803 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6804 wi::to_wide (op1))
6805 && (t1 = fold_convert (ctype,
6806 const_binop (LSHIFT_EXPR, size_one_node,
6807 op1))) != 0
6808 && !TREE_OVERFLOW (t1))
6809 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6810 ? MULT_EXPR : FLOOR_DIV_EXPR,
6811 ctype,
6812 fold_convert (ctype, op0),
6813 t1),
6814 c, code, wide_type, strict_overflow_p);
6815 break;
6817 case PLUS_EXPR: case MINUS_EXPR:
6818 /* See if we can eliminate the operation on both sides. If we can, we
6819 can return a new PLUS or MINUS. If we can't, the only remaining
6820 cases where we can do anything are if the second operand is a
6821 constant. */
6822 sub_strict_overflow_p = false;
6823 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6824 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6825 if (t1 != 0 && t2 != 0
6826 && TYPE_OVERFLOW_WRAPS (ctype)
6827 && (code == MULT_EXPR
6828 /* If not multiplication, we can only do this if both operands
6829 are divisible by c. */
6830 || (multiple_of_p (ctype, op0, c)
6831 && multiple_of_p (ctype, op1, c))))
6833 if (sub_strict_overflow_p)
6834 *strict_overflow_p = true;
6835 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6836 fold_convert (ctype, t2));
6839 /* If this was a subtraction, negate OP1 and set it to be an addition.
6840 This simplifies the logic below. */
6841 if (tcode == MINUS_EXPR)
6843 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6844 /* If OP1 was not easily negatable, the constant may be OP0. */
6845 if (TREE_CODE (op0) == INTEGER_CST)
6847 std::swap (op0, op1);
6848 std::swap (t1, t2);
6852 if (TREE_CODE (op1) != INTEGER_CST)
6853 break;
6855 /* If either OP1 or C are negative, this optimization is not safe for
6856 some of the division and remainder types while for others we need
6857 to change the code. */
6858 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6860 if (code == CEIL_DIV_EXPR)
6861 code = FLOOR_DIV_EXPR;
6862 else if (code == FLOOR_DIV_EXPR)
6863 code = CEIL_DIV_EXPR;
6864 else if (code != MULT_EXPR
6865 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6866 break;
6869 /* If it's a multiply or a division/modulus operation of a multiple
6870 of our constant, do the operation and verify it doesn't overflow. */
6871 if (code == MULT_EXPR
6872 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6873 TYPE_SIGN (type)))
6875 op1 = const_binop (code, fold_convert (ctype, op1),
6876 fold_convert (ctype, c));
6877 /* We allow the constant to overflow with wrapping semantics. */
6878 if (op1 == 0
6879 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6880 break;
6882 else
6883 break;
6885 /* If we have an unsigned type, we cannot widen the operation since it
6886 will change the result if the original computation overflowed. */
6887 if (TYPE_UNSIGNED (ctype) && ctype != type)
6888 break;
6890 /* The last case is if we are a multiply. In that case, we can
6891 apply the distributive law to commute the multiply and addition
6892 if the multiplication of the constants doesn't overflow
6893 and overflow is defined. With undefined overflow
6894 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6895 But fold_plusminus_mult_expr would factor back any power-of-two
6896 value so do not distribute in the first place in this case. */
6897 if (code == MULT_EXPR
6898 && TYPE_OVERFLOW_WRAPS (ctype)
6899 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6900 return fold_build2 (tcode, ctype,
6901 fold_build2 (code, ctype,
6902 fold_convert (ctype, op0),
6903 fold_convert (ctype, c)),
6904 op1);
6906 break;
6908 case MULT_EXPR:
6909 /* We have a special case here if we are doing something like
6910 (C * 8) % 4 since we know that's zero. */
6911 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6912 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6913 /* If the multiplication can overflow we cannot optimize this. */
6914 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6915 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6916 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6917 TYPE_SIGN (type)))
6919 *strict_overflow_p = true;
6920 return omit_one_operand (type, integer_zero_node, op0);
6923 /* ... fall through ... */
6925 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6926 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6927 /* If we can extract our operation from the LHS, do so and return a
6928 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6929 do something only if the second operand is a constant. */
6930 if (same_p
6931 && TYPE_OVERFLOW_WRAPS (ctype)
6932 && (t1 = extract_muldiv (op0, c, code, wide_type,
6933 strict_overflow_p)) != 0)
6934 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6935 fold_convert (ctype, op1));
6936 else if (tcode == MULT_EXPR && code == MULT_EXPR
6937 && TYPE_OVERFLOW_WRAPS (ctype)
6938 && (t1 = extract_muldiv (op1, c, code, wide_type,
6939 strict_overflow_p)) != 0)
6940 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6941 fold_convert (ctype, t1));
6942 else if (TREE_CODE (op1) != INTEGER_CST)
6943 return 0;
6945 /* If these are the same operation types, we can associate them
6946 assuming no overflow. */
6947 if (tcode == code)
6949 bool overflow_p = false;
6950 wi::overflow_type overflow_mul;
6951 signop sign = TYPE_SIGN (ctype);
6952 unsigned prec = TYPE_PRECISION (ctype);
6953 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6954 wi::to_wide (c, prec),
6955 sign, &overflow_mul);
6956 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6957 if (overflow_mul
6958 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6959 overflow_p = true;
6960 if (!overflow_p)
6961 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6962 wide_int_to_tree (ctype, mul));
6965 /* If these operations "cancel" each other, we have the main
6966 optimizations of this pass, which occur when either constant is a
6967 multiple of the other, in which case we replace this with either an
6968 operation or CODE or TCODE.
6970 If we have an unsigned type, we cannot do this since it will change
6971 the result if the original computation overflowed. */
6972 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6973 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6974 || (tcode == MULT_EXPR
6975 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6976 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6977 && code != MULT_EXPR)))
6979 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6980 TYPE_SIGN (type)))
6982 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6983 *strict_overflow_p = true;
6984 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6985 fold_convert (ctype,
6986 const_binop (TRUNC_DIV_EXPR,
6987 op1, c)));
6989 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6990 TYPE_SIGN (type)))
6992 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6993 *strict_overflow_p = true;
6994 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6995 fold_convert (ctype,
6996 const_binop (TRUNC_DIV_EXPR,
6997 c, op1)));
7000 break;
7002 default:
7003 break;
7006 return 0;
7009 /* Return a node which has the indicated constant VALUE (either 0 or
7010 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7011 and is of the indicated TYPE. */
7013 tree
7014 constant_boolean_node (bool value, tree type)
7016 if (type == integer_type_node)
7017 return value ? integer_one_node : integer_zero_node;
7018 else if (type == boolean_type_node)
7019 return value ? boolean_true_node : boolean_false_node;
7020 else if (TREE_CODE (type) == VECTOR_TYPE)
7021 return build_vector_from_val (type,
7022 build_int_cst (TREE_TYPE (type),
7023 value ? -1 : 0));
7024 else
7025 return fold_convert (type, value ? integer_one_node : integer_zero_node);
7029 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7030 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7031 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7032 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7033 COND is the first argument to CODE; otherwise (as in the example
7034 given here), it is the second argument. TYPE is the type of the
7035 original expression. Return NULL_TREE if no simplification is
7036 possible. */
7038 static tree
7039 fold_binary_op_with_conditional_arg (location_t loc,
7040 enum tree_code code,
7041 tree type, tree op0, tree op1,
7042 tree cond, tree arg, int cond_first_p)
7044 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7045 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7046 tree test, true_value, false_value;
7047 tree lhs = NULL_TREE;
7048 tree rhs = NULL_TREE;
7049 enum tree_code cond_code = COND_EXPR;
7051 /* Do not move possibly trapping operations into the conditional as this
7052 pessimizes code and causes gimplification issues when applied late. */
7053 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7054 ANY_INTEGRAL_TYPE_P (type)
7055 && TYPE_OVERFLOW_TRAPS (type), op1))
7056 return NULL_TREE;
7058 if (TREE_CODE (cond) == COND_EXPR
7059 || TREE_CODE (cond) == VEC_COND_EXPR)
7061 test = TREE_OPERAND (cond, 0);
7062 true_value = TREE_OPERAND (cond, 1);
7063 false_value = TREE_OPERAND (cond, 2);
7064 /* If this operand throws an expression, then it does not make
7065 sense to try to perform a logical or arithmetic operation
7066 involving it. */
7067 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7068 lhs = true_value;
7069 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7070 rhs = false_value;
7072 else if (!(TREE_CODE (type) != VECTOR_TYPE
7073 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
7075 tree testtype = TREE_TYPE (cond);
7076 test = cond;
7077 true_value = constant_boolean_node (true, testtype);
7078 false_value = constant_boolean_node (false, testtype);
7080 else
7081 /* Detect the case of mixing vector and scalar types - bail out. */
7082 return NULL_TREE;
7084 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7085 cond_code = VEC_COND_EXPR;
7087 /* This transformation is only worthwhile if we don't have to wrap ARG
7088 in a SAVE_EXPR and the operation can be simplified without recursing
7089 on at least one of the branches once its pushed inside the COND_EXPR. */
7090 if (!TREE_CONSTANT (arg)
7091 && (TREE_SIDE_EFFECTS (arg)
7092 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7093 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7094 return NULL_TREE;
7096 arg = fold_convert_loc (loc, arg_type, arg);
7097 if (lhs == 0)
7099 true_value = fold_convert_loc (loc, cond_type, true_value);
7100 if (cond_first_p)
7101 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7102 else
7103 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7105 if (rhs == 0)
7107 false_value = fold_convert_loc (loc, cond_type, false_value);
7108 if (cond_first_p)
7109 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7110 else
7111 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7114 /* Check that we have simplified at least one of the branches. */
7115 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7116 return NULL_TREE;
7118 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7122 /* Subroutine of fold() that checks for the addition of +/- 0.0.
7124 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
7125 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
7126 ADDEND is the same as X.
7128 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7129 and finite. The problematic cases are when X is zero, and its mode
7130 has signed zeros. In the case of rounding towards -infinity,
7131 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7132 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7134 bool
7135 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
7137 if (!real_zerop (addend))
7138 return false;
7140 /* Don't allow the fold with -fsignaling-nans. */
7141 if (HONOR_SNANS (type))
7142 return false;
7144 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7145 if (!HONOR_SIGNED_ZEROS (type))
7146 return true;
7148 /* There is no case that is safe for all rounding modes. */
7149 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7150 return false;
7152 /* In a vector or complex, we would need to check the sign of all zeros. */
7153 if (TREE_CODE (addend) == VECTOR_CST)
7154 addend = uniform_vector_p (addend);
7155 if (!addend || TREE_CODE (addend) != REAL_CST)
7156 return false;
7158 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7159 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
7160 negate = !negate;
7162 /* The mode has signed zeros, and we have to honor their sign.
7163 In this situation, there is only one case we can return true for.
7164 X - 0 is the same as X with default rounding. */
7165 return negate;
7168 /* Subroutine of match.pd that optimizes comparisons of a division by
7169 a nonzero integer constant against an integer constant, i.e.
7170 X/C1 op C2.
7172 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7173 GE_EXPR or LE_EXPR. ARG01 and ARG1 must be a INTEGER_CST. */
7175 enum tree_code
7176 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7177 tree *hi, bool *neg_overflow)
7179 tree prod, tmp, type = TREE_TYPE (c1);
7180 signop sign = TYPE_SIGN (type);
7181 wi::overflow_type overflow;
7183 /* We have to do this the hard way to detect unsigned overflow.
7184 prod = int_const_binop (MULT_EXPR, c1, c2); */
7185 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7186 prod = force_fit_type (type, val, -1, overflow);
7187 *neg_overflow = false;
7189 if (sign == UNSIGNED)
7191 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7192 *lo = prod;
7194 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7195 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7196 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7198 else if (tree_int_cst_sgn (c1) >= 0)
7200 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7201 switch (tree_int_cst_sgn (c2))
7203 case -1:
7204 *neg_overflow = true;
7205 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7206 *hi = prod;
7207 break;
7209 case 0:
7210 *lo = fold_negate_const (tmp, type);
7211 *hi = tmp;
7212 break;
7214 case 1:
7215 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7216 *lo = prod;
7217 break;
7219 default:
7220 gcc_unreachable ();
7223 else
7225 /* A negative divisor reverses the relational operators. */
7226 code = swap_tree_comparison (code);
7228 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7229 switch (tree_int_cst_sgn (c2))
7231 case -1:
7232 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7233 *lo = prod;
7234 break;
7236 case 0:
7237 *hi = fold_negate_const (tmp, type);
7238 *lo = tmp;
7239 break;
7241 case 1:
7242 *neg_overflow = true;
7243 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7244 *hi = prod;
7245 break;
7247 default:
7248 gcc_unreachable ();
7252 if (code != EQ_EXPR && code != NE_EXPR)
7253 return code;
7255 if (TREE_OVERFLOW (*lo)
7256 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7257 *lo = NULL_TREE;
7258 if (TREE_OVERFLOW (*hi)
7259 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7260 *hi = NULL_TREE;
7262 return code;
7266 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7267 equality/inequality test, then return a simplified form of the test
7268 using a sign testing. Otherwise return NULL. TYPE is the desired
7269 result type. */
7271 static tree
7272 fold_single_bit_test_into_sign_test (location_t loc,
7273 enum tree_code code, tree arg0, tree arg1,
7274 tree result_type)
7276 /* If this is testing a single bit, we can optimize the test. */
7277 if ((code == NE_EXPR || code == EQ_EXPR)
7278 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7279 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7281 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7282 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7283 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7285 if (arg00 != NULL_TREE
7286 /* This is only a win if casting to a signed type is cheap,
7287 i.e. when arg00's type is not a partial mode. */
7288 && type_has_mode_precision_p (TREE_TYPE (arg00)))
7290 tree stype = signed_type_for (TREE_TYPE (arg00));
7291 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7292 result_type,
7293 fold_convert_loc (loc, stype, arg00),
7294 build_int_cst (stype, 0));
7298 return NULL_TREE;
7301 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7302 equality/inequality test, then return a simplified form of
7303 the test using shifts and logical operations. Otherwise return
7304 NULL. TYPE is the desired result type. */
7306 tree
7307 fold_single_bit_test (location_t loc, enum tree_code code,
7308 tree arg0, tree arg1, tree result_type)
7310 /* If this is testing a single bit, we can optimize the test. */
7311 if ((code == NE_EXPR || code == EQ_EXPR)
7312 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7313 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7315 tree inner = TREE_OPERAND (arg0, 0);
7316 tree type = TREE_TYPE (arg0);
7317 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7318 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7319 int ops_unsigned;
7320 tree signed_type, unsigned_type, intermediate_type;
7321 tree tem, one;
7323 /* First, see if we can fold the single bit test into a sign-bit
7324 test. */
7325 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7326 result_type);
7327 if (tem)
7328 return tem;
7330 /* Otherwise we have (A & C) != 0 where C is a single bit,
7331 convert that into ((A >> C2) & 1). Where C2 = log2(C).
7332 Similarly for (A & C) == 0. */
7334 /* If INNER is a right shift of a constant and it plus BITNUM does
7335 not overflow, adjust BITNUM and INNER. */
7336 if (TREE_CODE (inner) == RSHIFT_EXPR
7337 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7338 && bitnum < TYPE_PRECISION (type)
7339 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7340 TYPE_PRECISION (type) - bitnum))
7342 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7343 inner = TREE_OPERAND (inner, 0);
7346 /* If we are going to be able to omit the AND below, we must do our
7347 operations as unsigned. If we must use the AND, we have a choice.
7348 Normally unsigned is faster, but for some machines signed is. */
7349 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7350 && !flag_syntax_only) ? 0 : 1;
7352 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7353 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7354 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7355 inner = fold_convert_loc (loc, intermediate_type, inner);
7357 if (bitnum != 0)
7358 inner = build2 (RSHIFT_EXPR, intermediate_type,
7359 inner, size_int (bitnum));
7361 one = build_int_cst (intermediate_type, 1);
7363 if (code == EQ_EXPR)
7364 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7366 /* Put the AND last so it can combine with more things. */
7367 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7369 /* Make sure to return the proper type. */
7370 inner = fold_convert_loc (loc, result_type, inner);
7372 return inner;
7374 return NULL_TREE;
7377 /* Test whether it is preferable two swap two operands, ARG0 and
7378 ARG1, for example because ARG0 is an integer constant and ARG1
7379 isn't. */
7381 bool
7382 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7384 if (CONSTANT_CLASS_P (arg1))
7385 return 0;
7386 if (CONSTANT_CLASS_P (arg0))
7387 return 1;
7389 STRIP_NOPS (arg0);
7390 STRIP_NOPS (arg1);
7392 if (TREE_CONSTANT (arg1))
7393 return 0;
7394 if (TREE_CONSTANT (arg0))
7395 return 1;
7397 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7398 for commutative and comparison operators. Ensuring a canonical
7399 form allows the optimizers to find additional redundancies without
7400 having to explicitly check for both orderings. */
7401 if (TREE_CODE (arg0) == SSA_NAME
7402 && TREE_CODE (arg1) == SSA_NAME
7403 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7404 return 1;
7406 /* Put SSA_NAMEs last. */
7407 if (TREE_CODE (arg1) == SSA_NAME)
7408 return 0;
7409 if (TREE_CODE (arg0) == SSA_NAME)
7410 return 1;
7412 /* Put variables last. */
7413 if (DECL_P (arg1))
7414 return 0;
7415 if (DECL_P (arg0))
7416 return 1;
7418 return 0;
7422 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7423 means A >= Y && A != MAX, but in this case we know that
7424 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7426 static tree
7427 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7429 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7431 if (TREE_CODE (bound) == LT_EXPR)
7432 a = TREE_OPERAND (bound, 0);
7433 else if (TREE_CODE (bound) == GT_EXPR)
7434 a = TREE_OPERAND (bound, 1);
7435 else
7436 return NULL_TREE;
7438 typea = TREE_TYPE (a);
7439 if (!INTEGRAL_TYPE_P (typea)
7440 && !POINTER_TYPE_P (typea))
7441 return NULL_TREE;
7443 if (TREE_CODE (ineq) == LT_EXPR)
7445 a1 = TREE_OPERAND (ineq, 1);
7446 y = TREE_OPERAND (ineq, 0);
7448 else if (TREE_CODE (ineq) == GT_EXPR)
7450 a1 = TREE_OPERAND (ineq, 0);
7451 y = TREE_OPERAND (ineq, 1);
7453 else
7454 return NULL_TREE;
7456 if (TREE_TYPE (a1) != typea)
7457 return NULL_TREE;
7459 if (POINTER_TYPE_P (typea))
7461 /* Convert the pointer types into integer before taking the difference. */
7462 tree ta = fold_convert_loc (loc, ssizetype, a);
7463 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7464 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7466 else
7467 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7469 if (!diff || !integer_onep (diff))
7470 return NULL_TREE;
7472 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7475 /* Fold a sum or difference of at least one multiplication.
7476 Returns the folded tree or NULL if no simplification could be made. */
7478 static tree
7479 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7480 tree arg0, tree arg1)
7482 tree arg00, arg01, arg10, arg11;
7483 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7485 /* (A * C) +- (B * C) -> (A+-B) * C.
7486 (A * C) +- A -> A * (C+-1).
7487 We are most concerned about the case where C is a constant,
7488 but other combinations show up during loop reduction. Since
7489 it is not difficult, try all four possibilities. */
7491 if (TREE_CODE (arg0) == MULT_EXPR)
7493 arg00 = TREE_OPERAND (arg0, 0);
7494 arg01 = TREE_OPERAND (arg0, 1);
7496 else if (TREE_CODE (arg0) == INTEGER_CST)
7498 arg00 = build_one_cst (type);
7499 arg01 = arg0;
7501 else
7503 /* We cannot generate constant 1 for fract. */
7504 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7505 return NULL_TREE;
7506 arg00 = arg0;
7507 arg01 = build_one_cst (type);
7509 if (TREE_CODE (arg1) == MULT_EXPR)
7511 arg10 = TREE_OPERAND (arg1, 0);
7512 arg11 = TREE_OPERAND (arg1, 1);
7514 else if (TREE_CODE (arg1) == INTEGER_CST)
7516 arg10 = build_one_cst (type);
7517 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7518 the purpose of this canonicalization. */
7519 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7520 && negate_expr_p (arg1)
7521 && code == PLUS_EXPR)
7523 arg11 = negate_expr (arg1);
7524 code = MINUS_EXPR;
7526 else
7527 arg11 = arg1;
7529 else
7531 /* We cannot generate constant 1 for fract. */
7532 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7533 return NULL_TREE;
7534 arg10 = arg1;
7535 arg11 = build_one_cst (type);
7537 same = NULL_TREE;
7539 /* Prefer factoring a common non-constant. */
7540 if (operand_equal_p (arg00, arg10, 0))
7541 same = arg00, alt0 = arg01, alt1 = arg11;
7542 else if (operand_equal_p (arg01, arg11, 0))
7543 same = arg01, alt0 = arg00, alt1 = arg10;
7544 else if (operand_equal_p (arg00, arg11, 0))
7545 same = arg00, alt0 = arg01, alt1 = arg10;
7546 else if (operand_equal_p (arg01, arg10, 0))
7547 same = arg01, alt0 = arg00, alt1 = arg11;
7549 /* No identical multiplicands; see if we can find a common
7550 power-of-two factor in non-power-of-two multiplies. This
7551 can help in multi-dimensional array access. */
7552 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7554 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7555 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7556 HOST_WIDE_INT tmp;
7557 bool swap = false;
7558 tree maybe_same;
7560 /* Move min of absolute values to int11. */
7561 if (absu_hwi (int01) < absu_hwi (int11))
7563 tmp = int01, int01 = int11, int11 = tmp;
7564 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7565 maybe_same = arg01;
7566 swap = true;
7568 else
7569 maybe_same = arg11;
7571 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7572 if (factor > 1
7573 && pow2p_hwi (factor)
7574 && (int01 & (factor - 1)) == 0
7575 /* The remainder should not be a constant, otherwise we
7576 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7577 increased the number of multiplications necessary. */
7578 && TREE_CODE (arg10) != INTEGER_CST)
7580 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7581 build_int_cst (TREE_TYPE (arg00),
7582 int01 / int11));
7583 alt1 = arg10;
7584 same = maybe_same;
7585 if (swap)
7586 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7590 if (!same)
7591 return NULL_TREE;
7593 if (! ANY_INTEGRAL_TYPE_P (type)
7594 || TYPE_OVERFLOW_WRAPS (type)
7595 /* We are neither factoring zero nor minus one. */
7596 || TREE_CODE (same) == INTEGER_CST)
7597 return fold_build2_loc (loc, MULT_EXPR, type,
7598 fold_build2_loc (loc, code, type,
7599 fold_convert_loc (loc, type, alt0),
7600 fold_convert_loc (loc, type, alt1)),
7601 fold_convert_loc (loc, type, same));
7603 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7604 same may be minus one and thus the multiplication may overflow. Perform
7605 the sum operation in an unsigned type. */
7606 tree utype = unsigned_type_for (type);
7607 tree tem = fold_build2_loc (loc, code, utype,
7608 fold_convert_loc (loc, utype, alt0),
7609 fold_convert_loc (loc, utype, alt1));
7610 /* If the sum evaluated to a constant that is not -INF the multiplication
7611 cannot overflow. */
7612 if (TREE_CODE (tem) == INTEGER_CST
7613 && (wi::to_wide (tem)
7614 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7615 return fold_build2_loc (loc, MULT_EXPR, type,
7616 fold_convert (type, tem), same);
7618 /* Do not resort to unsigned multiplication because
7619 we lose the no-overflow property of the expression. */
7620 return NULL_TREE;
7623 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7624 specified by EXPR into the buffer PTR of length LEN bytes.
7625 Return the number of bytes placed in the buffer, or zero
7626 upon failure. */
7628 static int
7629 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7631 tree type = TREE_TYPE (expr);
7632 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7633 int byte, offset, word, words;
7634 unsigned char value;
7636 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7637 return 0;
7638 if (off == -1)
7639 off = 0;
7641 if (ptr == NULL)
7642 /* Dry run. */
7643 return MIN (len, total_bytes - off);
7645 words = total_bytes / UNITS_PER_WORD;
7647 for (byte = 0; byte < total_bytes; byte++)
7649 int bitpos = byte * BITS_PER_UNIT;
7650 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7651 number of bytes. */
7652 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7654 if (total_bytes > UNITS_PER_WORD)
7656 word = byte / UNITS_PER_WORD;
7657 if (WORDS_BIG_ENDIAN)
7658 word = (words - 1) - word;
7659 offset = word * UNITS_PER_WORD;
7660 if (BYTES_BIG_ENDIAN)
7661 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7662 else
7663 offset += byte % UNITS_PER_WORD;
7665 else
7666 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7667 if (offset >= off && offset - off < len)
7668 ptr[offset - off] = value;
7670 return MIN (len, total_bytes - off);
7674 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7675 specified by EXPR into the buffer PTR of length LEN bytes.
7676 Return the number of bytes placed in the buffer, or zero
7677 upon failure. */
7679 static int
7680 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7682 tree type = TREE_TYPE (expr);
7683 scalar_mode mode = SCALAR_TYPE_MODE (type);
7684 int total_bytes = GET_MODE_SIZE (mode);
7685 FIXED_VALUE_TYPE value;
7686 tree i_value, i_type;
7688 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7689 return 0;
7691 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7693 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes)
7694 return 0;
7696 value = TREE_FIXED_CST (expr);
7697 i_value = double_int_to_tree (i_type, value.data);
7699 return native_encode_int (i_value, ptr, len, off);
7703 /* Subroutine of native_encode_expr. Encode the REAL_CST
7704 specified by EXPR into the buffer PTR of length LEN bytes.
7705 Return the number of bytes placed in the buffer, or zero
7706 upon failure. */
7708 static int
7709 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7711 tree type = TREE_TYPE (expr);
7712 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7713 int byte, offset, word, words, bitpos;
7714 unsigned char value;
7716 /* There are always 32 bits in each long, no matter the size of
7717 the hosts long. We handle floating point representations with
7718 up to 192 bits. */
7719 long tmp[6];
7721 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7722 return 0;
7723 if (off == -1)
7724 off = 0;
7726 if (ptr == NULL)
7727 /* Dry run. */
7728 return MIN (len, total_bytes - off);
7730 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7732 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7734 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7735 bitpos += BITS_PER_UNIT)
7737 byte = (bitpos / BITS_PER_UNIT) & 3;
7738 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7740 if (UNITS_PER_WORD < 4)
7742 word = byte / UNITS_PER_WORD;
7743 if (WORDS_BIG_ENDIAN)
7744 word = (words - 1) - word;
7745 offset = word * UNITS_PER_WORD;
7746 if (BYTES_BIG_ENDIAN)
7747 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7748 else
7749 offset += byte % UNITS_PER_WORD;
7751 else
7753 offset = byte;
7754 if (BYTES_BIG_ENDIAN)
7756 /* Reverse bytes within each long, or within the entire float
7757 if it's smaller than a long (for HFmode). */
7758 offset = MIN (3, total_bytes - 1) - offset;
7759 gcc_assert (offset >= 0);
7762 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7763 if (offset >= off
7764 && offset - off < len)
7765 ptr[offset - off] = value;
7767 return MIN (len, total_bytes - off);
7770 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7771 specified by EXPR into the buffer PTR of length LEN bytes.
7772 Return the number of bytes placed in the buffer, or zero
7773 upon failure. */
7775 static int
7776 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7778 int rsize, isize;
7779 tree part;
7781 part = TREE_REALPART (expr);
7782 rsize = native_encode_expr (part, ptr, len, off);
7783 if (off == -1 && rsize == 0)
7784 return 0;
7785 part = TREE_IMAGPART (expr);
7786 if (off != -1)
7787 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7788 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7789 len - rsize, off);
7790 if (off == -1 && isize != rsize)
7791 return 0;
7792 return rsize + isize;
7795 /* Like native_encode_vector, but only encode the first COUNT elements.
7796 The other arguments are as for native_encode_vector. */
7798 static int
7799 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7800 int off, unsigned HOST_WIDE_INT count)
7802 tree itype = TREE_TYPE (TREE_TYPE (expr));
7803 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7804 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7806 /* This is the only case in which elements can be smaller than a byte.
7807 Element 0 is always in the lsb of the containing byte. */
7808 unsigned int elt_bits = TYPE_PRECISION (itype);
7809 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7810 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7811 return 0;
7813 if (off == -1)
7814 off = 0;
7816 /* Zero the buffer and then set bits later where necessary. */
7817 int extract_bytes = MIN (len, total_bytes - off);
7818 if (ptr)
7819 memset (ptr, 0, extract_bytes);
7821 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7822 unsigned int first_elt = off * elts_per_byte;
7823 unsigned int extract_elts = extract_bytes * elts_per_byte;
7824 for (unsigned int i = 0; i < extract_elts; ++i)
7826 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7827 if (TREE_CODE (elt) != INTEGER_CST)
7828 return 0;
7830 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7832 unsigned int bit = i * elt_bits;
7833 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7836 return extract_bytes;
7839 int offset = 0;
7840 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7841 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7843 if (off >= size)
7845 off -= size;
7846 continue;
7848 tree elem = VECTOR_CST_ELT (expr, i);
7849 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7850 len - offset, off);
7851 if ((off == -1 && res != size) || res == 0)
7852 return 0;
7853 offset += res;
7854 if (offset >= len)
7855 return (off == -1 && i < count - 1) ? 0 : offset;
7856 if (off != -1)
7857 off = 0;
7859 return offset;
7862 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7863 specified by EXPR into the buffer PTR of length LEN bytes.
7864 Return the number of bytes placed in the buffer, or zero
7865 upon failure. */
7867 static int
7868 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7870 unsigned HOST_WIDE_INT count;
7871 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7872 return 0;
7873 return native_encode_vector_part (expr, ptr, len, off, count);
7877 /* Subroutine of native_encode_expr. Encode the STRING_CST
7878 specified by EXPR into the buffer PTR of length LEN bytes.
7879 Return the number of bytes placed in the buffer, or zero
7880 upon failure. */
7882 static int
7883 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7885 tree type = TREE_TYPE (expr);
7887 /* Wide-char strings are encoded in target byte-order so native
7888 encoding them is trivial. */
7889 if (BITS_PER_UNIT != CHAR_BIT
7890 || TREE_CODE (type) != ARRAY_TYPE
7891 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7892 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7893 return 0;
7895 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7896 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7897 return 0;
7898 if (off == -1)
7899 off = 0;
7900 len = MIN (total_bytes - off, len);
7901 if (ptr == NULL)
7902 /* Dry run. */;
7903 else
7905 int written = 0;
7906 if (off < TREE_STRING_LENGTH (expr))
7908 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7909 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7911 memset (ptr + written, 0, len - written);
7913 return len;
7917 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
7918 FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
7919 the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
7920 anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
7921 sufficient to encode the entire EXPR, or if OFF is out of bounds.
7922 Otherwise, start at byte offset OFF and encode at most LEN bytes.
7923 Return the number of bytes placed in the buffer, or zero upon failure. */
7926 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7928 /* We don't support starting at negative offset and -1 is special. */
7929 if (off < -1)
7930 return 0;
7932 switch (TREE_CODE (expr))
7934 case INTEGER_CST:
7935 return native_encode_int (expr, ptr, len, off);
7937 case REAL_CST:
7938 return native_encode_real (expr, ptr, len, off);
7940 case FIXED_CST:
7941 return native_encode_fixed (expr, ptr, len, off);
7943 case COMPLEX_CST:
7944 return native_encode_complex (expr, ptr, len, off);
7946 case VECTOR_CST:
7947 return native_encode_vector (expr, ptr, len, off);
7949 case STRING_CST:
7950 return native_encode_string (expr, ptr, len, off);
7952 default:
7953 return 0;
7957 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
7958 NON_LVALUE_EXPRs and nops. */
7961 native_encode_initializer (tree init, unsigned char *ptr, int len,
7962 int off)
7964 /* We don't support starting at negative offset and -1 is special. */
7965 if (off < -1 || init == NULL_TREE)
7966 return 0;
7968 STRIP_NOPS (init);
7969 switch (TREE_CODE (init))
7971 case VIEW_CONVERT_EXPR:
7972 case NON_LVALUE_EXPR:
7973 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off);
7974 default:
7975 return native_encode_expr (init, ptr, len, off);
7976 case CONSTRUCTOR:
7977 tree type = TREE_TYPE (init);
7978 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
7979 if (total_bytes < 0)
7980 return 0;
7981 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7982 return 0;
7983 int o = off == -1 ? 0 : off;
7984 if (TREE_CODE (type) == ARRAY_TYPE)
7986 HOST_WIDE_INT min_index;
7987 unsigned HOST_WIDE_INT cnt;
7988 HOST_WIDE_INT curpos = 0, fieldsize;
7989 constructor_elt *ce;
7991 if (TYPE_DOMAIN (type) == NULL_TREE
7992 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
7993 return 0;
7995 fieldsize = int_size_in_bytes (TREE_TYPE (type));
7996 if (fieldsize <= 0)
7997 return 0;
7999 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
8000 if (ptr != NULL)
8001 memset (ptr, '\0', MIN (total_bytes - off, len));
8003 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
8005 tree val = ce->value;
8006 tree index = ce->index;
8007 HOST_WIDE_INT pos = curpos, count = 0;
8008 bool full = false;
8009 if (index && TREE_CODE (index) == RANGE_EXPR)
8011 if (!tree_fits_shwi_p (TREE_OPERAND (index, 0))
8012 || !tree_fits_shwi_p (TREE_OPERAND (index, 1)))
8013 return 0;
8014 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
8015 * fieldsize;
8016 count = (tree_to_shwi (TREE_OPERAND (index, 1))
8017 - tree_to_shwi (TREE_OPERAND (index, 0)));
8019 else if (index)
8021 if (!tree_fits_shwi_p (index))
8022 return 0;
8023 pos = (tree_to_shwi (index) - min_index) * fieldsize;
8026 curpos = pos;
8027 if (val)
8030 if (off == -1
8031 || (curpos >= off
8032 && (curpos + fieldsize
8033 <= (HOST_WIDE_INT) off + len)))
8035 if (full)
8037 if (ptr)
8038 memcpy (ptr + (curpos - o), ptr + (pos - o),
8039 fieldsize);
8041 else if (!native_encode_initializer (val,
8043 ? ptr + curpos - o
8044 : NULL,
8045 fieldsize,
8046 off == -1 ? -1
8047 : 0))
8048 return 0;
8049 else
8051 full = true;
8052 pos = curpos;
8055 else if (curpos + fieldsize > off
8056 && curpos < (HOST_WIDE_INT) off + len)
8058 /* Partial overlap. */
8059 unsigned char *p = NULL;
8060 int no = 0;
8061 int l;
8062 if (curpos >= off)
8064 if (ptr)
8065 p = ptr + curpos - off;
8066 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8067 fieldsize);
8069 else
8071 p = ptr;
8072 no = off - curpos;
8073 l = len;
8075 if (!native_encode_initializer (val, p, l, no))
8076 return 0;
8078 curpos += fieldsize;
8080 while (count-- != 0);
8082 return MIN (total_bytes - off, len);
8084 else if (TREE_CODE (type) == RECORD_TYPE
8085 || TREE_CODE (type) == UNION_TYPE)
8087 unsigned HOST_WIDE_INT cnt;
8088 constructor_elt *ce;
8090 if (ptr != NULL)
8091 memset (ptr, '\0', MIN (total_bytes - off, len));
8092 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
8094 tree field = ce->index;
8095 tree val = ce->value;
8096 HOST_WIDE_INT pos, fieldsize;
8097 unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8099 if (field == NULL_TREE)
8100 return 0;
8102 pos = int_byte_position (field);
8103 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8104 continue;
8106 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8107 && TYPE_DOMAIN (TREE_TYPE (field))
8108 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8109 return 0;
8110 if (DECL_SIZE_UNIT (field) == NULL_TREE
8111 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8112 return 0;
8113 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8114 if (fieldsize == 0)
8115 continue;
8117 if (DECL_BIT_FIELD (field))
8119 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8120 return 0;
8121 fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8122 bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8123 if (bpos % BITS_PER_UNIT)
8124 bpos %= BITS_PER_UNIT;
8125 else
8126 bpos = 0;
8127 fieldsize += bpos;
8128 epos = fieldsize % BITS_PER_UNIT;
8129 fieldsize += BITS_PER_UNIT - 1;
8130 fieldsize /= BITS_PER_UNIT;
8133 if (off != -1 && pos + fieldsize <= off)
8134 continue;
8136 if (val == NULL_TREE)
8137 continue;
8139 if (DECL_BIT_FIELD (field))
8141 /* FIXME: Handle PDP endian. */
8142 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8143 return 0;
8145 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8146 if (repr == NULL_TREE
8147 || TREE_CODE (val) != INTEGER_CST
8148 || !INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8149 return 0;
8151 HOST_WIDE_INT rpos = int_byte_position (repr);
8152 if (rpos > pos)
8153 return 0;
8154 wide_int w = wi::to_wide (val,
8155 TYPE_PRECISION (TREE_TYPE (repr)));
8156 int diff = (TYPE_PRECISION (TREE_TYPE (repr))
8157 - TYPE_PRECISION (TREE_TYPE (field)));
8158 HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8159 if (!BYTES_BIG_ENDIAN)
8160 w = wi::lshift (w, bitoff);
8161 else
8162 w = wi::lshift (w, diff - bitoff);
8163 val = wide_int_to_tree (TREE_TYPE (repr), w);
8165 unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8166 / BITS_PER_UNIT + 1];
8167 int l = native_encode_int (val, buf, sizeof buf, 0);
8168 if (l * BITS_PER_UNIT != TYPE_PRECISION (TREE_TYPE (repr)))
8169 return 0;
8171 if (ptr == NULL)
8172 continue;
8174 /* If the bitfield does not start at byte boundary, handle
8175 the partial byte at the start. */
8176 if (bpos
8177 && (off == -1 || (pos >= off && len >= 1)))
8179 if (!BYTES_BIG_ENDIAN)
8181 int mask = (1 << bpos) - 1;
8182 buf[pos - rpos] &= ~mask;
8183 buf[pos - rpos] |= ptr[pos - o] & mask;
8185 else
8187 int mask = (1 << (BITS_PER_UNIT - bpos)) - 1;
8188 buf[pos - rpos] &= mask;
8189 buf[pos - rpos] |= ptr[pos - o] & ~mask;
8192 /* If the bitfield does not end at byte boundary, handle
8193 the partial byte at the end. */
8194 if (epos
8195 && (off == -1
8196 || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8198 if (!BYTES_BIG_ENDIAN)
8200 int mask = (1 << epos) - 1;
8201 buf[pos - rpos + fieldsize - 1] &= mask;
8202 buf[pos - rpos + fieldsize - 1]
8203 |= ptr[pos + fieldsize - 1 - o] & ~mask;
8205 else
8207 int mask = (1 << (BITS_PER_UNIT - epos)) - 1;
8208 buf[pos - rpos + fieldsize - 1] &= ~mask;
8209 buf[pos - rpos + fieldsize - 1]
8210 |= ptr[pos + fieldsize - 1 - o] & mask;
8213 if (off == -1
8214 || (pos >= off
8215 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8216 memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8217 else
8219 /* Partial overlap. */
8220 HOST_WIDE_INT fsz = fieldsize;
8221 if (pos < off)
8223 fsz -= (off - pos);
8224 pos = off;
8226 if (pos + fsz > (HOST_WIDE_INT) off + len)
8227 fsz = (HOST_WIDE_INT) off + len - pos;
8228 memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8230 continue;
8233 if (off == -1
8234 || (pos >= off
8235 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8237 if (!native_encode_initializer (val, ptr ? ptr + pos - o
8238 : NULL,
8239 fieldsize,
8240 off == -1 ? -1 : 0))
8241 return 0;
8243 else
8245 /* Partial overlap. */
8246 unsigned char *p = NULL;
8247 int no = 0;
8248 int l;
8249 if (pos >= off)
8251 if (ptr)
8252 p = ptr + pos - off;
8253 l = MIN ((HOST_WIDE_INT) off + len - pos,
8254 fieldsize);
8256 else
8258 p = ptr;
8259 no = off - pos;
8260 l = len;
8262 if (!native_encode_initializer (val, p, l, no))
8263 return 0;
8266 return MIN (total_bytes - off, len);
8268 return 0;
8273 /* Subroutine of native_interpret_expr. Interpret the contents of
8274 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8275 If the buffer cannot be interpreted, return NULL_TREE. */
8277 static tree
8278 native_interpret_int (tree type, const unsigned char *ptr, int len)
8280 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8282 if (total_bytes > len
8283 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8284 return NULL_TREE;
8286 wide_int result = wi::from_buffer (ptr, total_bytes);
8288 return wide_int_to_tree (type, result);
8292 /* Subroutine of native_interpret_expr. Interpret the contents of
8293 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8294 If the buffer cannot be interpreted, return NULL_TREE. */
8296 static tree
8297 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8299 scalar_mode mode = SCALAR_TYPE_MODE (type);
8300 int total_bytes = GET_MODE_SIZE (mode);
8301 double_int result;
8302 FIXED_VALUE_TYPE fixed_value;
8304 if (total_bytes > len
8305 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8306 return NULL_TREE;
8308 result = double_int::from_buffer (ptr, total_bytes);
8309 fixed_value = fixed_from_double_int (result, mode);
8311 return build_fixed (type, fixed_value);
8315 /* Subroutine of native_interpret_expr. Interpret the contents of
8316 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8317 If the buffer cannot be interpreted, return NULL_TREE. */
8319 static tree
8320 native_interpret_real (tree type, const unsigned char *ptr, int len)
8322 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8323 int total_bytes = GET_MODE_SIZE (mode);
8324 unsigned char value;
8325 /* There are always 32 bits in each long, no matter the size of
8326 the hosts long. We handle floating point representations with
8327 up to 192 bits. */
8328 REAL_VALUE_TYPE r;
8329 long tmp[6];
8331 if (total_bytes > len || total_bytes > 24)
8332 return NULL_TREE;
8333 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8335 memset (tmp, 0, sizeof (tmp));
8336 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8337 bitpos += BITS_PER_UNIT)
8339 /* Both OFFSET and BYTE index within a long;
8340 bitpos indexes the whole float. */
8341 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8342 if (UNITS_PER_WORD < 4)
8344 int word = byte / UNITS_PER_WORD;
8345 if (WORDS_BIG_ENDIAN)
8346 word = (words - 1) - word;
8347 offset = word * UNITS_PER_WORD;
8348 if (BYTES_BIG_ENDIAN)
8349 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8350 else
8351 offset += byte % UNITS_PER_WORD;
8353 else
8355 offset = byte;
8356 if (BYTES_BIG_ENDIAN)
8358 /* Reverse bytes within each long, or within the entire float
8359 if it's smaller than a long (for HFmode). */
8360 offset = MIN (3, total_bytes - 1) - offset;
8361 gcc_assert (offset >= 0);
8364 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8366 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8369 real_from_target (&r, tmp, mode);
8370 tree ret = build_real (type, r);
8371 if (MODE_COMPOSITE_P (mode))
8373 /* For floating point values in composite modes, punt if this folding
8374 doesn't preserve bit representation. As the mode doesn't have fixed
8375 precision while GCC pretends it does, there could be valid values that
8376 GCC can't really represent accurately. See PR95450. */
8377 unsigned char buf[24];
8378 if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
8379 || memcmp (ptr, buf, total_bytes) != 0)
8380 ret = NULL_TREE;
8382 return ret;
8386 /* Subroutine of native_interpret_expr. Interpret the contents of
8387 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8388 If the buffer cannot be interpreted, return NULL_TREE. */
8390 static tree
8391 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8393 tree etype, rpart, ipart;
8394 int size;
8396 etype = TREE_TYPE (type);
8397 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8398 if (size * 2 > len)
8399 return NULL_TREE;
8400 rpart = native_interpret_expr (etype, ptr, size);
8401 if (!rpart)
8402 return NULL_TREE;
8403 ipart = native_interpret_expr (etype, ptr+size, size);
8404 if (!ipart)
8405 return NULL_TREE;
8406 return build_complex (type, rpart, ipart);
8409 /* Read a vector of type TYPE from the target memory image given by BYTES,
8410 which contains LEN bytes. The vector is known to be encodable using
8411 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8413 Return the vector on success, otherwise return null. */
8415 static tree
8416 native_interpret_vector_part (tree type, const unsigned char *bytes,
8417 unsigned int len, unsigned int npatterns,
8418 unsigned int nelts_per_pattern)
8420 tree elt_type = TREE_TYPE (type);
8421 if (VECTOR_BOOLEAN_TYPE_P (type)
8422 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8424 /* This is the only case in which elements can be smaller than a byte.
8425 Element 0 is always in the lsb of the containing byte. */
8426 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8427 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8428 return NULL_TREE;
8430 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8431 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8433 unsigned int bit_index = i * elt_bits;
8434 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8435 unsigned int lsb = bit_index % BITS_PER_UNIT;
8436 builder.quick_push (bytes[byte_index] & (1 << lsb)
8437 ? build_all_ones_cst (elt_type)
8438 : build_zero_cst (elt_type));
8440 return builder.build ();
8443 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8444 if (elt_bytes * npatterns * nelts_per_pattern > len)
8445 return NULL_TREE;
8447 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8448 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8450 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8451 if (!elt)
8452 return NULL_TREE;
8453 builder.quick_push (elt);
8454 bytes += elt_bytes;
8456 return builder.build ();
8459 /* Subroutine of native_interpret_expr. Interpret the contents of
8460 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8461 If the buffer cannot be interpreted, return NULL_TREE. */
8463 static tree
8464 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8466 tree etype;
8467 unsigned int size;
8468 unsigned HOST_WIDE_INT count;
8470 etype = TREE_TYPE (type);
8471 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8472 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8473 || size * count > len)
8474 return NULL_TREE;
8476 return native_interpret_vector_part (type, ptr, len, count, 1);
8480 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8481 the buffer PTR of length LEN as a constant of type TYPE. For
8482 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8483 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8484 return NULL_TREE. */
8486 tree
8487 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8489 switch (TREE_CODE (type))
8491 case INTEGER_TYPE:
8492 case ENUMERAL_TYPE:
8493 case BOOLEAN_TYPE:
8494 case POINTER_TYPE:
8495 case REFERENCE_TYPE:
8496 return native_interpret_int (type, ptr, len);
8498 case REAL_TYPE:
8499 return native_interpret_real (type, ptr, len);
8501 case FIXED_POINT_TYPE:
8502 return native_interpret_fixed (type, ptr, len);
8504 case COMPLEX_TYPE:
8505 return native_interpret_complex (type, ptr, len);
8507 case VECTOR_TYPE:
8508 return native_interpret_vector (type, ptr, len);
8510 default:
8511 return NULL_TREE;
8515 /* Returns true if we can interpret the contents of a native encoding
8516 as TYPE. */
8518 bool
8519 can_native_interpret_type_p (tree type)
8521 switch (TREE_CODE (type))
8523 case INTEGER_TYPE:
8524 case ENUMERAL_TYPE:
8525 case BOOLEAN_TYPE:
8526 case POINTER_TYPE:
8527 case REFERENCE_TYPE:
8528 case FIXED_POINT_TYPE:
8529 case REAL_TYPE:
8530 case COMPLEX_TYPE:
8531 case VECTOR_TYPE:
8532 return true;
8533 default:
8534 return false;
8538 /* Routines for manipulation of native_encode_expr encoded data if the encoded
8539 or extracted constant positions and/or sizes aren't byte aligned. */
8541 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
8542 bits between adjacent elements. AMNT should be within
8543 [0, BITS_PER_UNIT).
8544 Example, AMNT = 2:
8545 00011111|11100000 << 2 = 01111111|10000000
8546 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
8548 void
8549 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
8550 unsigned int amnt)
8552 if (amnt == 0)
8553 return;
8555 unsigned char carry_over = 0U;
8556 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
8557 unsigned char clear_mask = (~0U) << amnt;
8559 for (unsigned int i = 0; i < sz; i++)
8561 unsigned prev_carry_over = carry_over;
8562 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
8564 ptr[i] <<= amnt;
8565 if (i != 0)
8567 ptr[i] &= clear_mask;
8568 ptr[i] |= prev_carry_over;
8573 /* Like shift_bytes_in_array_left but for big-endian.
8574 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
8575 bits between adjacent elements. AMNT should be within
8576 [0, BITS_PER_UNIT).
8577 Example, AMNT = 2:
8578 00011111|11100000 >> 2 = 00000111|11111000
8579 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
8581 void
8582 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
8583 unsigned int amnt)
8585 if (amnt == 0)
8586 return;
8588 unsigned char carry_over = 0U;
8589 unsigned char carry_mask = ~(~0U << amnt);
8591 for (unsigned int i = 0; i < sz; i++)
8593 unsigned prev_carry_over = carry_over;
8594 carry_over = ptr[i] & carry_mask;
8596 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
8597 ptr[i] >>= amnt;
8598 ptr[i] |= prev_carry_over;
8602 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
8603 directly on the VECTOR_CST encoding, in a way that works for variable-
8604 length vectors. Return the resulting VECTOR_CST on success or null
8605 on failure. */
8607 static tree
8608 fold_view_convert_vector_encoding (tree type, tree expr)
8610 tree expr_type = TREE_TYPE (expr);
8611 poly_uint64 type_bits, expr_bits;
8612 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
8613 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
8614 return NULL_TREE;
8616 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
8617 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
8618 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
8619 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
8621 /* We can only preserve the semantics of a stepped pattern if the new
8622 vector element is an integer of the same size. */
8623 if (VECTOR_CST_STEPPED_P (expr)
8624 && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
8625 return NULL_TREE;
8627 /* The number of bits needed to encode one element from every pattern
8628 of the original vector. */
8629 unsigned int expr_sequence_bits
8630 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
8632 /* The number of bits needed to encode one element from every pattern
8633 of the result. */
8634 unsigned int type_sequence_bits
8635 = least_common_multiple (expr_sequence_bits, type_elt_bits);
8637 /* Don't try to read more bytes than are available, which can happen
8638 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
8639 The general VIEW_CONVERT handling can cope with that case, so there's
8640 no point complicating things here. */
8641 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
8642 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
8643 BITS_PER_UNIT);
8644 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
8645 if (known_gt (buffer_bits, expr_bits))
8646 return NULL_TREE;
8648 /* Get enough bytes of EXPR to form the new encoding. */
8649 auto_vec<unsigned char, 128> buffer (buffer_bytes);
8650 buffer.quick_grow (buffer_bytes);
8651 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
8652 buffer_bits / expr_elt_bits)
8653 != (int) buffer_bytes)
8654 return NULL_TREE;
8656 /* Reencode the bytes as TYPE. */
8657 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
8658 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
8659 type_npatterns, nelts_per_pattern);
8662 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
8663 TYPE at compile-time. If we're unable to perform the conversion
8664 return NULL_TREE. */
8666 static tree
8667 fold_view_convert_expr (tree type, tree expr)
8669 /* We support up to 512-bit values (for V8DFmode). */
8670 unsigned char buffer[64];
8671 int len;
8673 /* Check that the host and target are sane. */
8674 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
8675 return NULL_TREE;
8677 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
8678 if (tree res = fold_view_convert_vector_encoding (type, expr))
8679 return res;
8681 len = native_encode_expr (expr, buffer, sizeof (buffer));
8682 if (len == 0)
8683 return NULL_TREE;
8685 return native_interpret_expr (type, buffer, len);
8688 /* Build an expression for the address of T. Folds away INDIRECT_REF
8689 to avoid confusing the gimplify process. */
8691 tree
8692 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
8694 /* The size of the object is not relevant when talking about its address. */
8695 if (TREE_CODE (t) == WITH_SIZE_EXPR)
8696 t = TREE_OPERAND (t, 0);
8698 if (TREE_CODE (t) == INDIRECT_REF)
8700 t = TREE_OPERAND (t, 0);
8702 if (TREE_TYPE (t) != ptrtype)
8703 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
8705 else if (TREE_CODE (t) == MEM_REF
8706 && integer_zerop (TREE_OPERAND (t, 1)))
8708 t = TREE_OPERAND (t, 0);
8710 if (TREE_TYPE (t) != ptrtype)
8711 t = fold_convert_loc (loc, ptrtype, t);
8713 else if (TREE_CODE (t) == MEM_REF
8714 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
8715 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
8716 TREE_OPERAND (t, 0),
8717 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
8718 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8720 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
8722 if (TREE_TYPE (t) != ptrtype)
8723 t = fold_convert_loc (loc, ptrtype, t);
8725 else
8726 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
8728 return t;
8731 /* Build an expression for the address of T. */
8733 tree
8734 build_fold_addr_expr_loc (location_t loc, tree t)
8736 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8738 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
8741 /* Fold a unary expression of code CODE and type TYPE with operand
8742 OP0. Return the folded expression if folding is successful.
8743 Otherwise, return NULL_TREE. */
8745 tree
8746 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8748 tree tem;
8749 tree arg0;
8750 enum tree_code_class kind = TREE_CODE_CLASS (code);
8752 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8753 && TREE_CODE_LENGTH (code) == 1);
8755 arg0 = op0;
8756 if (arg0)
8758 if (CONVERT_EXPR_CODE_P (code)
8759 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
8761 /* Don't use STRIP_NOPS, because signedness of argument type
8762 matters. */
8763 STRIP_SIGN_NOPS (arg0);
8765 else
8767 /* Strip any conversions that don't change the mode. This
8768 is safe for every expression, except for a comparison
8769 expression because its signedness is derived from its
8770 operands.
8772 Note that this is done as an internal manipulation within
8773 the constant folder, in order to find the simplest
8774 representation of the arguments so that their form can be
8775 studied. In any cases, the appropriate type conversions
8776 should be put back in the tree that will get out of the
8777 constant folder. */
8778 STRIP_NOPS (arg0);
8781 if (CONSTANT_CLASS_P (arg0))
8783 tree tem = const_unop (code, type, arg0);
8784 if (tem)
8786 if (TREE_TYPE (tem) != type)
8787 tem = fold_convert_loc (loc, type, tem);
8788 return tem;
8793 tem = generic_simplify (loc, code, type, op0);
8794 if (tem)
8795 return tem;
8797 if (TREE_CODE_CLASS (code) == tcc_unary)
8799 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8800 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8801 fold_build1_loc (loc, code, type,
8802 fold_convert_loc (loc, TREE_TYPE (op0),
8803 TREE_OPERAND (arg0, 1))));
8804 else if (TREE_CODE (arg0) == COND_EXPR)
8806 tree arg01 = TREE_OPERAND (arg0, 1);
8807 tree arg02 = TREE_OPERAND (arg0, 2);
8808 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8809 arg01 = fold_build1_loc (loc, code, type,
8810 fold_convert_loc (loc,
8811 TREE_TYPE (op0), arg01));
8812 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8813 arg02 = fold_build1_loc (loc, code, type,
8814 fold_convert_loc (loc,
8815 TREE_TYPE (op0), arg02));
8816 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
8817 arg01, arg02);
8819 /* If this was a conversion, and all we did was to move into
8820 inside the COND_EXPR, bring it back out. But leave it if
8821 it is a conversion from integer to integer and the
8822 result precision is no wider than a word since such a
8823 conversion is cheap and may be optimized away by combine,
8824 while it couldn't if it were outside the COND_EXPR. Then return
8825 so we don't get into an infinite recursion loop taking the
8826 conversion out and then back in. */
8828 if ((CONVERT_EXPR_CODE_P (code)
8829 || code == NON_LVALUE_EXPR)
8830 && TREE_CODE (tem) == COND_EXPR
8831 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8832 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8833 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8834 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8835 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8836 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8837 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8838 && (INTEGRAL_TYPE_P
8839 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8840 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8841 || flag_syntax_only))
8842 tem = build1_loc (loc, code, type,
8843 build3 (COND_EXPR,
8844 TREE_TYPE (TREE_OPERAND
8845 (TREE_OPERAND (tem, 1), 0)),
8846 TREE_OPERAND (tem, 0),
8847 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8848 TREE_OPERAND (TREE_OPERAND (tem, 2),
8849 0)));
8850 return tem;
8854 switch (code)
8856 case NON_LVALUE_EXPR:
8857 if (!maybe_lvalue_p (op0))
8858 return fold_convert_loc (loc, type, op0);
8859 return NULL_TREE;
8861 CASE_CONVERT:
8862 case FLOAT_EXPR:
8863 case FIX_TRUNC_EXPR:
8864 if (COMPARISON_CLASS_P (op0))
8866 /* If we have (type) (a CMP b) and type is an integral type, return
8867 new expression involving the new type. Canonicalize
8868 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
8869 non-integral type.
8870 Do not fold the result as that would not simplify further, also
8871 folding again results in recursions. */
8872 if (TREE_CODE (type) == BOOLEAN_TYPE)
8873 return build2_loc (loc, TREE_CODE (op0), type,
8874 TREE_OPERAND (op0, 0),
8875 TREE_OPERAND (op0, 1));
8876 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
8877 && TREE_CODE (type) != VECTOR_TYPE)
8878 return build3_loc (loc, COND_EXPR, type, op0,
8879 constant_boolean_node (true, type),
8880 constant_boolean_node (false, type));
8883 /* Handle (T *)&A.B.C for A being of type T and B and C
8884 living at offset zero. This occurs frequently in
8885 C++ upcasting and then accessing the base. */
8886 if (TREE_CODE (op0) == ADDR_EXPR
8887 && POINTER_TYPE_P (type)
8888 && handled_component_p (TREE_OPERAND (op0, 0)))
8890 poly_int64 bitsize, bitpos;
8891 tree offset;
8892 machine_mode mode;
8893 int unsignedp, reversep, volatilep;
8894 tree base
8895 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
8896 &offset, &mode, &unsignedp, &reversep,
8897 &volatilep);
8898 /* If the reference was to a (constant) zero offset, we can use
8899 the address of the base if it has the same base type
8900 as the result type and the pointer type is unqualified. */
8901 if (!offset
8902 && known_eq (bitpos, 0)
8903 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8904 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8905 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8906 return fold_convert_loc (loc, type,
8907 build_fold_addr_expr_loc (loc, base));
8910 if (TREE_CODE (op0) == MODIFY_EXPR
8911 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8912 /* Detect assigning a bitfield. */
8913 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8914 && DECL_BIT_FIELD
8915 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8917 /* Don't leave an assignment inside a conversion
8918 unless assigning a bitfield. */
8919 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8920 /* First do the assignment, then return converted constant. */
8921 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8922 TREE_NO_WARNING (tem) = 1;
8923 TREE_USED (tem) = 1;
8924 return tem;
8927 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8928 constants (if x has signed type, the sign bit cannot be set
8929 in c). This folds extension into the BIT_AND_EXPR.
8930 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8931 very likely don't have maximal range for their precision and this
8932 transformation effectively doesn't preserve non-maximal ranges. */
8933 if (TREE_CODE (type) == INTEGER_TYPE
8934 && TREE_CODE (op0) == BIT_AND_EXPR
8935 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8937 tree and_expr = op0;
8938 tree and0 = TREE_OPERAND (and_expr, 0);
8939 tree and1 = TREE_OPERAND (and_expr, 1);
8940 int change = 0;
8942 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8943 || (TYPE_PRECISION (type)
8944 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8945 change = 1;
8946 else if (TYPE_PRECISION (TREE_TYPE (and1))
8947 <= HOST_BITS_PER_WIDE_INT
8948 && tree_fits_uhwi_p (and1))
8950 unsigned HOST_WIDE_INT cst;
8952 cst = tree_to_uhwi (and1);
8953 cst &= HOST_WIDE_INT_M1U
8954 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8955 change = (cst == 0);
8956 if (change
8957 && !flag_syntax_only
8958 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
8959 == ZERO_EXTEND))
8961 tree uns = unsigned_type_for (TREE_TYPE (and0));
8962 and0 = fold_convert_loc (loc, uns, and0);
8963 and1 = fold_convert_loc (loc, uns, and1);
8966 if (change)
8968 tem = force_fit_type (type, wi::to_widest (and1), 0,
8969 TREE_OVERFLOW (and1));
8970 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8971 fold_convert_loc (loc, type, and0), tem);
8975 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
8976 cast (T1)X will fold away. We assume that this happens when X itself
8977 is a cast. */
8978 if (POINTER_TYPE_P (type)
8979 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8980 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
8982 tree arg00 = TREE_OPERAND (arg0, 0);
8983 tree arg01 = TREE_OPERAND (arg0, 1);
8985 return fold_build_pointer_plus_loc
8986 (loc, fold_convert_loc (loc, type, arg00), arg01);
8989 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8990 of the same precision, and X is an integer type not narrower than
8991 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8992 if (INTEGRAL_TYPE_P (type)
8993 && TREE_CODE (op0) == BIT_NOT_EXPR
8994 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8995 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8996 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8998 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8999 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9000 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9001 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9002 fold_convert_loc (loc, type, tem));
9005 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9006 type of X and Y (integer types only). */
9007 if (INTEGRAL_TYPE_P (type)
9008 && TREE_CODE (op0) == MULT_EXPR
9009 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9010 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
9012 /* Be careful not to introduce new overflows. */
9013 tree mult_type;
9014 if (TYPE_OVERFLOW_WRAPS (type))
9015 mult_type = type;
9016 else
9017 mult_type = unsigned_type_for (type);
9019 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9021 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9022 fold_convert_loc (loc, mult_type,
9023 TREE_OPERAND (op0, 0)),
9024 fold_convert_loc (loc, mult_type,
9025 TREE_OPERAND (op0, 1)));
9026 return fold_convert_loc (loc, type, tem);
9030 return NULL_TREE;
9032 case VIEW_CONVERT_EXPR:
9033 if (TREE_CODE (op0) == MEM_REF)
9035 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9036 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9037 tem = fold_build2_loc (loc, MEM_REF, type,
9038 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9039 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9040 return tem;
9043 return NULL_TREE;
9045 case NEGATE_EXPR:
9046 tem = fold_negate_expr (loc, arg0);
9047 if (tem)
9048 return fold_convert_loc (loc, type, tem);
9049 return NULL_TREE;
9051 case ABS_EXPR:
9052 /* Convert fabs((double)float) into (double)fabsf(float). */
9053 if (TREE_CODE (arg0) == NOP_EXPR
9054 && TREE_CODE (type) == REAL_TYPE)
9056 tree targ0 = strip_float_extensions (arg0);
9057 if (targ0 != arg0)
9058 return fold_convert_loc (loc, type,
9059 fold_build1_loc (loc, ABS_EXPR,
9060 TREE_TYPE (targ0),
9061 targ0));
9063 return NULL_TREE;
9065 case BIT_NOT_EXPR:
9066 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9067 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9068 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9069 fold_convert_loc (loc, type,
9070 TREE_OPERAND (arg0, 0)))))
9071 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9072 fold_convert_loc (loc, type,
9073 TREE_OPERAND (arg0, 1)));
9074 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9075 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9076 fold_convert_loc (loc, type,
9077 TREE_OPERAND (arg0, 1)))))
9078 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9079 fold_convert_loc (loc, type,
9080 TREE_OPERAND (arg0, 0)), tem);
9082 return NULL_TREE;
9084 case TRUTH_NOT_EXPR:
9085 /* Note that the operand of this must be an int
9086 and its values must be 0 or 1.
9087 ("true" is a fixed value perhaps depending on the language,
9088 but we don't handle values other than 1 correctly yet.) */
9089 tem = fold_truth_not_expr (loc, arg0);
9090 if (!tem)
9091 return NULL_TREE;
9092 return fold_convert_loc (loc, type, tem);
9094 case INDIRECT_REF:
9095 /* Fold *&X to X if X is an lvalue. */
9096 if (TREE_CODE (op0) == ADDR_EXPR)
9098 tree op00 = TREE_OPERAND (op0, 0);
9099 if ((VAR_P (op00)
9100 || TREE_CODE (op00) == PARM_DECL
9101 || TREE_CODE (op00) == RESULT_DECL)
9102 && !TREE_READONLY (op00))
9103 return op00;
9105 return NULL_TREE;
9107 default:
9108 return NULL_TREE;
9109 } /* switch (code) */
9113 /* If the operation was a conversion do _not_ mark a resulting constant
9114 with TREE_OVERFLOW if the original constant was not. These conversions
9115 have implementation defined behavior and retaining the TREE_OVERFLOW
9116 flag here would confuse later passes such as VRP. */
9117 tree
9118 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9119 tree type, tree op0)
9121 tree res = fold_unary_loc (loc, code, type, op0);
9122 if (res
9123 && TREE_CODE (res) == INTEGER_CST
9124 && TREE_CODE (op0) == INTEGER_CST
9125 && CONVERT_EXPR_CODE_P (code))
9126 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9128 return res;
9131 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9132 operands OP0 and OP1. LOC is the location of the resulting expression.
9133 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
9134 Return the folded expression if folding is successful. Otherwise,
9135 return NULL_TREE. */
9136 static tree
9137 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9138 tree arg0, tree arg1, tree op0, tree op1)
9140 tree tem;
9142 /* We only do these simplifications if we are optimizing. */
9143 if (!optimize)
9144 return NULL_TREE;
9146 /* Check for things like (A || B) && (A || C). We can convert this
9147 to A || (B && C). Note that either operator can be any of the four
9148 truth and/or operations and the transformation will still be
9149 valid. Also note that we only care about order for the
9150 ANDIF and ORIF operators. If B contains side effects, this
9151 might change the truth-value of A. */
9152 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9153 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9154 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9155 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9156 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9157 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9159 tree a00 = TREE_OPERAND (arg0, 0);
9160 tree a01 = TREE_OPERAND (arg0, 1);
9161 tree a10 = TREE_OPERAND (arg1, 0);
9162 tree a11 = TREE_OPERAND (arg1, 1);
9163 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9164 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9165 && (code == TRUTH_AND_EXPR
9166 || code == TRUTH_OR_EXPR));
9168 if (operand_equal_p (a00, a10, 0))
9169 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9170 fold_build2_loc (loc, code, type, a01, a11));
9171 else if (commutative && operand_equal_p (a00, a11, 0))
9172 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9173 fold_build2_loc (loc, code, type, a01, a10));
9174 else if (commutative && operand_equal_p (a01, a10, 0))
9175 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9176 fold_build2_loc (loc, code, type, a00, a11));
9178 /* This case if tricky because we must either have commutative
9179 operators or else A10 must not have side-effects. */
9181 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9182 && operand_equal_p (a01, a11, 0))
9183 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9184 fold_build2_loc (loc, code, type, a00, a10),
9185 a01);
9188 /* See if we can build a range comparison. */
9189 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9190 return tem;
9192 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9193 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9195 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9196 if (tem)
9197 return fold_build2_loc (loc, code, type, tem, arg1);
9200 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9201 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9203 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9204 if (tem)
9205 return fold_build2_loc (loc, code, type, arg0, tem);
9208 /* Check for the possibility of merging component references. If our
9209 lhs is another similar operation, try to merge its rhs with our
9210 rhs. Then try to merge our lhs and rhs. */
9211 if (TREE_CODE (arg0) == code
9212 && (tem = fold_truth_andor_1 (loc, code, type,
9213 TREE_OPERAND (arg0, 1), arg1)) != 0)
9214 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9216 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9217 return tem;
9219 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9220 if (param_logical_op_non_short_circuit != -1)
9221 logical_op_non_short_circuit
9222 = param_logical_op_non_short_circuit;
9223 if (logical_op_non_short_circuit
9224 && !flag_sanitize_coverage
9225 && (code == TRUTH_AND_EXPR
9226 || code == TRUTH_ANDIF_EXPR
9227 || code == TRUTH_OR_EXPR
9228 || code == TRUTH_ORIF_EXPR))
9230 enum tree_code ncode, icode;
9232 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9233 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9234 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9236 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9237 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9238 We don't want to pack more than two leafs to a non-IF AND/OR
9239 expression.
9240 If tree-code of left-hand operand isn't an AND/OR-IF code and not
9241 equal to IF-CODE, then we don't want to add right-hand operand.
9242 If the inner right-hand side of left-hand operand has
9243 side-effects, or isn't simple, then we can't add to it,
9244 as otherwise we might destroy if-sequence. */
9245 if (TREE_CODE (arg0) == icode
9246 && simple_operand_p_2 (arg1)
9247 /* Needed for sequence points to handle trappings, and
9248 side-effects. */
9249 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
9251 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9252 arg1);
9253 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9254 tem);
9256 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9257 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
9258 else if (TREE_CODE (arg1) == icode
9259 && simple_operand_p_2 (arg0)
9260 /* Needed for sequence points to handle trappings, and
9261 side-effects. */
9262 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
9264 tem = fold_build2_loc (loc, ncode, type,
9265 arg0, TREE_OPERAND (arg1, 0));
9266 return fold_build2_loc (loc, icode, type, tem,
9267 TREE_OPERAND (arg1, 1));
9269 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9270 into (A OR B).
9271 For sequence point consistancy, we need to check for trapping,
9272 and side-effects. */
9273 else if (code == icode && simple_operand_p_2 (arg0)
9274 && simple_operand_p_2 (arg1))
9275 return fold_build2_loc (loc, ncode, type, arg0, arg1);
9278 return NULL_TREE;
9281 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9282 by changing CODE to reduce the magnitude of constants involved in
9283 ARG0 of the comparison.
9284 Returns a canonicalized comparison tree if a simplification was
9285 possible, otherwise returns NULL_TREE.
9286 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9287 valid if signed overflow is undefined. */
9289 static tree
9290 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9291 tree arg0, tree arg1,
9292 bool *strict_overflow_p)
9294 enum tree_code code0 = TREE_CODE (arg0);
9295 tree t, cst0 = NULL_TREE;
9296 int sgn0;
9298 /* Match A +- CST code arg1. We can change this only if overflow
9299 is undefined. */
9300 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9301 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9302 /* In principle pointers also have undefined overflow behavior,
9303 but that causes problems elsewhere. */
9304 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9305 && (code0 == MINUS_EXPR
9306 || code0 == PLUS_EXPR)
9307 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9308 return NULL_TREE;
9310 /* Identify the constant in arg0 and its sign. */
9311 cst0 = TREE_OPERAND (arg0, 1);
9312 sgn0 = tree_int_cst_sgn (cst0);
9314 /* Overflowed constants and zero will cause problems. */
9315 if (integer_zerop (cst0)
9316 || TREE_OVERFLOW (cst0))
9317 return NULL_TREE;
9319 /* See if we can reduce the magnitude of the constant in
9320 arg0 by changing the comparison code. */
9321 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9322 if (code == LT_EXPR
9323 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9324 code = LE_EXPR;
9325 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9326 else if (code == GT_EXPR
9327 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9328 code = GE_EXPR;
9329 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9330 else if (code == LE_EXPR
9331 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9332 code = LT_EXPR;
9333 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9334 else if (code == GE_EXPR
9335 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9336 code = GT_EXPR;
9337 else
9338 return NULL_TREE;
9339 *strict_overflow_p = true;
9341 /* Now build the constant reduced in magnitude. But not if that
9342 would produce one outside of its types range. */
9343 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9344 && ((sgn0 == 1
9345 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9346 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9347 || (sgn0 == -1
9348 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9349 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9350 return NULL_TREE;
9352 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9353 cst0, build_int_cst (TREE_TYPE (cst0), 1));
9354 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9355 t = fold_convert (TREE_TYPE (arg1), t);
9357 return fold_build2_loc (loc, code, type, t, arg1);
9360 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9361 overflow further. Try to decrease the magnitude of constants involved
9362 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9363 and put sole constants at the second argument position.
9364 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9366 static tree
9367 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9368 tree arg0, tree arg1)
9370 tree t;
9371 bool strict_overflow_p;
9372 const char * const warnmsg = G_("assuming signed overflow does not occur "
9373 "when reducing constant in comparison");
9375 /* Try canonicalization by simplifying arg0. */
9376 strict_overflow_p = false;
9377 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9378 &strict_overflow_p);
9379 if (t)
9381 if (strict_overflow_p)
9382 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9383 return t;
9386 /* Try canonicalization by simplifying arg1 using the swapped
9387 comparison. */
9388 code = swap_tree_comparison (code);
9389 strict_overflow_p = false;
9390 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9391 &strict_overflow_p);
9392 if (t && strict_overflow_p)
9393 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9394 return t;
9397 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9398 space. This is used to avoid issuing overflow warnings for
9399 expressions like &p->x which cannot wrap. */
9401 static bool
9402 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9404 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9405 return true;
9407 if (maybe_lt (bitpos, 0))
9408 return true;
9410 poly_wide_int wi_offset;
9411 int precision = TYPE_PRECISION (TREE_TYPE (base));
9412 if (offset == NULL_TREE)
9413 wi_offset = wi::zero (precision);
9414 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9415 return true;
9416 else
9417 wi_offset = wi::to_poly_wide (offset);
9419 wi::overflow_type overflow;
9420 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9421 precision);
9422 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9423 if (overflow)
9424 return true;
9426 poly_uint64 total_hwi, size;
9427 if (!total.to_uhwi (&total_hwi)
9428 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9429 &size)
9430 || known_eq (size, 0U))
9431 return true;
9433 if (known_le (total_hwi, size))
9434 return false;
9436 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9437 array. */
9438 if (TREE_CODE (base) == ADDR_EXPR
9439 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9440 &size)
9441 && maybe_ne (size, 0U)
9442 && known_le (total_hwi, size))
9443 return false;
9445 return true;
9448 /* Return a positive integer when the symbol DECL is known to have
9449 a nonzero address, zero when it's known not to (e.g., it's a weak
9450 symbol), and a negative integer when the symbol is not yet in the
9451 symbol table and so whether or not its address is zero is unknown.
9452 For function local objects always return positive integer. */
9453 static int
9454 maybe_nonzero_address (tree decl)
9456 if (DECL_P (decl) && decl_in_symtab_p (decl))
9457 if (struct symtab_node *symbol = symtab_node::get_create (decl))
9458 return symbol->nonzero_address ();
9460 /* Function local objects are never NULL. */
9461 if (DECL_P (decl)
9462 && (DECL_CONTEXT (decl)
9463 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
9464 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
9465 return 1;
9467 return -1;
9470 /* Subroutine of fold_binary. This routine performs all of the
9471 transformations that are common to the equality/inequality
9472 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9473 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9474 fold_binary should call fold_binary. Fold a comparison with
9475 tree code CODE and type TYPE with operands OP0 and OP1. Return
9476 the folded comparison or NULL_TREE. */
9478 static tree
9479 fold_comparison (location_t loc, enum tree_code code, tree type,
9480 tree op0, tree op1)
9482 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
9483 tree arg0, arg1, tem;
9485 arg0 = op0;
9486 arg1 = op1;
9488 STRIP_SIGN_NOPS (arg0);
9489 STRIP_SIGN_NOPS (arg1);
9491 /* For comparisons of pointers we can decompose it to a compile time
9492 comparison of the base objects and the offsets into the object.
9493 This requires at least one operand being an ADDR_EXPR or a
9494 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9495 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9496 && (TREE_CODE (arg0) == ADDR_EXPR
9497 || TREE_CODE (arg1) == ADDR_EXPR
9498 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9499 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9501 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9502 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
9503 machine_mode mode;
9504 int volatilep, reversep, unsignedp;
9505 bool indirect_base0 = false, indirect_base1 = false;
9507 /* Get base and offset for the access. Strip ADDR_EXPR for
9508 get_inner_reference, but put it back by stripping INDIRECT_REF
9509 off the base object if possible. indirect_baseN will be true
9510 if baseN is not an address but refers to the object itself. */
9511 base0 = arg0;
9512 if (TREE_CODE (arg0) == ADDR_EXPR)
9514 base0
9515 = get_inner_reference (TREE_OPERAND (arg0, 0),
9516 &bitsize, &bitpos0, &offset0, &mode,
9517 &unsignedp, &reversep, &volatilep);
9518 if (TREE_CODE (base0) == INDIRECT_REF)
9519 base0 = TREE_OPERAND (base0, 0);
9520 else
9521 indirect_base0 = true;
9523 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9525 base0 = TREE_OPERAND (arg0, 0);
9526 STRIP_SIGN_NOPS (base0);
9527 if (TREE_CODE (base0) == ADDR_EXPR)
9529 base0
9530 = get_inner_reference (TREE_OPERAND (base0, 0),
9531 &bitsize, &bitpos0, &offset0, &mode,
9532 &unsignedp, &reversep, &volatilep);
9533 if (TREE_CODE (base0) == INDIRECT_REF)
9534 base0 = TREE_OPERAND (base0, 0);
9535 else
9536 indirect_base0 = true;
9538 if (offset0 == NULL_TREE || integer_zerop (offset0))
9539 offset0 = TREE_OPERAND (arg0, 1);
9540 else
9541 offset0 = size_binop (PLUS_EXPR, offset0,
9542 TREE_OPERAND (arg0, 1));
9543 if (poly_int_tree_p (offset0))
9545 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
9546 TYPE_PRECISION (sizetype));
9547 tem <<= LOG2_BITS_PER_UNIT;
9548 tem += bitpos0;
9549 if (tem.to_shwi (&bitpos0))
9550 offset0 = NULL_TREE;
9554 base1 = arg1;
9555 if (TREE_CODE (arg1) == ADDR_EXPR)
9557 base1
9558 = get_inner_reference (TREE_OPERAND (arg1, 0),
9559 &bitsize, &bitpos1, &offset1, &mode,
9560 &unsignedp, &reversep, &volatilep);
9561 if (TREE_CODE (base1) == INDIRECT_REF)
9562 base1 = TREE_OPERAND (base1, 0);
9563 else
9564 indirect_base1 = true;
9566 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9568 base1 = TREE_OPERAND (arg1, 0);
9569 STRIP_SIGN_NOPS (base1);
9570 if (TREE_CODE (base1) == ADDR_EXPR)
9572 base1
9573 = get_inner_reference (TREE_OPERAND (base1, 0),
9574 &bitsize, &bitpos1, &offset1, &mode,
9575 &unsignedp, &reversep, &volatilep);
9576 if (TREE_CODE (base1) == INDIRECT_REF)
9577 base1 = TREE_OPERAND (base1, 0);
9578 else
9579 indirect_base1 = true;
9581 if (offset1 == NULL_TREE || integer_zerop (offset1))
9582 offset1 = TREE_OPERAND (arg1, 1);
9583 else
9584 offset1 = size_binop (PLUS_EXPR, offset1,
9585 TREE_OPERAND (arg1, 1));
9586 if (poly_int_tree_p (offset1))
9588 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
9589 TYPE_PRECISION (sizetype));
9590 tem <<= LOG2_BITS_PER_UNIT;
9591 tem += bitpos1;
9592 if (tem.to_shwi (&bitpos1))
9593 offset1 = NULL_TREE;
9597 /* If we have equivalent bases we might be able to simplify. */
9598 if (indirect_base0 == indirect_base1
9599 && operand_equal_p (base0, base1,
9600 indirect_base0 ? OEP_ADDRESS_OF : 0))
9602 /* We can fold this expression to a constant if the non-constant
9603 offset parts are equal. */
9604 if ((offset0 == offset1
9605 || (offset0 && offset1
9606 && operand_equal_p (offset0, offset1, 0)))
9607 && (equality_code
9608 || (indirect_base0
9609 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9610 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9612 if (!equality_code
9613 && maybe_ne (bitpos0, bitpos1)
9614 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9615 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9616 fold_overflow_warning (("assuming pointer wraparound does not "
9617 "occur when comparing P +- C1 with "
9618 "P +- C2"),
9619 WARN_STRICT_OVERFLOW_CONDITIONAL);
9621 switch (code)
9623 case EQ_EXPR:
9624 if (known_eq (bitpos0, bitpos1))
9625 return constant_boolean_node (true, type);
9626 if (known_ne (bitpos0, bitpos1))
9627 return constant_boolean_node (false, type);
9628 break;
9629 case NE_EXPR:
9630 if (known_ne (bitpos0, bitpos1))
9631 return constant_boolean_node (true, type);
9632 if (known_eq (bitpos0, bitpos1))
9633 return constant_boolean_node (false, type);
9634 break;
9635 case LT_EXPR:
9636 if (known_lt (bitpos0, bitpos1))
9637 return constant_boolean_node (true, type);
9638 if (known_ge (bitpos0, bitpos1))
9639 return constant_boolean_node (false, type);
9640 break;
9641 case LE_EXPR:
9642 if (known_le (bitpos0, bitpos1))
9643 return constant_boolean_node (true, type);
9644 if (known_gt (bitpos0, bitpos1))
9645 return constant_boolean_node (false, type);
9646 break;
9647 case GE_EXPR:
9648 if (known_ge (bitpos0, bitpos1))
9649 return constant_boolean_node (true, type);
9650 if (known_lt (bitpos0, bitpos1))
9651 return constant_boolean_node (false, type);
9652 break;
9653 case GT_EXPR:
9654 if (known_gt (bitpos0, bitpos1))
9655 return constant_boolean_node (true, type);
9656 if (known_le (bitpos0, bitpos1))
9657 return constant_boolean_node (false, type);
9658 break;
9659 default:;
9662 /* We can simplify the comparison to a comparison of the variable
9663 offset parts if the constant offset parts are equal.
9664 Be careful to use signed sizetype here because otherwise we
9665 mess with array offsets in the wrong way. This is possible
9666 because pointer arithmetic is restricted to retain within an
9667 object and overflow on pointer differences is undefined as of
9668 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9669 else if (known_eq (bitpos0, bitpos1)
9670 && (equality_code
9671 || (indirect_base0
9672 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9673 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9675 /* By converting to signed sizetype we cover middle-end pointer
9676 arithmetic which operates on unsigned pointer types of size
9677 type size and ARRAY_REF offsets which are properly sign or
9678 zero extended from their type in case it is narrower than
9679 sizetype. */
9680 if (offset0 == NULL_TREE)
9681 offset0 = build_int_cst (ssizetype, 0);
9682 else
9683 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9684 if (offset1 == NULL_TREE)
9685 offset1 = build_int_cst (ssizetype, 0);
9686 else
9687 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9689 if (!equality_code
9690 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9691 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9692 fold_overflow_warning (("assuming pointer wraparound does not "
9693 "occur when comparing P +- C1 with "
9694 "P +- C2"),
9695 WARN_STRICT_OVERFLOW_COMPARISON);
9697 return fold_build2_loc (loc, code, type, offset0, offset1);
9700 /* For equal offsets we can simplify to a comparison of the
9701 base addresses. */
9702 else if (known_eq (bitpos0, bitpos1)
9703 && (indirect_base0
9704 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9705 && (indirect_base1
9706 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9707 && ((offset0 == offset1)
9708 || (offset0 && offset1
9709 && operand_equal_p (offset0, offset1, 0))))
9711 if (indirect_base0)
9712 base0 = build_fold_addr_expr_loc (loc, base0);
9713 if (indirect_base1)
9714 base1 = build_fold_addr_expr_loc (loc, base1);
9715 return fold_build2_loc (loc, code, type, base0, base1);
9717 /* Comparison between an ordinary (non-weak) symbol and a null
9718 pointer can be eliminated since such symbols must have a non
9719 null address. In C, relational expressions between pointers
9720 to objects and null pointers are undefined. The results
9721 below follow the C++ rules with the additional property that
9722 every object pointer compares greater than a null pointer.
9724 else if (((DECL_P (base0)
9725 && maybe_nonzero_address (base0) > 0
9726 /* Avoid folding references to struct members at offset 0 to
9727 prevent tests like '&ptr->firstmember == 0' from getting
9728 eliminated. When ptr is null, although the -> expression
9729 is strictly speaking invalid, GCC retains it as a matter
9730 of QoI. See PR c/44555. */
9731 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
9732 || CONSTANT_CLASS_P (base0))
9733 && indirect_base0
9734 /* The caller guarantees that when one of the arguments is
9735 constant (i.e., null in this case) it is second. */
9736 && integer_zerop (arg1))
9738 switch (code)
9740 case EQ_EXPR:
9741 case LE_EXPR:
9742 case LT_EXPR:
9743 return constant_boolean_node (false, type);
9744 case GE_EXPR:
9745 case GT_EXPR:
9746 case NE_EXPR:
9747 return constant_boolean_node (true, type);
9748 default:
9749 gcc_unreachable ();
9754 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9755 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9756 the resulting offset is smaller in absolute value than the
9757 original one and has the same sign. */
9758 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9759 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9760 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9761 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9762 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9763 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9764 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9765 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9767 tree const1 = TREE_OPERAND (arg0, 1);
9768 tree const2 = TREE_OPERAND (arg1, 1);
9769 tree variable1 = TREE_OPERAND (arg0, 0);
9770 tree variable2 = TREE_OPERAND (arg1, 0);
9771 tree cst;
9772 const char * const warnmsg = G_("assuming signed overflow does not "
9773 "occur when combining constants around "
9774 "a comparison");
9776 /* Put the constant on the side where it doesn't overflow and is
9777 of lower absolute value and of same sign than before. */
9778 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9779 ? MINUS_EXPR : PLUS_EXPR,
9780 const2, const1);
9781 if (!TREE_OVERFLOW (cst)
9782 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9783 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9785 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9786 return fold_build2_loc (loc, code, type,
9787 variable1,
9788 fold_build2_loc (loc, TREE_CODE (arg1),
9789 TREE_TYPE (arg1),
9790 variable2, cst));
9793 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9794 ? MINUS_EXPR : PLUS_EXPR,
9795 const1, const2);
9796 if (!TREE_OVERFLOW (cst)
9797 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9798 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9800 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9801 return fold_build2_loc (loc, code, type,
9802 fold_build2_loc (loc, TREE_CODE (arg0),
9803 TREE_TYPE (arg0),
9804 variable1, cst),
9805 variable2);
9809 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9810 if (tem)
9811 return tem;
9813 /* If we are comparing an expression that just has comparisons
9814 of two integer values, arithmetic expressions of those comparisons,
9815 and constants, we can simplify it. There are only three cases
9816 to check: the two values can either be equal, the first can be
9817 greater, or the second can be greater. Fold the expression for
9818 those three values. Since each value must be 0 or 1, we have
9819 eight possibilities, each of which corresponds to the constant 0
9820 or 1 or one of the six possible comparisons.
9822 This handles common cases like (a > b) == 0 but also handles
9823 expressions like ((x > y) - (y > x)) > 0, which supposedly
9824 occur in macroized code. */
9826 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9828 tree cval1 = 0, cval2 = 0;
9830 if (twoval_comparison_p (arg0, &cval1, &cval2)
9831 /* Don't handle degenerate cases here; they should already
9832 have been handled anyway. */
9833 && cval1 != 0 && cval2 != 0
9834 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9835 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9836 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9837 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9838 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9839 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9840 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9842 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9843 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9845 /* We can't just pass T to eval_subst in case cval1 or cval2
9846 was the same as ARG1. */
9848 tree high_result
9849 = fold_build2_loc (loc, code, type,
9850 eval_subst (loc, arg0, cval1, maxval,
9851 cval2, minval),
9852 arg1);
9853 tree equal_result
9854 = fold_build2_loc (loc, code, type,
9855 eval_subst (loc, arg0, cval1, maxval,
9856 cval2, maxval),
9857 arg1);
9858 tree low_result
9859 = fold_build2_loc (loc, code, type,
9860 eval_subst (loc, arg0, cval1, minval,
9861 cval2, maxval),
9862 arg1);
9864 /* All three of these results should be 0 or 1. Confirm they are.
9865 Then use those values to select the proper code to use. */
9867 if (TREE_CODE (high_result) == INTEGER_CST
9868 && TREE_CODE (equal_result) == INTEGER_CST
9869 && TREE_CODE (low_result) == INTEGER_CST)
9871 /* Make a 3-bit mask with the high-order bit being the
9872 value for `>', the next for '=', and the low for '<'. */
9873 switch ((integer_onep (high_result) * 4)
9874 + (integer_onep (equal_result) * 2)
9875 + integer_onep (low_result))
9877 case 0:
9878 /* Always false. */
9879 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9880 case 1:
9881 code = LT_EXPR;
9882 break;
9883 case 2:
9884 code = EQ_EXPR;
9885 break;
9886 case 3:
9887 code = LE_EXPR;
9888 break;
9889 case 4:
9890 code = GT_EXPR;
9891 break;
9892 case 5:
9893 code = NE_EXPR;
9894 break;
9895 case 6:
9896 code = GE_EXPR;
9897 break;
9898 case 7:
9899 /* Always true. */
9900 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9903 return fold_build2_loc (loc, code, type, cval1, cval2);
9908 return NULL_TREE;
9912 /* Subroutine of fold_binary. Optimize complex multiplications of the
9913 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9914 argument EXPR represents the expression "z" of type TYPE. */
9916 static tree
9917 fold_mult_zconjz (location_t loc, tree type, tree expr)
9919 tree itype = TREE_TYPE (type);
9920 tree rpart, ipart, tem;
9922 if (TREE_CODE (expr) == COMPLEX_EXPR)
9924 rpart = TREE_OPERAND (expr, 0);
9925 ipart = TREE_OPERAND (expr, 1);
9927 else if (TREE_CODE (expr) == COMPLEX_CST)
9929 rpart = TREE_REALPART (expr);
9930 ipart = TREE_IMAGPART (expr);
9932 else
9934 expr = save_expr (expr);
9935 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9936 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9939 rpart = save_expr (rpart);
9940 ipart = save_expr (ipart);
9941 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9942 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9943 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9944 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9945 build_zero_cst (itype));
9949 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9950 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
9951 true if successful. */
9953 static bool
9954 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
9956 unsigned HOST_WIDE_INT i, nunits;
9958 if (TREE_CODE (arg) == VECTOR_CST
9959 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
9961 for (i = 0; i < nunits; ++i)
9962 elts[i] = VECTOR_CST_ELT (arg, i);
9964 else if (TREE_CODE (arg) == CONSTRUCTOR)
9966 constructor_elt *elt;
9968 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9969 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9970 return false;
9971 else
9972 elts[i] = elt->value;
9974 else
9975 return false;
9976 for (; i < nelts; i++)
9977 elts[i]
9978 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9979 return true;
9982 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9983 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9984 NULL_TREE otherwise. */
9986 tree
9987 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
9989 unsigned int i;
9990 unsigned HOST_WIDE_INT nelts;
9991 bool need_ctor = false;
9993 if (!sel.length ().is_constant (&nelts))
9994 return NULL_TREE;
9995 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
9996 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
9997 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
9998 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9999 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10000 return NULL_TREE;
10002 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10003 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10004 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10005 return NULL_TREE;
10007 tree_vector_builder out_elts (type, nelts, 1);
10008 for (i = 0; i < nelts; i++)
10010 HOST_WIDE_INT index;
10011 if (!sel[i].is_constant (&index))
10012 return NULL_TREE;
10013 if (!CONSTANT_CLASS_P (in_elts[index]))
10014 need_ctor = true;
10015 out_elts.quick_push (unshare_expr (in_elts[index]));
10018 if (need_ctor)
10020 vec<constructor_elt, va_gc> *v;
10021 vec_alloc (v, nelts);
10022 for (i = 0; i < nelts; i++)
10023 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
10024 return build_constructor (type, v);
10026 else
10027 return out_elts.build ();
10030 /* Try to fold a pointer difference of type TYPE two address expressions of
10031 array references AREF0 and AREF1 using location LOC. Return a
10032 simplified expression for the difference or NULL_TREE. */
10034 static tree
10035 fold_addr_of_array_ref_difference (location_t loc, tree type,
10036 tree aref0, tree aref1,
10037 bool use_pointer_diff)
10039 tree base0 = TREE_OPERAND (aref0, 0);
10040 tree base1 = TREE_OPERAND (aref1, 0);
10041 tree base_offset = build_int_cst (type, 0);
10043 /* If the bases are array references as well, recurse. If the bases
10044 are pointer indirections compute the difference of the pointers.
10045 If the bases are equal, we are set. */
10046 if ((TREE_CODE (base0) == ARRAY_REF
10047 && TREE_CODE (base1) == ARRAY_REF
10048 && (base_offset
10049 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
10050 use_pointer_diff)))
10051 || (INDIRECT_REF_P (base0)
10052 && INDIRECT_REF_P (base1)
10053 && (base_offset
10054 = use_pointer_diff
10055 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
10056 TREE_OPERAND (base0, 0),
10057 TREE_OPERAND (base1, 0))
10058 : fold_binary_loc (loc, MINUS_EXPR, type,
10059 fold_convert (type,
10060 TREE_OPERAND (base0, 0)),
10061 fold_convert (type,
10062 TREE_OPERAND (base1, 0)))))
10063 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
10065 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10066 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10067 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
10068 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
10069 return fold_build2_loc (loc, PLUS_EXPR, type,
10070 base_offset,
10071 fold_build2_loc (loc, MULT_EXPR, type,
10072 diff, esz));
10074 return NULL_TREE;
10077 /* If the real or vector real constant CST of type TYPE has an exact
10078 inverse, return it, else return NULL. */
10080 tree
10081 exact_inverse (tree type, tree cst)
10083 REAL_VALUE_TYPE r;
10084 tree unit_type;
10085 machine_mode mode;
10087 switch (TREE_CODE (cst))
10089 case REAL_CST:
10090 r = TREE_REAL_CST (cst);
10092 if (exact_real_inverse (TYPE_MODE (type), &r))
10093 return build_real (type, r);
10095 return NULL_TREE;
10097 case VECTOR_CST:
10099 unit_type = TREE_TYPE (type);
10100 mode = TYPE_MODE (unit_type);
10102 tree_vector_builder elts;
10103 if (!elts.new_unary_operation (type, cst, false))
10104 return NULL_TREE;
10105 unsigned int count = elts.encoded_nelts ();
10106 for (unsigned int i = 0; i < count; ++i)
10108 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10109 if (!exact_real_inverse (mode, &r))
10110 return NULL_TREE;
10111 elts.quick_push (build_real (unit_type, r));
10114 return elts.build ();
10117 default:
10118 return NULL_TREE;
10122 /* Mask out the tz least significant bits of X of type TYPE where
10123 tz is the number of trailing zeroes in Y. */
10124 static wide_int
10125 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10127 int tz = wi::ctz (y);
10128 if (tz > 0)
10129 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10130 return x;
10133 /* Return true when T is an address and is known to be nonzero.
10134 For floating point we further ensure that T is not denormal.
10135 Similar logic is present in nonzero_address in rtlanal.h.
10137 If the return value is based on the assumption that signed overflow
10138 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10139 change *STRICT_OVERFLOW_P. */
10141 static bool
10142 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10144 tree type = TREE_TYPE (t);
10145 enum tree_code code;
10147 /* Doing something useful for floating point would need more work. */
10148 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10149 return false;
10151 code = TREE_CODE (t);
10152 switch (TREE_CODE_CLASS (code))
10154 case tcc_unary:
10155 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10156 strict_overflow_p);
10157 case tcc_binary:
10158 case tcc_comparison:
10159 return tree_binary_nonzero_warnv_p (code, type,
10160 TREE_OPERAND (t, 0),
10161 TREE_OPERAND (t, 1),
10162 strict_overflow_p);
10163 case tcc_constant:
10164 case tcc_declaration:
10165 case tcc_reference:
10166 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10168 default:
10169 break;
10172 switch (code)
10174 case TRUTH_NOT_EXPR:
10175 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10176 strict_overflow_p);
10178 case TRUTH_AND_EXPR:
10179 case TRUTH_OR_EXPR:
10180 case TRUTH_XOR_EXPR:
10181 return tree_binary_nonzero_warnv_p (code, type,
10182 TREE_OPERAND (t, 0),
10183 TREE_OPERAND (t, 1),
10184 strict_overflow_p);
10186 case COND_EXPR:
10187 case CONSTRUCTOR:
10188 case OBJ_TYPE_REF:
10189 case ASSERT_EXPR:
10190 case ADDR_EXPR:
10191 case WITH_SIZE_EXPR:
10192 case SSA_NAME:
10193 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10195 case COMPOUND_EXPR:
10196 case MODIFY_EXPR:
10197 case BIND_EXPR:
10198 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10199 strict_overflow_p);
10201 case SAVE_EXPR:
10202 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10203 strict_overflow_p);
10205 case CALL_EXPR:
10207 tree fndecl = get_callee_fndecl (t);
10208 if (!fndecl) return false;
10209 if (flag_delete_null_pointer_checks && !flag_check_new
10210 && DECL_IS_OPERATOR_NEW_P (fndecl)
10211 && !TREE_NOTHROW (fndecl))
10212 return true;
10213 if (flag_delete_null_pointer_checks
10214 && lookup_attribute ("returns_nonnull",
10215 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10216 return true;
10217 return alloca_call_p (t);
10220 default:
10221 break;
10223 return false;
10226 /* Return true when T is an address and is known to be nonzero.
10227 Handle warnings about undefined signed overflow. */
10229 bool
10230 tree_expr_nonzero_p (tree t)
10232 bool ret, strict_overflow_p;
10234 strict_overflow_p = false;
10235 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10236 if (strict_overflow_p)
10237 fold_overflow_warning (("assuming signed overflow does not occur when "
10238 "determining that expression is always "
10239 "non-zero"),
10240 WARN_STRICT_OVERFLOW_MISC);
10241 return ret;
10244 /* Return true if T is known not to be equal to an integer W. */
10246 bool
10247 expr_not_equal_to (tree t, const wide_int &w)
10249 value_range vr;
10250 switch (TREE_CODE (t))
10252 case INTEGER_CST:
10253 return wi::to_wide (t) != w;
10255 case SSA_NAME:
10256 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10257 return false;
10258 get_range_info (t, vr);
10259 if (!vr.undefined_p ()
10260 && !vr.contains_p (wide_int_to_tree (TREE_TYPE (t), w)))
10261 return true;
10262 /* If T has some known zero bits and W has any of those bits set,
10263 then T is known not to be equal to W. */
10264 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10265 TYPE_PRECISION (TREE_TYPE (t))), 0))
10266 return true;
10267 return false;
10269 default:
10270 return false;
10274 /* Fold a binary expression of code CODE and type TYPE with operands
10275 OP0 and OP1. LOC is the location of the resulting expression.
10276 Return the folded expression if folding is successful. Otherwise,
10277 return NULL_TREE. */
10279 tree
10280 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10281 tree op0, tree op1)
10283 enum tree_code_class kind = TREE_CODE_CLASS (code);
10284 tree arg0, arg1, tem;
10285 tree t1 = NULL_TREE;
10286 bool strict_overflow_p;
10287 unsigned int prec;
10289 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10290 && TREE_CODE_LENGTH (code) == 2
10291 && op0 != NULL_TREE
10292 && op1 != NULL_TREE);
10294 arg0 = op0;
10295 arg1 = op1;
10297 /* Strip any conversions that don't change the mode. This is
10298 safe for every expression, except for a comparison expression
10299 because its signedness is derived from its operands. So, in
10300 the latter case, only strip conversions that don't change the
10301 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10302 preserved.
10304 Note that this is done as an internal manipulation within the
10305 constant folder, in order to find the simplest representation
10306 of the arguments so that their form can be studied. In any
10307 cases, the appropriate type conversions should be put back in
10308 the tree that will get out of the constant folder. */
10310 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10312 STRIP_SIGN_NOPS (arg0);
10313 STRIP_SIGN_NOPS (arg1);
10315 else
10317 STRIP_NOPS (arg0);
10318 STRIP_NOPS (arg1);
10321 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10322 constant but we can't do arithmetic on them. */
10323 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10325 tem = const_binop (code, type, arg0, arg1);
10326 if (tem != NULL_TREE)
10328 if (TREE_TYPE (tem) != type)
10329 tem = fold_convert_loc (loc, type, tem);
10330 return tem;
10334 /* If this is a commutative operation, and ARG0 is a constant, move it
10335 to ARG1 to reduce the number of tests below. */
10336 if (commutative_tree_code (code)
10337 && tree_swap_operands_p (arg0, arg1))
10338 return fold_build2_loc (loc, code, type, op1, op0);
10340 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10341 to ARG1 to reduce the number of tests below. */
10342 if (kind == tcc_comparison
10343 && tree_swap_operands_p (arg0, arg1))
10344 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10346 tem = generic_simplify (loc, code, type, op0, op1);
10347 if (tem)
10348 return tem;
10350 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10352 First check for cases where an arithmetic operation is applied to a
10353 compound, conditional, or comparison operation. Push the arithmetic
10354 operation inside the compound or conditional to see if any folding
10355 can then be done. Convert comparison to conditional for this purpose.
10356 The also optimizes non-constant cases that used to be done in
10357 expand_expr.
10359 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10360 one of the operands is a comparison and the other is a comparison, a
10361 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10362 code below would make the expression more complex. Change it to a
10363 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10364 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10366 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10367 || code == EQ_EXPR || code == NE_EXPR)
10368 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10369 && ((truth_value_p (TREE_CODE (arg0))
10370 && (truth_value_p (TREE_CODE (arg1))
10371 || (TREE_CODE (arg1) == BIT_AND_EXPR
10372 && integer_onep (TREE_OPERAND (arg1, 1)))))
10373 || (truth_value_p (TREE_CODE (arg1))
10374 && (truth_value_p (TREE_CODE (arg0))
10375 || (TREE_CODE (arg0) == BIT_AND_EXPR
10376 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10378 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10379 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10380 : TRUTH_XOR_EXPR,
10381 boolean_type_node,
10382 fold_convert_loc (loc, boolean_type_node, arg0),
10383 fold_convert_loc (loc, boolean_type_node, arg1));
10385 if (code == EQ_EXPR)
10386 tem = invert_truthvalue_loc (loc, tem);
10388 return fold_convert_loc (loc, type, tem);
10391 if (TREE_CODE_CLASS (code) == tcc_binary
10392 || TREE_CODE_CLASS (code) == tcc_comparison)
10394 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10396 tem = fold_build2_loc (loc, code, type,
10397 fold_convert_loc (loc, TREE_TYPE (op0),
10398 TREE_OPERAND (arg0, 1)), op1);
10399 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10400 tem);
10402 if (TREE_CODE (arg1) == COMPOUND_EXPR)
10404 tem = fold_build2_loc (loc, code, type, op0,
10405 fold_convert_loc (loc, TREE_TYPE (op1),
10406 TREE_OPERAND (arg1, 1)));
10407 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10408 tem);
10411 if (TREE_CODE (arg0) == COND_EXPR
10412 || TREE_CODE (arg0) == VEC_COND_EXPR
10413 || COMPARISON_CLASS_P (arg0))
10415 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10416 arg0, arg1,
10417 /*cond_first_p=*/1);
10418 if (tem != NULL_TREE)
10419 return tem;
10422 if (TREE_CODE (arg1) == COND_EXPR
10423 || TREE_CODE (arg1) == VEC_COND_EXPR
10424 || COMPARISON_CLASS_P (arg1))
10426 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10427 arg1, arg0,
10428 /*cond_first_p=*/0);
10429 if (tem != NULL_TREE)
10430 return tem;
10434 switch (code)
10436 case MEM_REF:
10437 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10438 if (TREE_CODE (arg0) == ADDR_EXPR
10439 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10441 tree iref = TREE_OPERAND (arg0, 0);
10442 return fold_build2 (MEM_REF, type,
10443 TREE_OPERAND (iref, 0),
10444 int_const_binop (PLUS_EXPR, arg1,
10445 TREE_OPERAND (iref, 1)));
10448 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10449 if (TREE_CODE (arg0) == ADDR_EXPR
10450 && handled_component_p (TREE_OPERAND (arg0, 0)))
10452 tree base;
10453 poly_int64 coffset;
10454 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10455 &coffset);
10456 if (!base)
10457 return NULL_TREE;
10458 return fold_build2 (MEM_REF, type,
10459 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
10460 int_const_binop (PLUS_EXPR, arg1,
10461 size_int (coffset)));
10464 return NULL_TREE;
10466 case POINTER_PLUS_EXPR:
10467 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10468 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10469 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10470 return fold_convert_loc (loc, type,
10471 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10472 fold_convert_loc (loc, sizetype,
10473 arg1),
10474 fold_convert_loc (loc, sizetype,
10475 arg0)));
10477 return NULL_TREE;
10479 case PLUS_EXPR:
10480 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10482 /* X + (X / CST) * -CST is X % CST. */
10483 if (TREE_CODE (arg1) == MULT_EXPR
10484 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10485 && operand_equal_p (arg0,
10486 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10488 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10489 tree cst1 = TREE_OPERAND (arg1, 1);
10490 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10491 cst1, cst0);
10492 if (sum && integer_zerop (sum))
10493 return fold_convert_loc (loc, type,
10494 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10495 TREE_TYPE (arg0), arg0,
10496 cst0));
10500 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10501 one. Make sure the type is not saturating and has the signedness of
10502 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10503 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10504 if ((TREE_CODE (arg0) == MULT_EXPR
10505 || TREE_CODE (arg1) == MULT_EXPR)
10506 && !TYPE_SATURATING (type)
10507 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10508 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10509 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10511 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10512 if (tem)
10513 return tem;
10516 if (! FLOAT_TYPE_P (type))
10518 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10519 (plus (plus (mult) (mult)) (foo)) so that we can
10520 take advantage of the factoring cases below. */
10521 if (ANY_INTEGRAL_TYPE_P (type)
10522 && TYPE_OVERFLOW_WRAPS (type)
10523 && (((TREE_CODE (arg0) == PLUS_EXPR
10524 || TREE_CODE (arg0) == MINUS_EXPR)
10525 && TREE_CODE (arg1) == MULT_EXPR)
10526 || ((TREE_CODE (arg1) == PLUS_EXPR
10527 || TREE_CODE (arg1) == MINUS_EXPR)
10528 && TREE_CODE (arg0) == MULT_EXPR)))
10530 tree parg0, parg1, parg, marg;
10531 enum tree_code pcode;
10533 if (TREE_CODE (arg1) == MULT_EXPR)
10534 parg = arg0, marg = arg1;
10535 else
10536 parg = arg1, marg = arg0;
10537 pcode = TREE_CODE (parg);
10538 parg0 = TREE_OPERAND (parg, 0);
10539 parg1 = TREE_OPERAND (parg, 1);
10540 STRIP_NOPS (parg0);
10541 STRIP_NOPS (parg1);
10543 if (TREE_CODE (parg0) == MULT_EXPR
10544 && TREE_CODE (parg1) != MULT_EXPR)
10545 return fold_build2_loc (loc, pcode, type,
10546 fold_build2_loc (loc, PLUS_EXPR, type,
10547 fold_convert_loc (loc, type,
10548 parg0),
10549 fold_convert_loc (loc, type,
10550 marg)),
10551 fold_convert_loc (loc, type, parg1));
10552 if (TREE_CODE (parg0) != MULT_EXPR
10553 && TREE_CODE (parg1) == MULT_EXPR)
10554 return
10555 fold_build2_loc (loc, PLUS_EXPR, type,
10556 fold_convert_loc (loc, type, parg0),
10557 fold_build2_loc (loc, pcode, type,
10558 fold_convert_loc (loc, type, marg),
10559 fold_convert_loc (loc, type,
10560 parg1)));
10563 else
10565 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10566 to __complex__ ( x, y ). This is not the same for SNaNs or
10567 if signed zeros are involved. */
10568 if (!HONOR_SNANS (element_mode (arg0))
10569 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10570 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10572 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10573 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10574 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10575 bool arg0rz = false, arg0iz = false;
10576 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10577 || (arg0i && (arg0iz = real_zerop (arg0i))))
10579 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10580 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10581 if (arg0rz && arg1i && real_zerop (arg1i))
10583 tree rp = arg1r ? arg1r
10584 : build1 (REALPART_EXPR, rtype, arg1);
10585 tree ip = arg0i ? arg0i
10586 : build1 (IMAGPART_EXPR, rtype, arg0);
10587 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10589 else if (arg0iz && arg1r && real_zerop (arg1r))
10591 tree rp = arg0r ? arg0r
10592 : build1 (REALPART_EXPR, rtype, arg0);
10593 tree ip = arg1i ? arg1i
10594 : build1 (IMAGPART_EXPR, rtype, arg1);
10595 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10600 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10601 We associate floats only if the user has specified
10602 -fassociative-math. */
10603 if (flag_associative_math
10604 && TREE_CODE (arg1) == PLUS_EXPR
10605 && TREE_CODE (arg0) != MULT_EXPR)
10607 tree tree10 = TREE_OPERAND (arg1, 0);
10608 tree tree11 = TREE_OPERAND (arg1, 1);
10609 if (TREE_CODE (tree11) == MULT_EXPR
10610 && TREE_CODE (tree10) == MULT_EXPR)
10612 tree tree0;
10613 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10614 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10617 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10618 We associate floats only if the user has specified
10619 -fassociative-math. */
10620 if (flag_associative_math
10621 && TREE_CODE (arg0) == PLUS_EXPR
10622 && TREE_CODE (arg1) != MULT_EXPR)
10624 tree tree00 = TREE_OPERAND (arg0, 0);
10625 tree tree01 = TREE_OPERAND (arg0, 1);
10626 if (TREE_CODE (tree01) == MULT_EXPR
10627 && TREE_CODE (tree00) == MULT_EXPR)
10629 tree tree0;
10630 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10631 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10636 bit_rotate:
10637 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10638 is a rotate of A by C1 bits. */
10639 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10640 is a rotate of A by B bits.
10641 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
10642 though in this case CODE must be | and not + or ^, otherwise
10643 it doesn't return A when B is 0. */
10645 enum tree_code code0, code1;
10646 tree rtype;
10647 code0 = TREE_CODE (arg0);
10648 code1 = TREE_CODE (arg1);
10649 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10650 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10651 && operand_equal_p (TREE_OPERAND (arg0, 0),
10652 TREE_OPERAND (arg1, 0), 0)
10653 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10654 TYPE_UNSIGNED (rtype))
10655 /* Only create rotates in complete modes. Other cases are not
10656 expanded properly. */
10657 && (element_precision (rtype)
10658 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
10660 tree tree01, tree11;
10661 tree orig_tree01, orig_tree11;
10662 enum tree_code code01, code11;
10664 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
10665 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
10666 STRIP_NOPS (tree01);
10667 STRIP_NOPS (tree11);
10668 code01 = TREE_CODE (tree01);
10669 code11 = TREE_CODE (tree11);
10670 if (code11 != MINUS_EXPR
10671 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
10673 std::swap (code0, code1);
10674 std::swap (code01, code11);
10675 std::swap (tree01, tree11);
10676 std::swap (orig_tree01, orig_tree11);
10678 if (code01 == INTEGER_CST
10679 && code11 == INTEGER_CST
10680 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10681 == element_precision (rtype)))
10683 tem = build2_loc (loc, LROTATE_EXPR,
10684 rtype, TREE_OPERAND (arg0, 0),
10685 code0 == LSHIFT_EXPR
10686 ? orig_tree01 : orig_tree11);
10687 return fold_convert_loc (loc, type, tem);
10689 else if (code11 == MINUS_EXPR)
10691 tree tree110, tree111;
10692 tree110 = TREE_OPERAND (tree11, 0);
10693 tree111 = TREE_OPERAND (tree11, 1);
10694 STRIP_NOPS (tree110);
10695 STRIP_NOPS (tree111);
10696 if (TREE_CODE (tree110) == INTEGER_CST
10697 && compare_tree_int (tree110,
10698 element_precision (rtype)) == 0
10699 && operand_equal_p (tree01, tree111, 0))
10701 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10702 ? LROTATE_EXPR : RROTATE_EXPR),
10703 rtype, TREE_OPERAND (arg0, 0),
10704 orig_tree01);
10705 return fold_convert_loc (loc, type, tem);
10708 else if (code == BIT_IOR_EXPR
10709 && code11 == BIT_AND_EXPR
10710 && pow2p_hwi (element_precision (rtype)))
10712 tree tree110, tree111;
10713 tree110 = TREE_OPERAND (tree11, 0);
10714 tree111 = TREE_OPERAND (tree11, 1);
10715 STRIP_NOPS (tree110);
10716 STRIP_NOPS (tree111);
10717 if (TREE_CODE (tree110) == NEGATE_EXPR
10718 && TREE_CODE (tree111) == INTEGER_CST
10719 && compare_tree_int (tree111,
10720 element_precision (rtype) - 1) == 0
10721 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
10723 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10724 ? LROTATE_EXPR : RROTATE_EXPR),
10725 rtype, TREE_OPERAND (arg0, 0),
10726 orig_tree01);
10727 return fold_convert_loc (loc, type, tem);
10733 associate:
10734 /* In most languages, can't associate operations on floats through
10735 parentheses. Rather than remember where the parentheses were, we
10736 don't associate floats at all, unless the user has specified
10737 -fassociative-math.
10738 And, we need to make sure type is not saturating. */
10740 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10741 && !TYPE_SATURATING (type))
10743 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
10744 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
10745 tree atype = type;
10746 bool ok = true;
10748 /* Split both trees into variables, constants, and literals. Then
10749 associate each group together, the constants with literals,
10750 then the result with variables. This increases the chances of
10751 literals being recombined later and of generating relocatable
10752 expressions for the sum of a constant and literal. */
10753 var0 = split_tree (arg0, type, code,
10754 &minus_var0, &con0, &minus_con0,
10755 &lit0, &minus_lit0, 0);
10756 var1 = split_tree (arg1, type, code,
10757 &minus_var1, &con1, &minus_con1,
10758 &lit1, &minus_lit1, code == MINUS_EXPR);
10760 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10761 if (code == MINUS_EXPR)
10762 code = PLUS_EXPR;
10764 /* With undefined overflow prefer doing association in a type
10765 which wraps on overflow, if that is one of the operand types. */
10766 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
10767 && !TYPE_OVERFLOW_WRAPS (type))
10769 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10770 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10771 atype = TREE_TYPE (arg0);
10772 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10773 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10774 atype = TREE_TYPE (arg1);
10775 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10778 /* With undefined overflow we can only associate constants with one
10779 variable, and constants whose association doesn't overflow. */
10780 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
10781 && !TYPE_OVERFLOW_WRAPS (atype))
10783 if ((var0 && var1) || (minus_var0 && minus_var1))
10785 /* ??? If split_tree would handle NEGATE_EXPR we could
10786 simply reject these cases and the allowed cases would
10787 be the var0/minus_var1 ones. */
10788 tree tmp0 = var0 ? var0 : minus_var0;
10789 tree tmp1 = var1 ? var1 : minus_var1;
10790 bool one_neg = false;
10792 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10794 tmp0 = TREE_OPERAND (tmp0, 0);
10795 one_neg = !one_neg;
10797 if (CONVERT_EXPR_P (tmp0)
10798 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10799 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10800 <= TYPE_PRECISION (atype)))
10801 tmp0 = TREE_OPERAND (tmp0, 0);
10802 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10804 tmp1 = TREE_OPERAND (tmp1, 0);
10805 one_neg = !one_neg;
10807 if (CONVERT_EXPR_P (tmp1)
10808 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10809 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10810 <= TYPE_PRECISION (atype)))
10811 tmp1 = TREE_OPERAND (tmp1, 0);
10812 /* The only case we can still associate with two variables
10813 is if they cancel out. */
10814 if (!one_neg
10815 || !operand_equal_p (tmp0, tmp1, 0))
10816 ok = false;
10818 else if ((var0 && minus_var1
10819 && ! operand_equal_p (var0, minus_var1, 0))
10820 || (minus_var0 && var1
10821 && ! operand_equal_p (minus_var0, var1, 0)))
10822 ok = false;
10825 /* Only do something if we found more than two objects. Otherwise,
10826 nothing has changed and we risk infinite recursion. */
10827 if (ok
10828 && ((var0 != 0) + (var1 != 0)
10829 + (minus_var0 != 0) + (minus_var1 != 0)
10830 + (con0 != 0) + (con1 != 0)
10831 + (minus_con0 != 0) + (minus_con1 != 0)
10832 + (lit0 != 0) + (lit1 != 0)
10833 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
10835 var0 = associate_trees (loc, var0, var1, code, atype);
10836 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
10837 code, atype);
10838 con0 = associate_trees (loc, con0, con1, code, atype);
10839 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
10840 code, atype);
10841 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10842 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10843 code, atype);
10845 if (minus_var0 && var0)
10847 var0 = associate_trees (loc, var0, minus_var0,
10848 MINUS_EXPR, atype);
10849 minus_var0 = 0;
10851 if (minus_con0 && con0)
10853 con0 = associate_trees (loc, con0, minus_con0,
10854 MINUS_EXPR, atype);
10855 minus_con0 = 0;
10858 /* Preserve the MINUS_EXPR if the negative part of the literal is
10859 greater than the positive part. Otherwise, the multiplicative
10860 folding code (i.e extract_muldiv) may be fooled in case
10861 unsigned constants are subtracted, like in the following
10862 example: ((X*2 + 4) - 8U)/2. */
10863 if (minus_lit0 && lit0)
10865 if (TREE_CODE (lit0) == INTEGER_CST
10866 && TREE_CODE (minus_lit0) == INTEGER_CST
10867 && tree_int_cst_lt (lit0, minus_lit0)
10868 /* But avoid ending up with only negated parts. */
10869 && (var0 || con0))
10871 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10872 MINUS_EXPR, atype);
10873 lit0 = 0;
10875 else
10877 lit0 = associate_trees (loc, lit0, minus_lit0,
10878 MINUS_EXPR, atype);
10879 minus_lit0 = 0;
10883 /* Don't introduce overflows through reassociation. */
10884 if ((lit0 && TREE_OVERFLOW_P (lit0))
10885 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
10886 return NULL_TREE;
10888 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
10889 con0 = associate_trees (loc, con0, lit0, code, atype);
10890 lit0 = 0;
10891 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
10892 code, atype);
10893 minus_lit0 = 0;
10895 /* Eliminate minus_con0. */
10896 if (minus_con0)
10898 if (con0)
10899 con0 = associate_trees (loc, con0, minus_con0,
10900 MINUS_EXPR, atype);
10901 else if (var0)
10902 var0 = associate_trees (loc, var0, minus_con0,
10903 MINUS_EXPR, atype);
10904 else
10905 gcc_unreachable ();
10906 minus_con0 = 0;
10909 /* Eliminate minus_var0. */
10910 if (minus_var0)
10912 if (con0)
10913 con0 = associate_trees (loc, con0, minus_var0,
10914 MINUS_EXPR, atype);
10915 else
10916 gcc_unreachable ();
10917 minus_var0 = 0;
10920 return
10921 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10922 code, atype));
10926 return NULL_TREE;
10928 case POINTER_DIFF_EXPR:
10929 case MINUS_EXPR:
10930 /* Fold &a[i] - &a[j] to i-j. */
10931 if (TREE_CODE (arg0) == ADDR_EXPR
10932 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10933 && TREE_CODE (arg1) == ADDR_EXPR
10934 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10936 tree tem = fold_addr_of_array_ref_difference (loc, type,
10937 TREE_OPERAND (arg0, 0),
10938 TREE_OPERAND (arg1, 0),
10939 code
10940 == POINTER_DIFF_EXPR);
10941 if (tem)
10942 return tem;
10945 /* Further transformations are not for pointers. */
10946 if (code == POINTER_DIFF_EXPR)
10947 return NULL_TREE;
10949 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10950 if (TREE_CODE (arg0) == NEGATE_EXPR
10951 && negate_expr_p (op1)
10952 /* If arg0 is e.g. unsigned int and type is int, then this could
10953 introduce UB, because if A is INT_MIN at runtime, the original
10954 expression can be well defined while the latter is not.
10955 See PR83269. */
10956 && !(ANY_INTEGRAL_TYPE_P (type)
10957 && TYPE_OVERFLOW_UNDEFINED (type)
10958 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10959 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10960 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
10961 fold_convert_loc (loc, type,
10962 TREE_OPERAND (arg0, 0)));
10964 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10965 __complex__ ( x, -y ). This is not the same for SNaNs or if
10966 signed zeros are involved. */
10967 if (!HONOR_SNANS (element_mode (arg0))
10968 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10969 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10971 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10972 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10973 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10974 bool arg0rz = false, arg0iz = false;
10975 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10976 || (arg0i && (arg0iz = real_zerop (arg0i))))
10978 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10979 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10980 if (arg0rz && arg1i && real_zerop (arg1i))
10982 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10983 arg1r ? arg1r
10984 : build1 (REALPART_EXPR, rtype, arg1));
10985 tree ip = arg0i ? arg0i
10986 : build1 (IMAGPART_EXPR, rtype, arg0);
10987 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10989 else if (arg0iz && arg1r && real_zerop (arg1r))
10991 tree rp = arg0r ? arg0r
10992 : build1 (REALPART_EXPR, rtype, arg0);
10993 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10994 arg1i ? arg1i
10995 : build1 (IMAGPART_EXPR, rtype, arg1));
10996 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11001 /* A - B -> A + (-B) if B is easily negatable. */
11002 if (negate_expr_p (op1)
11003 && ! TYPE_OVERFLOW_SANITIZED (type)
11004 && ((FLOAT_TYPE_P (type)
11005 /* Avoid this transformation if B is a positive REAL_CST. */
11006 && (TREE_CODE (op1) != REAL_CST
11007 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
11008 || INTEGRAL_TYPE_P (type)))
11009 return fold_build2_loc (loc, PLUS_EXPR, type,
11010 fold_convert_loc (loc, type, arg0),
11011 negate_expr (op1));
11013 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11014 one. Make sure the type is not saturating and has the signedness of
11015 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11016 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11017 if ((TREE_CODE (arg0) == MULT_EXPR
11018 || TREE_CODE (arg1) == MULT_EXPR)
11019 && !TYPE_SATURATING (type)
11020 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11021 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11022 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11024 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11025 if (tem)
11026 return tem;
11029 goto associate;
11031 case MULT_EXPR:
11032 if (! FLOAT_TYPE_P (type))
11034 /* Transform x * -C into -x * C if x is easily negatable. */
11035 if (TREE_CODE (op1) == INTEGER_CST
11036 && tree_int_cst_sgn (op1) == -1
11037 && negate_expr_p (op0)
11038 && negate_expr_p (op1)
11039 && (tem = negate_expr (op1)) != op1
11040 && ! TREE_OVERFLOW (tem))
11041 return fold_build2_loc (loc, MULT_EXPR, type,
11042 fold_convert_loc (loc, type,
11043 negate_expr (op0)), tem);
11045 strict_overflow_p = false;
11046 if (TREE_CODE (arg1) == INTEGER_CST
11047 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11048 &strict_overflow_p)) != 0)
11050 if (strict_overflow_p)
11051 fold_overflow_warning (("assuming signed overflow does not "
11052 "occur when simplifying "
11053 "multiplication"),
11054 WARN_STRICT_OVERFLOW_MISC);
11055 return fold_convert_loc (loc, type, tem);
11058 /* Optimize z * conj(z) for integer complex numbers. */
11059 if (TREE_CODE (arg0) == CONJ_EXPR
11060 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11061 return fold_mult_zconjz (loc, type, arg1);
11062 if (TREE_CODE (arg1) == CONJ_EXPR
11063 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11064 return fold_mult_zconjz (loc, type, arg0);
11066 else
11068 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11069 This is not the same for NaNs or if signed zeros are
11070 involved. */
11071 if (!HONOR_NANS (arg0)
11072 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
11073 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11074 && TREE_CODE (arg1) == COMPLEX_CST
11075 && real_zerop (TREE_REALPART (arg1)))
11077 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11078 if (real_onep (TREE_IMAGPART (arg1)))
11079 return
11080 fold_build2_loc (loc, COMPLEX_EXPR, type,
11081 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11082 rtype, arg0)),
11083 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11084 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11085 return
11086 fold_build2_loc (loc, COMPLEX_EXPR, type,
11087 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11088 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11089 rtype, arg0)));
11092 /* Optimize z * conj(z) for floating point complex numbers.
11093 Guarded by flag_unsafe_math_optimizations as non-finite
11094 imaginary components don't produce scalar results. */
11095 if (flag_unsafe_math_optimizations
11096 && TREE_CODE (arg0) == CONJ_EXPR
11097 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11098 return fold_mult_zconjz (loc, type, arg1);
11099 if (flag_unsafe_math_optimizations
11100 && TREE_CODE (arg1) == CONJ_EXPR
11101 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11102 return fold_mult_zconjz (loc, type, arg0);
11104 goto associate;
11106 case BIT_IOR_EXPR:
11107 /* Canonicalize (X & C1) | C2. */
11108 if (TREE_CODE (arg0) == BIT_AND_EXPR
11109 && TREE_CODE (arg1) == INTEGER_CST
11110 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11112 int width = TYPE_PRECISION (type), w;
11113 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11114 wide_int c2 = wi::to_wide (arg1);
11116 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11117 if ((c1 & c2) == c1)
11118 return omit_one_operand_loc (loc, type, arg1,
11119 TREE_OPERAND (arg0, 0));
11121 wide_int msk = wi::mask (width, false,
11122 TYPE_PRECISION (TREE_TYPE (arg1)));
11124 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11125 if (wi::bit_and_not (msk, c1 | c2) == 0)
11127 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11128 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11131 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11132 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11133 mode which allows further optimizations. */
11134 c1 &= msk;
11135 c2 &= msk;
11136 wide_int c3 = wi::bit_and_not (c1, c2);
11137 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11139 wide_int mask = wi::mask (w, false,
11140 TYPE_PRECISION (type));
11141 if (((c1 | c2) & mask) == mask
11142 && wi::bit_and_not (c1, mask) == 0)
11144 c3 = mask;
11145 break;
11149 if (c3 != c1)
11151 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11152 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11153 wide_int_to_tree (type, c3));
11154 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11158 /* See if this can be simplified into a rotate first. If that
11159 is unsuccessful continue in the association code. */
11160 goto bit_rotate;
11162 case BIT_XOR_EXPR:
11163 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11164 if (TREE_CODE (arg0) == BIT_AND_EXPR
11165 && INTEGRAL_TYPE_P (type)
11166 && integer_onep (TREE_OPERAND (arg0, 1))
11167 && integer_onep (arg1))
11168 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11169 build_zero_cst (TREE_TYPE (arg0)));
11171 /* See if this can be simplified into a rotate first. If that
11172 is unsuccessful continue in the association code. */
11173 goto bit_rotate;
11175 case BIT_AND_EXPR:
11176 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11177 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11178 && INTEGRAL_TYPE_P (type)
11179 && integer_onep (TREE_OPERAND (arg0, 1))
11180 && integer_onep (arg1))
11182 tree tem2;
11183 tem = TREE_OPERAND (arg0, 0);
11184 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11185 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11186 tem, tem2);
11187 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11188 build_zero_cst (TREE_TYPE (tem)));
11190 /* Fold ~X & 1 as (X & 1) == 0. */
11191 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11192 && INTEGRAL_TYPE_P (type)
11193 && integer_onep (arg1))
11195 tree tem2;
11196 tem = TREE_OPERAND (arg0, 0);
11197 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11198 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11199 tem, tem2);
11200 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11201 build_zero_cst (TREE_TYPE (tem)));
11203 /* Fold !X & 1 as X == 0. */
11204 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11205 && integer_onep (arg1))
11207 tem = TREE_OPERAND (arg0, 0);
11208 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11209 build_zero_cst (TREE_TYPE (tem)));
11212 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11213 multiple of 1 << CST. */
11214 if (TREE_CODE (arg1) == INTEGER_CST)
11216 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11217 wide_int ncst1 = -cst1;
11218 if ((cst1 & ncst1) == ncst1
11219 && multiple_of_p (type, arg0,
11220 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11221 return fold_convert_loc (loc, type, arg0);
11224 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11225 bits from CST2. */
11226 if (TREE_CODE (arg1) == INTEGER_CST
11227 && TREE_CODE (arg0) == MULT_EXPR
11228 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11230 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11231 wide_int masked
11232 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11234 if (masked == 0)
11235 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11236 arg0, arg1);
11237 else if (masked != warg1)
11239 /* Avoid the transform if arg1 is a mask of some
11240 mode which allows further optimizations. */
11241 int pop = wi::popcount (warg1);
11242 if (!(pop >= BITS_PER_UNIT
11243 && pow2p_hwi (pop)
11244 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11245 return fold_build2_loc (loc, code, type, op0,
11246 wide_int_to_tree (type, masked));
11250 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11251 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11252 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11254 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11256 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11257 if (mask == -1)
11258 return
11259 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11262 goto associate;
11264 case RDIV_EXPR:
11265 /* Don't touch a floating-point divide by zero unless the mode
11266 of the constant can represent infinity. */
11267 if (TREE_CODE (arg1) == REAL_CST
11268 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11269 && real_zerop (arg1))
11270 return NULL_TREE;
11272 /* (-A) / (-B) -> A / B */
11273 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11274 return fold_build2_loc (loc, RDIV_EXPR, type,
11275 TREE_OPERAND (arg0, 0),
11276 negate_expr (arg1));
11277 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11278 return fold_build2_loc (loc, RDIV_EXPR, type,
11279 negate_expr (arg0),
11280 TREE_OPERAND (arg1, 0));
11281 return NULL_TREE;
11283 case TRUNC_DIV_EXPR:
11284 /* Fall through */
11286 case FLOOR_DIV_EXPR:
11287 /* Simplify A / (B << N) where A and B are positive and B is
11288 a power of 2, to A >> (N + log2(B)). */
11289 strict_overflow_p = false;
11290 if (TREE_CODE (arg1) == LSHIFT_EXPR
11291 && (TYPE_UNSIGNED (type)
11292 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11294 tree sval = TREE_OPERAND (arg1, 0);
11295 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11297 tree sh_cnt = TREE_OPERAND (arg1, 1);
11298 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11299 wi::exact_log2 (wi::to_wide (sval)));
11301 if (strict_overflow_p)
11302 fold_overflow_warning (("assuming signed overflow does not "
11303 "occur when simplifying A / (B << N)"),
11304 WARN_STRICT_OVERFLOW_MISC);
11306 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11307 sh_cnt, pow2);
11308 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11309 fold_convert_loc (loc, type, arg0), sh_cnt);
11313 /* Fall through */
11315 case ROUND_DIV_EXPR:
11316 case CEIL_DIV_EXPR:
11317 case EXACT_DIV_EXPR:
11318 if (integer_zerop (arg1))
11319 return NULL_TREE;
11321 /* Convert -A / -B to A / B when the type is signed and overflow is
11322 undefined. */
11323 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11324 && TREE_CODE (op0) == NEGATE_EXPR
11325 && negate_expr_p (op1))
11327 if (ANY_INTEGRAL_TYPE_P (type))
11328 fold_overflow_warning (("assuming signed overflow does not occur "
11329 "when distributing negation across "
11330 "division"),
11331 WARN_STRICT_OVERFLOW_MISC);
11332 return fold_build2_loc (loc, code, type,
11333 fold_convert_loc (loc, type,
11334 TREE_OPERAND (arg0, 0)),
11335 negate_expr (op1));
11337 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11338 && TREE_CODE (arg1) == NEGATE_EXPR
11339 && negate_expr_p (op0))
11341 if (ANY_INTEGRAL_TYPE_P (type))
11342 fold_overflow_warning (("assuming signed overflow does not occur "
11343 "when distributing negation across "
11344 "division"),
11345 WARN_STRICT_OVERFLOW_MISC);
11346 return fold_build2_loc (loc, code, type,
11347 negate_expr (op0),
11348 fold_convert_loc (loc, type,
11349 TREE_OPERAND (arg1, 0)));
11352 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11353 operation, EXACT_DIV_EXPR.
11355 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11356 At one time others generated faster code, it's not clear if they do
11357 after the last round to changes to the DIV code in expmed.c. */
11358 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11359 && multiple_of_p (type, arg0, arg1))
11360 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11361 fold_convert (type, arg0),
11362 fold_convert (type, arg1));
11364 strict_overflow_p = false;
11365 if (TREE_CODE (arg1) == INTEGER_CST
11366 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11367 &strict_overflow_p)) != 0)
11369 if (strict_overflow_p)
11370 fold_overflow_warning (("assuming signed overflow does not occur "
11371 "when simplifying division"),
11372 WARN_STRICT_OVERFLOW_MISC);
11373 return fold_convert_loc (loc, type, tem);
11376 return NULL_TREE;
11378 case CEIL_MOD_EXPR:
11379 case FLOOR_MOD_EXPR:
11380 case ROUND_MOD_EXPR:
11381 case TRUNC_MOD_EXPR:
11382 strict_overflow_p = false;
11383 if (TREE_CODE (arg1) == INTEGER_CST
11384 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11385 &strict_overflow_p)) != 0)
11387 if (strict_overflow_p)
11388 fold_overflow_warning (("assuming signed overflow does not occur "
11389 "when simplifying modulus"),
11390 WARN_STRICT_OVERFLOW_MISC);
11391 return fold_convert_loc (loc, type, tem);
11394 return NULL_TREE;
11396 case LROTATE_EXPR:
11397 case RROTATE_EXPR:
11398 case RSHIFT_EXPR:
11399 case LSHIFT_EXPR:
11400 /* Since negative shift count is not well-defined,
11401 don't try to compute it in the compiler. */
11402 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11403 return NULL_TREE;
11405 prec = element_precision (type);
11407 /* If we have a rotate of a bit operation with the rotate count and
11408 the second operand of the bit operation both constant,
11409 permute the two operations. */
11410 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11411 && (TREE_CODE (arg0) == BIT_AND_EXPR
11412 || TREE_CODE (arg0) == BIT_IOR_EXPR
11413 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11414 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11416 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11417 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11418 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11419 fold_build2_loc (loc, code, type,
11420 arg00, arg1),
11421 fold_build2_loc (loc, code, type,
11422 arg01, arg1));
11425 /* Two consecutive rotates adding up to the some integer
11426 multiple of the precision of the type can be ignored. */
11427 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11428 && TREE_CODE (arg0) == RROTATE_EXPR
11429 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11430 && wi::umod_trunc (wi::to_wide (arg1)
11431 + wi::to_wide (TREE_OPERAND (arg0, 1)),
11432 prec) == 0)
11433 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11435 return NULL_TREE;
11437 case MIN_EXPR:
11438 case MAX_EXPR:
11439 goto associate;
11441 case TRUTH_ANDIF_EXPR:
11442 /* Note that the operands of this must be ints
11443 and their values must be 0 or 1.
11444 ("true" is a fixed value perhaps depending on the language.) */
11445 /* If first arg is constant zero, return it. */
11446 if (integer_zerop (arg0))
11447 return fold_convert_loc (loc, type, arg0);
11448 /* FALLTHRU */
11449 case TRUTH_AND_EXPR:
11450 /* If either arg is constant true, drop it. */
11451 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11452 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11453 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11454 /* Preserve sequence points. */
11455 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11456 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11457 /* If second arg is constant zero, result is zero, but first arg
11458 must be evaluated. */
11459 if (integer_zerop (arg1))
11460 return omit_one_operand_loc (loc, type, arg1, arg0);
11461 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11462 case will be handled here. */
11463 if (integer_zerop (arg0))
11464 return omit_one_operand_loc (loc, type, arg0, arg1);
11466 /* !X && X is always false. */
11467 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11468 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11469 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11470 /* X && !X is always false. */
11471 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11472 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11473 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11475 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11476 means A >= Y && A != MAX, but in this case we know that
11477 A < X <= MAX. */
11479 if (!TREE_SIDE_EFFECTS (arg0)
11480 && !TREE_SIDE_EFFECTS (arg1))
11482 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11483 if (tem && !operand_equal_p (tem, arg0, 0))
11484 return fold_build2_loc (loc, code, type, tem, arg1);
11486 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11487 if (tem && !operand_equal_p (tem, arg1, 0))
11488 return fold_build2_loc (loc, code, type, arg0, tem);
11491 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11492 != NULL_TREE)
11493 return tem;
11495 return NULL_TREE;
11497 case TRUTH_ORIF_EXPR:
11498 /* Note that the operands of this must be ints
11499 and their values must be 0 or true.
11500 ("true" is a fixed value perhaps depending on the language.) */
11501 /* If first arg is constant true, return it. */
11502 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11503 return fold_convert_loc (loc, type, arg0);
11504 /* FALLTHRU */
11505 case TRUTH_OR_EXPR:
11506 /* If either arg is constant zero, drop it. */
11507 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11508 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11509 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11510 /* Preserve sequence points. */
11511 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11512 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11513 /* If second arg is constant true, result is true, but we must
11514 evaluate first arg. */
11515 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11516 return omit_one_operand_loc (loc, type, arg1, arg0);
11517 /* Likewise for first arg, but note this only occurs here for
11518 TRUTH_OR_EXPR. */
11519 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11520 return omit_one_operand_loc (loc, type, arg0, arg1);
11522 /* !X || X is always true. */
11523 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11524 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11525 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11526 /* X || !X is always true. */
11527 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11528 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11529 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11531 /* (X && !Y) || (!X && Y) is X ^ Y */
11532 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
11533 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
11535 tree a0, a1, l0, l1, n0, n1;
11537 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11538 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11540 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11541 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11543 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
11544 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
11546 if ((operand_equal_p (n0, a0, 0)
11547 && operand_equal_p (n1, a1, 0))
11548 || (operand_equal_p (n0, a1, 0)
11549 && operand_equal_p (n1, a0, 0)))
11550 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
11553 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11554 != NULL_TREE)
11555 return tem;
11557 return NULL_TREE;
11559 case TRUTH_XOR_EXPR:
11560 /* If the second arg is constant zero, drop it. */
11561 if (integer_zerop (arg1))
11562 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11563 /* If the second arg is constant true, this is a logical inversion. */
11564 if (integer_onep (arg1))
11566 tem = invert_truthvalue_loc (loc, arg0);
11567 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
11569 /* Identical arguments cancel to zero. */
11570 if (operand_equal_p (arg0, arg1, 0))
11571 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11573 /* !X ^ X is always true. */
11574 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11575 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11576 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11578 /* X ^ !X is always true. */
11579 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11580 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11581 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11583 return NULL_TREE;
11585 case EQ_EXPR:
11586 case NE_EXPR:
11587 STRIP_NOPS (arg0);
11588 STRIP_NOPS (arg1);
11590 tem = fold_comparison (loc, code, type, op0, op1);
11591 if (tem != NULL_TREE)
11592 return tem;
11594 /* bool_var != 1 becomes !bool_var. */
11595 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11596 && code == NE_EXPR)
11597 return fold_convert_loc (loc, type,
11598 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11599 TREE_TYPE (arg0), arg0));
11601 /* bool_var == 0 becomes !bool_var. */
11602 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11603 && code == EQ_EXPR)
11604 return fold_convert_loc (loc, type,
11605 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11606 TREE_TYPE (arg0), arg0));
11608 /* !exp != 0 becomes !exp */
11609 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
11610 && code == NE_EXPR)
11611 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11613 /* If this is an EQ or NE comparison with zero and ARG0 is
11614 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11615 two operations, but the latter can be done in one less insn
11616 on machines that have only two-operand insns or on which a
11617 constant cannot be the first operand. */
11618 if (TREE_CODE (arg0) == BIT_AND_EXPR
11619 && integer_zerop (arg1))
11621 tree arg00 = TREE_OPERAND (arg0, 0);
11622 tree arg01 = TREE_OPERAND (arg0, 1);
11623 if (TREE_CODE (arg00) == LSHIFT_EXPR
11624 && integer_onep (TREE_OPERAND (arg00, 0)))
11626 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
11627 arg01, TREE_OPERAND (arg00, 1));
11628 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11629 build_int_cst (TREE_TYPE (arg0), 1));
11630 return fold_build2_loc (loc, code, type,
11631 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11632 arg1);
11634 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11635 && integer_onep (TREE_OPERAND (arg01, 0)))
11637 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
11638 arg00, TREE_OPERAND (arg01, 1));
11639 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11640 build_int_cst (TREE_TYPE (arg0), 1));
11641 return fold_build2_loc (loc, code, type,
11642 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11643 arg1);
11647 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11648 C1 is a valid shift constant, and C2 is a power of two, i.e.
11649 a single bit. */
11650 if (TREE_CODE (arg0) == BIT_AND_EXPR
11651 && integer_pow2p (TREE_OPERAND (arg0, 1))
11652 && integer_zerop (arg1))
11654 tree arg00 = TREE_OPERAND (arg0, 0);
11655 STRIP_NOPS (arg00);
11656 if (TREE_CODE (arg00) == RSHIFT_EXPR
11657 && TREE_CODE (TREE_OPERAND (arg00, 1)) == INTEGER_CST)
11659 tree itype = TREE_TYPE (arg00);
11660 tree arg001 = TREE_OPERAND (arg00, 1);
11661 prec = TYPE_PRECISION (itype);
11663 /* Check for a valid shift count. */
11664 if (wi::ltu_p (wi::to_wide (arg001), prec))
11666 tree arg01 = TREE_OPERAND (arg0, 1);
11667 tree arg000 = TREE_OPERAND (arg00, 0);
11668 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11669 /* If (C2 << C1) doesn't overflow, then
11670 ((X >> C1) & C2) != 0 can be rewritten as
11671 (X & (C2 << C1)) != 0. */
11672 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11674 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
11675 arg01, arg001);
11676 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
11677 arg000, tem);
11678 return fold_build2_loc (loc, code, type, tem,
11679 fold_convert_loc (loc, itype, arg1));
11681 /* Otherwise, for signed (arithmetic) shifts,
11682 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11683 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11684 else if (!TYPE_UNSIGNED (itype))
11685 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR
11686 : LT_EXPR,
11687 type, arg000,
11688 build_int_cst (itype, 0));
11689 /* Otherwise, of unsigned (logical) shifts,
11690 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11691 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11692 else
11693 return omit_one_operand_loc (loc, type,
11694 code == EQ_EXPR ? integer_one_node
11695 : integer_zero_node,
11696 arg000);
11701 /* If this is a comparison of a field, we may be able to simplify it. */
11702 if ((TREE_CODE (arg0) == COMPONENT_REF
11703 || TREE_CODE (arg0) == BIT_FIELD_REF)
11704 /* Handle the constant case even without -O
11705 to make sure the warnings are given. */
11706 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11708 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
11709 if (t1)
11710 return t1;
11713 /* Optimize comparisons of strlen vs zero to a compare of the
11714 first character of the string vs zero. To wit,
11715 strlen(ptr) == 0 => *ptr == 0
11716 strlen(ptr) != 0 => *ptr != 0
11717 Other cases should reduce to one of these two (or a constant)
11718 due to the return value of strlen being unsigned. */
11719 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
11721 tree fndecl = get_callee_fndecl (arg0);
11723 if (fndecl
11724 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
11725 && call_expr_nargs (arg0) == 1
11726 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
11727 == POINTER_TYPE))
11729 tree ptrtype
11730 = build_pointer_type (build_qualified_type (char_type_node,
11731 TYPE_QUAL_CONST));
11732 tree ptr = fold_convert_loc (loc, ptrtype,
11733 CALL_EXPR_ARG (arg0, 0));
11734 tree iref = build_fold_indirect_ref_loc (loc, ptr);
11735 return fold_build2_loc (loc, code, type, iref,
11736 build_int_cst (TREE_TYPE (iref), 0));
11740 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11741 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11742 if (TREE_CODE (arg0) == RSHIFT_EXPR
11743 && integer_zerop (arg1)
11744 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11746 tree arg00 = TREE_OPERAND (arg0, 0);
11747 tree arg01 = TREE_OPERAND (arg0, 1);
11748 tree itype = TREE_TYPE (arg00);
11749 if (wi::to_wide (arg01) == element_precision (itype) - 1)
11751 if (TYPE_UNSIGNED (itype))
11753 itype = signed_type_for (itype);
11754 arg00 = fold_convert_loc (loc, itype, arg00);
11756 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11757 type, arg00, build_zero_cst (itype));
11761 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11762 (X & C) == 0 when C is a single bit. */
11763 if (TREE_CODE (arg0) == BIT_AND_EXPR
11764 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11765 && integer_zerop (arg1)
11766 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11768 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
11769 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11770 TREE_OPERAND (arg0, 1));
11771 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11772 type, tem,
11773 fold_convert_loc (loc, TREE_TYPE (arg0),
11774 arg1));
11777 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11778 constant C is a power of two, i.e. a single bit. */
11779 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11780 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11781 && integer_zerop (arg1)
11782 && integer_pow2p (TREE_OPERAND (arg0, 1))
11783 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11784 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11786 tree arg00 = TREE_OPERAND (arg0, 0);
11787 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11788 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11791 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11792 when is C is a power of two, i.e. a single bit. */
11793 if (TREE_CODE (arg0) == BIT_AND_EXPR
11794 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11795 && integer_zerop (arg1)
11796 && integer_pow2p (TREE_OPERAND (arg0, 1))
11797 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11798 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11800 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11801 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
11802 arg000, TREE_OPERAND (arg0, 1));
11803 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11804 tem, build_int_cst (TREE_TYPE (tem), 0));
11807 if (integer_zerop (arg1)
11808 && tree_expr_nonzero_p (arg0))
11810 tree res = constant_boolean_node (code==NE_EXPR, type);
11811 return omit_one_operand_loc (loc, type, res, arg0);
11814 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11815 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11817 tree arg00 = TREE_OPERAND (arg0, 0);
11818 tree arg01 = TREE_OPERAND (arg0, 1);
11819 tree arg10 = TREE_OPERAND (arg1, 0);
11820 tree arg11 = TREE_OPERAND (arg1, 1);
11821 tree itype = TREE_TYPE (arg0);
11823 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11824 operand_equal_p guarantees no side-effects so we don't need
11825 to use omit_one_operand on Z. */
11826 if (operand_equal_p (arg01, arg11, 0))
11827 return fold_build2_loc (loc, code, type, arg00,
11828 fold_convert_loc (loc, TREE_TYPE (arg00),
11829 arg10));
11830 if (operand_equal_p (arg01, arg10, 0))
11831 return fold_build2_loc (loc, code, type, arg00,
11832 fold_convert_loc (loc, TREE_TYPE (arg00),
11833 arg11));
11834 if (operand_equal_p (arg00, arg11, 0))
11835 return fold_build2_loc (loc, code, type, arg01,
11836 fold_convert_loc (loc, TREE_TYPE (arg01),
11837 arg10));
11838 if (operand_equal_p (arg00, arg10, 0))
11839 return fold_build2_loc (loc, code, type, arg01,
11840 fold_convert_loc (loc, TREE_TYPE (arg01),
11841 arg11));
11843 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11844 if (TREE_CODE (arg01) == INTEGER_CST
11845 && TREE_CODE (arg11) == INTEGER_CST)
11847 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11848 fold_convert_loc (loc, itype, arg11));
11849 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11850 return fold_build2_loc (loc, code, type, tem,
11851 fold_convert_loc (loc, itype, arg10));
11855 /* Attempt to simplify equality/inequality comparisons of complex
11856 values. Only lower the comparison if the result is known or
11857 can be simplified to a single scalar comparison. */
11858 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11859 || TREE_CODE (arg0) == COMPLEX_CST)
11860 && (TREE_CODE (arg1) == COMPLEX_EXPR
11861 || TREE_CODE (arg1) == COMPLEX_CST))
11863 tree real0, imag0, real1, imag1;
11864 tree rcond, icond;
11866 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11868 real0 = TREE_OPERAND (arg0, 0);
11869 imag0 = TREE_OPERAND (arg0, 1);
11871 else
11873 real0 = TREE_REALPART (arg0);
11874 imag0 = TREE_IMAGPART (arg0);
11877 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11879 real1 = TREE_OPERAND (arg1, 0);
11880 imag1 = TREE_OPERAND (arg1, 1);
11882 else
11884 real1 = TREE_REALPART (arg1);
11885 imag1 = TREE_IMAGPART (arg1);
11888 rcond = fold_binary_loc (loc, code, type, real0, real1);
11889 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11891 if (integer_zerop (rcond))
11893 if (code == EQ_EXPR)
11894 return omit_two_operands_loc (loc, type, boolean_false_node,
11895 imag0, imag1);
11896 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11898 else
11900 if (code == NE_EXPR)
11901 return omit_two_operands_loc (loc, type, boolean_true_node,
11902 imag0, imag1);
11903 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11907 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11908 if (icond && TREE_CODE (icond) == INTEGER_CST)
11910 if (integer_zerop (icond))
11912 if (code == EQ_EXPR)
11913 return omit_two_operands_loc (loc, type, boolean_false_node,
11914 real0, real1);
11915 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11917 else
11919 if (code == NE_EXPR)
11920 return omit_two_operands_loc (loc, type, boolean_true_node,
11921 real0, real1);
11922 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11927 return NULL_TREE;
11929 case LT_EXPR:
11930 case GT_EXPR:
11931 case LE_EXPR:
11932 case GE_EXPR:
11933 tem = fold_comparison (loc, code, type, op0, op1);
11934 if (tem != NULL_TREE)
11935 return tem;
11937 /* Transform comparisons of the form X +- C CMP X. */
11938 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11939 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11940 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11941 && !HONOR_SNANS (arg0))
11943 tree arg01 = TREE_OPERAND (arg0, 1);
11944 enum tree_code code0 = TREE_CODE (arg0);
11945 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11947 /* (X - c) > X becomes false. */
11948 if (code == GT_EXPR
11949 && ((code0 == MINUS_EXPR && is_positive >= 0)
11950 || (code0 == PLUS_EXPR && is_positive <= 0)))
11951 return constant_boolean_node (0, type);
11953 /* Likewise (X + c) < X becomes false. */
11954 if (code == LT_EXPR
11955 && ((code0 == PLUS_EXPR && is_positive >= 0)
11956 || (code0 == MINUS_EXPR && is_positive <= 0)))
11957 return constant_boolean_node (0, type);
11959 /* Convert (X - c) <= X to true. */
11960 if (!HONOR_NANS (arg1)
11961 && code == LE_EXPR
11962 && ((code0 == MINUS_EXPR && is_positive >= 0)
11963 || (code0 == PLUS_EXPR && is_positive <= 0)))
11964 return constant_boolean_node (1, type);
11966 /* Convert (X + c) >= X to true. */
11967 if (!HONOR_NANS (arg1)
11968 && code == GE_EXPR
11969 && ((code0 == PLUS_EXPR && is_positive >= 0)
11970 || (code0 == MINUS_EXPR && is_positive <= 0)))
11971 return constant_boolean_node (1, type);
11974 /* If we are comparing an ABS_EXPR with a constant, we can
11975 convert all the cases into explicit comparisons, but they may
11976 well not be faster than doing the ABS and one comparison.
11977 But ABS (X) <= C is a range comparison, which becomes a subtraction
11978 and a comparison, and is probably faster. */
11979 if (code == LE_EXPR
11980 && TREE_CODE (arg1) == INTEGER_CST
11981 && TREE_CODE (arg0) == ABS_EXPR
11982 && ! TREE_SIDE_EFFECTS (arg0)
11983 && (tem = negate_expr (arg1)) != 0
11984 && TREE_CODE (tem) == INTEGER_CST
11985 && !TREE_OVERFLOW (tem))
11986 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11987 build2 (GE_EXPR, type,
11988 TREE_OPERAND (arg0, 0), tem),
11989 build2 (LE_EXPR, type,
11990 TREE_OPERAND (arg0, 0), arg1));
11992 /* Convert ABS_EXPR<x> >= 0 to true. */
11993 strict_overflow_p = false;
11994 if (code == GE_EXPR
11995 && (integer_zerop (arg1)
11996 || (! HONOR_NANS (arg0)
11997 && real_zerop (arg1)))
11998 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12000 if (strict_overflow_p)
12001 fold_overflow_warning (("assuming signed overflow does not occur "
12002 "when simplifying comparison of "
12003 "absolute value and zero"),
12004 WARN_STRICT_OVERFLOW_CONDITIONAL);
12005 return omit_one_operand_loc (loc, type,
12006 constant_boolean_node (true, type),
12007 arg0);
12010 /* Convert ABS_EXPR<x> < 0 to false. */
12011 strict_overflow_p = false;
12012 if (code == LT_EXPR
12013 && (integer_zerop (arg1) || real_zerop (arg1))
12014 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12016 if (strict_overflow_p)
12017 fold_overflow_warning (("assuming signed overflow does not occur "
12018 "when simplifying comparison of "
12019 "absolute value and zero"),
12020 WARN_STRICT_OVERFLOW_CONDITIONAL);
12021 return omit_one_operand_loc (loc, type,
12022 constant_boolean_node (false, type),
12023 arg0);
12026 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12027 and similarly for >= into !=. */
12028 if ((code == LT_EXPR || code == GE_EXPR)
12029 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12030 && TREE_CODE (arg1) == LSHIFT_EXPR
12031 && integer_onep (TREE_OPERAND (arg1, 0)))
12032 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12033 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12034 TREE_OPERAND (arg1, 1)),
12035 build_zero_cst (TREE_TYPE (arg0)));
12037 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12038 otherwise Y might be >= # of bits in X's type and thus e.g.
12039 (unsigned char) (1 << Y) for Y 15 might be 0.
12040 If the cast is widening, then 1 << Y should have unsigned type,
12041 otherwise if Y is number of bits in the signed shift type minus 1,
12042 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
12043 31 might be 0xffffffff80000000. */
12044 if ((code == LT_EXPR || code == GE_EXPR)
12045 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12046 && CONVERT_EXPR_P (arg1)
12047 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12048 && (element_precision (TREE_TYPE (arg1))
12049 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12050 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12051 || (element_precision (TREE_TYPE (arg1))
12052 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12053 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12055 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12056 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12057 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12058 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12059 build_zero_cst (TREE_TYPE (arg0)));
12062 return NULL_TREE;
12064 case UNORDERED_EXPR:
12065 case ORDERED_EXPR:
12066 case UNLT_EXPR:
12067 case UNLE_EXPR:
12068 case UNGT_EXPR:
12069 case UNGE_EXPR:
12070 case UNEQ_EXPR:
12071 case LTGT_EXPR:
12072 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12074 tree targ0 = strip_float_extensions (arg0);
12075 tree targ1 = strip_float_extensions (arg1);
12076 tree newtype = TREE_TYPE (targ0);
12078 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12079 newtype = TREE_TYPE (targ1);
12081 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12082 return fold_build2_loc (loc, code, type,
12083 fold_convert_loc (loc, newtype, targ0),
12084 fold_convert_loc (loc, newtype, targ1));
12087 return NULL_TREE;
12089 case COMPOUND_EXPR:
12090 /* When pedantic, a compound expression can be neither an lvalue
12091 nor an integer constant expression. */
12092 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12093 return NULL_TREE;
12094 /* Don't let (0, 0) be null pointer constant. */
12095 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12096 : fold_convert_loc (loc, type, arg1);
12097 return pedantic_non_lvalue_loc (loc, tem);
12099 case ASSERT_EXPR:
12100 /* An ASSERT_EXPR should never be passed to fold_binary. */
12101 gcc_unreachable ();
12103 default:
12104 return NULL_TREE;
12105 } /* switch (code) */
12108 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12109 ((A & N) + B) & M -> (A + B) & M
12110 Similarly if (N & M) == 0,
12111 ((A | N) + B) & M -> (A + B) & M
12112 and for - instead of + (or unary - instead of +)
12113 and/or ^ instead of |.
12114 If B is constant and (B & M) == 0, fold into A & M.
12116 This function is a helper for match.pd patterns. Return non-NULL
12117 type in which the simplified operation should be performed only
12118 if any optimization is possible.
12120 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12121 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12122 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12123 +/-. */
12124 tree
12125 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12126 tree arg00, enum tree_code code00, tree arg000, tree arg001,
12127 tree arg01, enum tree_code code01, tree arg010, tree arg011,
12128 tree *pmop)
12130 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12131 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12132 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12133 if (~cst1 == 0
12134 || (cst1 & (cst1 + 1)) != 0
12135 || !INTEGRAL_TYPE_P (type)
12136 || (!TYPE_OVERFLOW_WRAPS (type)
12137 && TREE_CODE (type) != INTEGER_TYPE)
12138 || (wi::max_value (type) & cst1) != cst1)
12139 return NULL_TREE;
12141 enum tree_code codes[2] = { code00, code01 };
12142 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12143 int which = 0;
12144 wide_int cst0;
12146 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12147 arg1 (M) is == (1LL << cst) - 1.
12148 Store C into PMOP[0] and D into PMOP[1]. */
12149 pmop[0] = arg00;
12150 pmop[1] = arg01;
12151 which = code != NEGATE_EXPR;
12153 for (; which >= 0; which--)
12154 switch (codes[which])
12156 case BIT_AND_EXPR:
12157 case BIT_IOR_EXPR:
12158 case BIT_XOR_EXPR:
12159 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12160 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12161 if (codes[which] == BIT_AND_EXPR)
12163 if (cst0 != cst1)
12164 break;
12166 else if (cst0 != 0)
12167 break;
12168 /* If C or D is of the form (A & N) where
12169 (N & M) == M, or of the form (A | N) or
12170 (A ^ N) where (N & M) == 0, replace it with A. */
12171 pmop[which] = arg0xx[2 * which];
12172 break;
12173 case ERROR_MARK:
12174 if (TREE_CODE (pmop[which]) != INTEGER_CST)
12175 break;
12176 /* If C or D is a N where (N & M) == 0, it can be
12177 omitted (replaced with 0). */
12178 if ((code == PLUS_EXPR
12179 || (code == MINUS_EXPR && which == 0))
12180 && (cst1 & wi::to_wide (pmop[which])) == 0)
12181 pmop[which] = build_int_cst (type, 0);
12182 /* Similarly, with C - N where (-N & M) == 0. */
12183 if (code == MINUS_EXPR
12184 && which == 1
12185 && (cst1 & -wi::to_wide (pmop[which])) == 0)
12186 pmop[which] = build_int_cst (type, 0);
12187 break;
12188 default:
12189 gcc_unreachable ();
12192 /* Only build anything new if we optimized one or both arguments above. */
12193 if (pmop[0] == arg00 && pmop[1] == arg01)
12194 return NULL_TREE;
12196 if (TYPE_OVERFLOW_WRAPS (type))
12197 return type;
12198 else
12199 return unsigned_type_for (type);
12202 /* Used by contains_label_[p1]. */
12204 struct contains_label_data
12206 hash_set<tree> *pset;
12207 bool inside_switch_p;
12210 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12211 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12212 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12214 static tree
12215 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12217 contains_label_data *d = (contains_label_data *) data;
12218 switch (TREE_CODE (*tp))
12220 case LABEL_EXPR:
12221 return *tp;
12223 case CASE_LABEL_EXPR:
12224 if (!d->inside_switch_p)
12225 return *tp;
12226 return NULL_TREE;
12228 case SWITCH_EXPR:
12229 if (!d->inside_switch_p)
12231 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12232 return *tp;
12233 d->inside_switch_p = true;
12234 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12235 return *tp;
12236 d->inside_switch_p = false;
12237 *walk_subtrees = 0;
12239 return NULL_TREE;
12241 case GOTO_EXPR:
12242 *walk_subtrees = 0;
12243 return NULL_TREE;
12245 default:
12246 return NULL_TREE;
12250 /* Return whether the sub-tree ST contains a label which is accessible from
12251 outside the sub-tree. */
12253 static bool
12254 contains_label_p (tree st)
12256 hash_set<tree> pset;
12257 contains_label_data data = { &pset, false };
12258 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12261 /* Fold a ternary expression of code CODE and type TYPE with operands
12262 OP0, OP1, and OP2. Return the folded expression if folding is
12263 successful. Otherwise, return NULL_TREE. */
12265 tree
12266 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12267 tree op0, tree op1, tree op2)
12269 tree tem;
12270 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12271 enum tree_code_class kind = TREE_CODE_CLASS (code);
12273 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12274 && TREE_CODE_LENGTH (code) == 3);
12276 /* If this is a commutative operation, and OP0 is a constant, move it
12277 to OP1 to reduce the number of tests below. */
12278 if (commutative_ternary_tree_code (code)
12279 && tree_swap_operands_p (op0, op1))
12280 return fold_build3_loc (loc, code, type, op1, op0, op2);
12282 tem = generic_simplify (loc, code, type, op0, op1, op2);
12283 if (tem)
12284 return tem;
12286 /* Strip any conversions that don't change the mode. This is safe
12287 for every expression, except for a comparison expression because
12288 its signedness is derived from its operands. So, in the latter
12289 case, only strip conversions that don't change the signedness.
12291 Note that this is done as an internal manipulation within the
12292 constant folder, in order to find the simplest representation of
12293 the arguments so that their form can be studied. In any cases,
12294 the appropriate type conversions should be put back in the tree
12295 that will get out of the constant folder. */
12296 if (op0)
12298 arg0 = op0;
12299 STRIP_NOPS (arg0);
12302 if (op1)
12304 arg1 = op1;
12305 STRIP_NOPS (arg1);
12308 if (op2)
12310 arg2 = op2;
12311 STRIP_NOPS (arg2);
12314 switch (code)
12316 case COMPONENT_REF:
12317 if (TREE_CODE (arg0) == CONSTRUCTOR
12318 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12320 unsigned HOST_WIDE_INT idx;
12321 tree field, value;
12322 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12323 if (field == arg1)
12324 return value;
12326 return NULL_TREE;
12328 case COND_EXPR:
12329 case VEC_COND_EXPR:
12330 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12331 so all simple results must be passed through pedantic_non_lvalue. */
12332 if (TREE_CODE (arg0) == INTEGER_CST)
12334 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12335 tem = integer_zerop (arg0) ? op2 : op1;
12336 /* Only optimize constant conditions when the selected branch
12337 has the same type as the COND_EXPR. This avoids optimizing
12338 away "c ? x : throw", where the throw has a void type.
12339 Avoid throwing away that operand which contains label. */
12340 if ((!TREE_SIDE_EFFECTS (unused_op)
12341 || !contains_label_p (unused_op))
12342 && (! VOID_TYPE_P (TREE_TYPE (tem))
12343 || VOID_TYPE_P (type)))
12344 return pedantic_non_lvalue_loc (loc, tem);
12345 return NULL_TREE;
12347 else if (TREE_CODE (arg0) == VECTOR_CST)
12349 unsigned HOST_WIDE_INT nelts;
12350 if ((TREE_CODE (arg1) == VECTOR_CST
12351 || TREE_CODE (arg1) == CONSTRUCTOR)
12352 && (TREE_CODE (arg2) == VECTOR_CST
12353 || TREE_CODE (arg2) == CONSTRUCTOR)
12354 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12356 vec_perm_builder sel (nelts, nelts, 1);
12357 for (unsigned int i = 0; i < nelts; i++)
12359 tree val = VECTOR_CST_ELT (arg0, i);
12360 if (integer_all_onesp (val))
12361 sel.quick_push (i);
12362 else if (integer_zerop (val))
12363 sel.quick_push (nelts + i);
12364 else /* Currently unreachable. */
12365 return NULL_TREE;
12367 vec_perm_indices indices (sel, 2, nelts);
12368 tree t = fold_vec_perm (type, arg1, arg2, indices);
12369 if (t != NULL_TREE)
12370 return t;
12374 /* If we have A op B ? A : C, we may be able to convert this to a
12375 simpler expression, depending on the operation and the values
12376 of B and C. Signed zeros prevent all of these transformations,
12377 for reasons given above each one.
12379 Also try swapping the arguments and inverting the conditional. */
12380 if (COMPARISON_CLASS_P (arg0)
12381 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12382 && !HONOR_SIGNED_ZEROS (element_mode (op1)))
12384 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
12385 if (tem)
12386 return tem;
12389 if (COMPARISON_CLASS_P (arg0)
12390 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12391 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
12393 location_t loc0 = expr_location_or (arg0, loc);
12394 tem = fold_invert_truthvalue (loc0, arg0);
12395 if (tem && COMPARISON_CLASS_P (tem))
12397 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
12398 if (tem)
12399 return tem;
12403 /* If the second operand is simpler than the third, swap them
12404 since that produces better jump optimization results. */
12405 if (truth_value_p (TREE_CODE (arg0))
12406 && tree_swap_operands_p (op1, op2))
12408 location_t loc0 = expr_location_or (arg0, loc);
12409 /* See if this can be inverted. If it can't, possibly because
12410 it was a floating-point inequality comparison, don't do
12411 anything. */
12412 tem = fold_invert_truthvalue (loc0, arg0);
12413 if (tem)
12414 return fold_build3_loc (loc, code, type, tem, op2, op1);
12417 /* Convert A ? 1 : 0 to simply A. */
12418 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12419 : (integer_onep (op1)
12420 && !VECTOR_TYPE_P (type)))
12421 && integer_zerop (op2)
12422 /* If we try to convert OP0 to our type, the
12423 call to fold will try to move the conversion inside
12424 a COND, which will recurse. In that case, the COND_EXPR
12425 is probably the best choice, so leave it alone. */
12426 && type == TREE_TYPE (arg0))
12427 return pedantic_non_lvalue_loc (loc, arg0);
12429 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12430 over COND_EXPR in cases such as floating point comparisons. */
12431 if (integer_zerop (op1)
12432 && code == COND_EXPR
12433 && integer_onep (op2)
12434 && !VECTOR_TYPE_P (type)
12435 && truth_value_p (TREE_CODE (arg0)))
12436 return pedantic_non_lvalue_loc (loc,
12437 fold_convert_loc (loc, type,
12438 invert_truthvalue_loc (loc,
12439 arg0)));
12441 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12442 if (TREE_CODE (arg0) == LT_EXPR
12443 && integer_zerop (TREE_OPERAND (arg0, 1))
12444 && integer_zerop (op2)
12445 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12447 /* sign_bit_p looks through both zero and sign extensions,
12448 but for this optimization only sign extensions are
12449 usable. */
12450 tree tem2 = TREE_OPERAND (arg0, 0);
12451 while (tem != tem2)
12453 if (TREE_CODE (tem2) != NOP_EXPR
12454 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12456 tem = NULL_TREE;
12457 break;
12459 tem2 = TREE_OPERAND (tem2, 0);
12461 /* sign_bit_p only checks ARG1 bits within A's precision.
12462 If <sign bit of A> has wider type than A, bits outside
12463 of A's precision in <sign bit of A> need to be checked.
12464 If they are all 0, this optimization needs to be done
12465 in unsigned A's type, if they are all 1 in signed A's type,
12466 otherwise this can't be done. */
12467 if (tem
12468 && TYPE_PRECISION (TREE_TYPE (tem))
12469 < TYPE_PRECISION (TREE_TYPE (arg1))
12470 && TYPE_PRECISION (TREE_TYPE (tem))
12471 < TYPE_PRECISION (type))
12473 int inner_width, outer_width;
12474 tree tem_type;
12476 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12477 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12478 if (outer_width > TYPE_PRECISION (type))
12479 outer_width = TYPE_PRECISION (type);
12481 wide_int mask = wi::shifted_mask
12482 (inner_width, outer_width - inner_width, false,
12483 TYPE_PRECISION (TREE_TYPE (arg1)));
12485 wide_int common = mask & wi::to_wide (arg1);
12486 if (common == mask)
12488 tem_type = signed_type_for (TREE_TYPE (tem));
12489 tem = fold_convert_loc (loc, tem_type, tem);
12491 else if (common == 0)
12493 tem_type = unsigned_type_for (TREE_TYPE (tem));
12494 tem = fold_convert_loc (loc, tem_type, tem);
12496 else
12497 tem = NULL;
12500 if (tem)
12501 return
12502 fold_convert_loc (loc, type,
12503 fold_build2_loc (loc, BIT_AND_EXPR,
12504 TREE_TYPE (tem), tem,
12505 fold_convert_loc (loc,
12506 TREE_TYPE (tem),
12507 arg1)));
12510 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12511 already handled above. */
12512 if (TREE_CODE (arg0) == BIT_AND_EXPR
12513 && integer_onep (TREE_OPERAND (arg0, 1))
12514 && integer_zerop (op2)
12515 && integer_pow2p (arg1))
12517 tree tem = TREE_OPERAND (arg0, 0);
12518 STRIP_NOPS (tem);
12519 if (TREE_CODE (tem) == RSHIFT_EXPR
12520 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
12521 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
12522 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
12523 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12524 fold_convert_loc (loc, type,
12525 TREE_OPERAND (tem, 0)),
12526 op1);
12529 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12530 is probably obsolete because the first operand should be a
12531 truth value (that's why we have the two cases above), but let's
12532 leave it in until we can confirm this for all front-ends. */
12533 if (integer_zerop (op2)
12534 && TREE_CODE (arg0) == NE_EXPR
12535 && integer_zerop (TREE_OPERAND (arg0, 1))
12536 && integer_pow2p (arg1)
12537 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12538 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12539 arg1, OEP_ONLY_CONST)
12540 /* operand_equal_p compares just value, not precision, so e.g.
12541 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
12542 second operand 32-bit -128, which is not a power of two (or vice
12543 versa. */
12544 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
12545 return pedantic_non_lvalue_loc (loc,
12546 fold_convert_loc (loc, type,
12547 TREE_OPERAND (arg0,
12548 0)));
12550 /* Disable the transformations below for vectors, since
12551 fold_binary_op_with_conditional_arg may undo them immediately,
12552 yielding an infinite loop. */
12553 if (code == VEC_COND_EXPR)
12554 return NULL_TREE;
12556 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12557 if (integer_zerop (op2)
12558 && truth_value_p (TREE_CODE (arg0))
12559 && truth_value_p (TREE_CODE (arg1))
12560 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12561 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
12562 : TRUTH_ANDIF_EXPR,
12563 type, fold_convert_loc (loc, type, arg0), op1);
12565 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12566 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
12567 && truth_value_p (TREE_CODE (arg0))
12568 && truth_value_p (TREE_CODE (arg1))
12569 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12571 location_t loc0 = expr_location_or (arg0, loc);
12572 /* Only perform transformation if ARG0 is easily inverted. */
12573 tem = fold_invert_truthvalue (loc0, arg0);
12574 if (tem)
12575 return fold_build2_loc (loc, code == VEC_COND_EXPR
12576 ? BIT_IOR_EXPR
12577 : TRUTH_ORIF_EXPR,
12578 type, fold_convert_loc (loc, type, tem),
12579 op1);
12582 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12583 if (integer_zerop (arg1)
12584 && truth_value_p (TREE_CODE (arg0))
12585 && truth_value_p (TREE_CODE (op2))
12586 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12588 location_t loc0 = expr_location_or (arg0, loc);
12589 /* Only perform transformation if ARG0 is easily inverted. */
12590 tem = fold_invert_truthvalue (loc0, arg0);
12591 if (tem)
12592 return fold_build2_loc (loc, code == VEC_COND_EXPR
12593 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
12594 type, fold_convert_loc (loc, type, tem),
12595 op2);
12598 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12599 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
12600 && truth_value_p (TREE_CODE (arg0))
12601 && truth_value_p (TREE_CODE (op2))
12602 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12603 return fold_build2_loc (loc, code == VEC_COND_EXPR
12604 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
12605 type, fold_convert_loc (loc, type, arg0), op2);
12607 return NULL_TREE;
12609 case CALL_EXPR:
12610 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12611 of fold_ternary on them. */
12612 gcc_unreachable ();
12614 case BIT_FIELD_REF:
12615 if (TREE_CODE (arg0) == VECTOR_CST
12616 && (type == TREE_TYPE (TREE_TYPE (arg0))
12617 || (VECTOR_TYPE_P (type)
12618 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
12619 && tree_fits_uhwi_p (op1)
12620 && tree_fits_uhwi_p (op2))
12622 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
12623 unsigned HOST_WIDE_INT width
12624 = (TREE_CODE (eltype) == BOOLEAN_TYPE
12625 ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
12626 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
12627 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
12629 if (n != 0
12630 && (idx % width) == 0
12631 && (n % width) == 0
12632 && known_le ((idx + n) / width,
12633 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
12635 idx = idx / width;
12636 n = n / width;
12638 if (TREE_CODE (arg0) == VECTOR_CST)
12640 if (n == 1)
12642 tem = VECTOR_CST_ELT (arg0, idx);
12643 if (VECTOR_TYPE_P (type))
12644 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
12645 return tem;
12648 tree_vector_builder vals (type, n, 1);
12649 for (unsigned i = 0; i < n; ++i)
12650 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
12651 return vals.build ();
12656 /* On constants we can use native encode/interpret to constant
12657 fold (nearly) all BIT_FIELD_REFs. */
12658 if (CONSTANT_CLASS_P (arg0)
12659 && can_native_interpret_type_p (type)
12660 && BITS_PER_UNIT == 8
12661 && tree_fits_uhwi_p (op1)
12662 && tree_fits_uhwi_p (op2))
12664 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12665 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
12666 /* Limit us to a reasonable amount of work. To relax the
12667 other limitations we need bit-shifting of the buffer
12668 and rounding up the size. */
12669 if (bitpos % BITS_PER_UNIT == 0
12670 && bitsize % BITS_PER_UNIT == 0
12671 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
12673 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
12674 unsigned HOST_WIDE_INT len
12675 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
12676 bitpos / BITS_PER_UNIT);
12677 if (len > 0
12678 && len * BITS_PER_UNIT >= bitsize)
12680 tree v = native_interpret_expr (type, b,
12681 bitsize / BITS_PER_UNIT);
12682 if (v)
12683 return v;
12688 return NULL_TREE;
12690 case VEC_PERM_EXPR:
12691 /* Perform constant folding of BIT_INSERT_EXPR. */
12692 if (TREE_CODE (arg2) == VECTOR_CST
12693 && TREE_CODE (op0) == VECTOR_CST
12694 && TREE_CODE (op1) == VECTOR_CST)
12696 /* Build a vector of integers from the tree mask. */
12697 vec_perm_builder builder;
12698 if (!tree_to_vec_perm_builder (&builder, arg2))
12699 return NULL_TREE;
12701 /* Create a vec_perm_indices for the integer vector. */
12702 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
12703 bool single_arg = (op0 == op1);
12704 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
12705 return fold_vec_perm (type, op0, op1, sel);
12707 return NULL_TREE;
12709 case BIT_INSERT_EXPR:
12710 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
12711 if (TREE_CODE (arg0) == INTEGER_CST
12712 && TREE_CODE (arg1) == INTEGER_CST)
12714 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12715 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
12716 wide_int tem = (wi::to_wide (arg0)
12717 & wi::shifted_mask (bitpos, bitsize, true,
12718 TYPE_PRECISION (type)));
12719 wide_int tem2
12720 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
12721 bitsize), bitpos);
12722 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
12724 else if (TREE_CODE (arg0) == VECTOR_CST
12725 && CONSTANT_CLASS_P (arg1)
12726 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
12727 TREE_TYPE (arg1)))
12729 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12730 unsigned HOST_WIDE_INT elsize
12731 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
12732 if (bitpos % elsize == 0)
12734 unsigned k = bitpos / elsize;
12735 unsigned HOST_WIDE_INT nelts;
12736 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
12737 return arg0;
12738 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
12740 tree_vector_builder elts (type, nelts, 1);
12741 elts.quick_grow (nelts);
12742 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
12743 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
12744 return elts.build ();
12748 return NULL_TREE;
12750 default:
12751 return NULL_TREE;
12752 } /* switch (code) */
12755 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
12756 of an array (or vector). *CTOR_IDX if non-NULL is updated with the
12757 constructor element index of the value returned. If the element is
12758 not found NULL_TREE is returned and *CTOR_IDX is updated to
12759 the index of the element after the ACCESS_INDEX position (which
12760 may be outside of the CTOR array). */
12762 tree
12763 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
12764 unsigned *ctor_idx)
12766 tree index_type = NULL_TREE;
12767 signop index_sgn = UNSIGNED;
12768 offset_int low_bound = 0;
12770 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
12772 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
12773 if (domain_type && TYPE_MIN_VALUE (domain_type))
12775 /* Static constructors for variably sized objects makes no sense. */
12776 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
12777 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
12778 /* ??? When it is obvious that the range is signed, treat it so. */
12779 if (TYPE_UNSIGNED (index_type)
12780 && TYPE_MAX_VALUE (domain_type)
12781 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
12782 TYPE_MIN_VALUE (domain_type)))
12784 index_sgn = SIGNED;
12785 low_bound
12786 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
12787 SIGNED);
12789 else
12791 index_sgn = TYPE_SIGN (index_type);
12792 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
12797 if (index_type)
12798 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
12799 index_sgn);
12801 offset_int index = low_bound;
12802 if (index_type)
12803 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12805 offset_int max_index = index;
12806 unsigned cnt;
12807 tree cfield, cval;
12808 bool first_p = true;
12810 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
12812 /* Array constructor might explicitly set index, or specify a range,
12813 or leave index NULL meaning that it is next index after previous
12814 one. */
12815 if (cfield)
12817 if (TREE_CODE (cfield) == INTEGER_CST)
12818 max_index = index
12819 = offset_int::from (wi::to_wide (cfield), index_sgn);
12820 else
12822 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
12823 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
12824 index_sgn);
12825 max_index
12826 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
12827 index_sgn);
12828 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
12831 else if (!first_p)
12833 index = max_index + 1;
12834 if (index_type)
12835 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12836 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
12837 max_index = index;
12839 else
12840 first_p = false;
12842 /* Do we have match? */
12843 if (wi::cmp (access_index, index, index_sgn) >= 0)
12845 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
12847 if (ctor_idx)
12848 *ctor_idx = cnt;
12849 return cval;
12852 else if (in_gimple_form)
12853 /* We're past the element we search for. Note during parsing
12854 the elements might not be sorted.
12855 ??? We should use a binary search and a flag on the
12856 CONSTRUCTOR as to whether elements are sorted in declaration
12857 order. */
12858 break;
12860 if (ctor_idx)
12861 *ctor_idx = cnt;
12862 return NULL_TREE;
12865 /* Perform constant folding and related simplification of EXPR.
12866 The related simplifications include x*1 => x, x*0 => 0, etc.,
12867 and application of the associative law.
12868 NOP_EXPR conversions may be removed freely (as long as we
12869 are careful not to change the type of the overall expression).
12870 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12871 but we can constant-fold them if they have constant operands. */
12873 #ifdef ENABLE_FOLD_CHECKING
12874 # define fold(x) fold_1 (x)
12875 static tree fold_1 (tree);
12876 static
12877 #endif
12878 tree
12879 fold (tree expr)
12881 const tree t = expr;
12882 enum tree_code code = TREE_CODE (t);
12883 enum tree_code_class kind = TREE_CODE_CLASS (code);
12884 tree tem;
12885 location_t loc = EXPR_LOCATION (expr);
12887 /* Return right away if a constant. */
12888 if (kind == tcc_constant)
12889 return t;
12891 /* CALL_EXPR-like objects with variable numbers of operands are
12892 treated specially. */
12893 if (kind == tcc_vl_exp)
12895 if (code == CALL_EXPR)
12897 tem = fold_call_expr (loc, expr, false);
12898 return tem ? tem : expr;
12900 return expr;
12903 if (IS_EXPR_CODE_CLASS (kind))
12905 tree type = TREE_TYPE (t);
12906 tree op0, op1, op2;
12908 switch (TREE_CODE_LENGTH (code))
12910 case 1:
12911 op0 = TREE_OPERAND (t, 0);
12912 tem = fold_unary_loc (loc, code, type, op0);
12913 return tem ? tem : expr;
12914 case 2:
12915 op0 = TREE_OPERAND (t, 0);
12916 op1 = TREE_OPERAND (t, 1);
12917 tem = fold_binary_loc (loc, code, type, op0, op1);
12918 return tem ? tem : expr;
12919 case 3:
12920 op0 = TREE_OPERAND (t, 0);
12921 op1 = TREE_OPERAND (t, 1);
12922 op2 = TREE_OPERAND (t, 2);
12923 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12924 return tem ? tem : expr;
12925 default:
12926 break;
12930 switch (code)
12932 case ARRAY_REF:
12934 tree op0 = TREE_OPERAND (t, 0);
12935 tree op1 = TREE_OPERAND (t, 1);
12937 if (TREE_CODE (op1) == INTEGER_CST
12938 && TREE_CODE (op0) == CONSTRUCTOR
12939 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12941 tree val = get_array_ctor_element_at_index (op0,
12942 wi::to_offset (op1));
12943 if (val)
12944 return val;
12947 return t;
12950 /* Return a VECTOR_CST if possible. */
12951 case CONSTRUCTOR:
12953 tree type = TREE_TYPE (t);
12954 if (TREE_CODE (type) != VECTOR_TYPE)
12955 return t;
12957 unsigned i;
12958 tree val;
12959 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12960 if (! CONSTANT_CLASS_P (val))
12961 return t;
12963 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12966 case CONST_DECL:
12967 return fold (DECL_INITIAL (t));
12969 default:
12970 return t;
12971 } /* switch (code) */
12974 #ifdef ENABLE_FOLD_CHECKING
12975 #undef fold
12977 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12978 hash_table<nofree_ptr_hash<const tree_node> > *);
12979 static void fold_check_failed (const_tree, const_tree);
12980 void print_fold_checksum (const_tree);
12982 /* When --enable-checking=fold, compute a digest of expr before
12983 and after actual fold call to see if fold did not accidentally
12984 change original expr. */
12986 tree
12987 fold (tree expr)
12989 tree ret;
12990 struct md5_ctx ctx;
12991 unsigned char checksum_before[16], checksum_after[16];
12992 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12994 md5_init_ctx (&ctx);
12995 fold_checksum_tree (expr, &ctx, &ht);
12996 md5_finish_ctx (&ctx, checksum_before);
12997 ht.empty ();
12999 ret = fold_1 (expr);
13001 md5_init_ctx (&ctx);
13002 fold_checksum_tree (expr, &ctx, &ht);
13003 md5_finish_ctx (&ctx, checksum_after);
13005 if (memcmp (checksum_before, checksum_after, 16))
13006 fold_check_failed (expr, ret);
13008 return ret;
13011 void
13012 print_fold_checksum (const_tree expr)
13014 struct md5_ctx ctx;
13015 unsigned char checksum[16], cnt;
13016 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13018 md5_init_ctx (&ctx);
13019 fold_checksum_tree (expr, &ctx, &ht);
13020 md5_finish_ctx (&ctx, checksum);
13021 for (cnt = 0; cnt < 16; ++cnt)
13022 fprintf (stderr, "%02x", checksum[cnt]);
13023 putc ('\n', stderr);
13026 static void
13027 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13029 internal_error ("fold check: original tree changed by fold");
13032 static void
13033 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13034 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13036 const tree_node **slot;
13037 enum tree_code code;
13038 union tree_node *buf;
13039 int i, len;
13041 recursive_label:
13042 if (expr == NULL)
13043 return;
13044 slot = ht->find_slot (expr, INSERT);
13045 if (*slot != NULL)
13046 return;
13047 *slot = expr;
13048 code = TREE_CODE (expr);
13049 if (TREE_CODE_CLASS (code) == tcc_declaration
13050 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13052 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13053 size_t sz = tree_size (expr);
13054 buf = XALLOCAVAR (union tree_node, sz);
13055 memcpy ((char *) buf, expr, sz);
13056 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13057 buf->decl_with_vis.symtab_node = NULL;
13058 buf->base.nowarning_flag = 0;
13059 expr = (tree) buf;
13061 else if (TREE_CODE_CLASS (code) == tcc_type
13062 && (TYPE_POINTER_TO (expr)
13063 || TYPE_REFERENCE_TO (expr)
13064 || TYPE_CACHED_VALUES_P (expr)
13065 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13066 || TYPE_NEXT_VARIANT (expr)
13067 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13069 /* Allow these fields to be modified. */
13070 tree tmp;
13071 size_t sz = tree_size (expr);
13072 buf = XALLOCAVAR (union tree_node, sz);
13073 memcpy ((char *) buf, expr, sz);
13074 expr = tmp = (tree) buf;
13075 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13076 TYPE_POINTER_TO (tmp) = NULL;
13077 TYPE_REFERENCE_TO (tmp) = NULL;
13078 TYPE_NEXT_VARIANT (tmp) = NULL;
13079 TYPE_ALIAS_SET (tmp) = -1;
13080 if (TYPE_CACHED_VALUES_P (tmp))
13082 TYPE_CACHED_VALUES_P (tmp) = 0;
13083 TYPE_CACHED_VALUES (tmp) = NULL;
13086 else if (TREE_NO_WARNING (expr) && (DECL_P (expr) || EXPR_P (expr)))
13088 /* Allow TREE_NO_WARNING to be set. Perhaps we shouldn't allow that
13089 and change builtins.c etc. instead - see PR89543. */
13090 size_t sz = tree_size (expr);
13091 buf = XALLOCAVAR (union tree_node, sz);
13092 memcpy ((char *) buf, expr, sz);
13093 buf->base.nowarning_flag = 0;
13094 expr = (tree) buf;
13096 md5_process_bytes (expr, tree_size (expr), ctx);
13097 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13098 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13099 if (TREE_CODE_CLASS (code) != tcc_type
13100 && TREE_CODE_CLASS (code) != tcc_declaration
13101 && code != TREE_LIST
13102 && code != SSA_NAME
13103 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13104 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13105 switch (TREE_CODE_CLASS (code))
13107 case tcc_constant:
13108 switch (code)
13110 case STRING_CST:
13111 md5_process_bytes (TREE_STRING_POINTER (expr),
13112 TREE_STRING_LENGTH (expr), ctx);
13113 break;
13114 case COMPLEX_CST:
13115 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13116 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13117 break;
13118 case VECTOR_CST:
13119 len = vector_cst_encoded_nelts (expr);
13120 for (i = 0; i < len; ++i)
13121 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13122 break;
13123 default:
13124 break;
13126 break;
13127 case tcc_exceptional:
13128 switch (code)
13130 case TREE_LIST:
13131 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13132 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13133 expr = TREE_CHAIN (expr);
13134 goto recursive_label;
13135 break;
13136 case TREE_VEC:
13137 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13138 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13139 break;
13140 default:
13141 break;
13143 break;
13144 case tcc_expression:
13145 case tcc_reference:
13146 case tcc_comparison:
13147 case tcc_unary:
13148 case tcc_binary:
13149 case tcc_statement:
13150 case tcc_vl_exp:
13151 len = TREE_OPERAND_LENGTH (expr);
13152 for (i = 0; i < len; ++i)
13153 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13154 break;
13155 case tcc_declaration:
13156 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13157 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13158 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13160 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13161 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13162 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13163 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13164 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13167 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13169 if (TREE_CODE (expr) == FUNCTION_DECL)
13171 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13172 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13174 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13176 break;
13177 case tcc_type:
13178 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13179 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13180 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13181 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13182 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13183 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13184 if (INTEGRAL_TYPE_P (expr)
13185 || SCALAR_FLOAT_TYPE_P (expr))
13187 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13188 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13190 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13191 if (TREE_CODE (expr) == RECORD_TYPE
13192 || TREE_CODE (expr) == UNION_TYPE
13193 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13194 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13195 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13196 break;
13197 default:
13198 break;
13202 /* Helper function for outputting the checksum of a tree T. When
13203 debugging with gdb, you can "define mynext" to be "next" followed
13204 by "call debug_fold_checksum (op0)", then just trace down till the
13205 outputs differ. */
13207 DEBUG_FUNCTION void
13208 debug_fold_checksum (const_tree t)
13210 int i;
13211 unsigned char checksum[16];
13212 struct md5_ctx ctx;
13213 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13215 md5_init_ctx (&ctx);
13216 fold_checksum_tree (t, &ctx, &ht);
13217 md5_finish_ctx (&ctx, checksum);
13218 ht.empty ();
13220 for (i = 0; i < 16; i++)
13221 fprintf (stderr, "%d ", checksum[i]);
13223 fprintf (stderr, "\n");
13226 #endif
13228 /* Fold a unary tree expression with code CODE of type TYPE with an
13229 operand OP0. LOC is the location of the resulting expression.
13230 Return a folded expression if successful. Otherwise, return a tree
13231 expression with code CODE of type TYPE with an operand OP0. */
13233 tree
13234 fold_build1_loc (location_t loc,
13235 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13237 tree tem;
13238 #ifdef ENABLE_FOLD_CHECKING
13239 unsigned char checksum_before[16], checksum_after[16];
13240 struct md5_ctx ctx;
13241 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13243 md5_init_ctx (&ctx);
13244 fold_checksum_tree (op0, &ctx, &ht);
13245 md5_finish_ctx (&ctx, checksum_before);
13246 ht.empty ();
13247 #endif
13249 tem = fold_unary_loc (loc, code, type, op0);
13250 if (!tem)
13251 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13253 #ifdef ENABLE_FOLD_CHECKING
13254 md5_init_ctx (&ctx);
13255 fold_checksum_tree (op0, &ctx, &ht);
13256 md5_finish_ctx (&ctx, checksum_after);
13258 if (memcmp (checksum_before, checksum_after, 16))
13259 fold_check_failed (op0, tem);
13260 #endif
13261 return tem;
13264 /* Fold a binary tree expression with code CODE of type TYPE with
13265 operands OP0 and OP1. LOC is the location of the resulting
13266 expression. Return a folded expression if successful. Otherwise,
13267 return a tree expression with code CODE of type TYPE with operands
13268 OP0 and OP1. */
13270 tree
13271 fold_build2_loc (location_t loc,
13272 enum tree_code code, tree type, tree op0, tree op1
13273 MEM_STAT_DECL)
13275 tree tem;
13276 #ifdef ENABLE_FOLD_CHECKING
13277 unsigned char checksum_before_op0[16],
13278 checksum_before_op1[16],
13279 checksum_after_op0[16],
13280 checksum_after_op1[16];
13281 struct md5_ctx ctx;
13282 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13284 md5_init_ctx (&ctx);
13285 fold_checksum_tree (op0, &ctx, &ht);
13286 md5_finish_ctx (&ctx, checksum_before_op0);
13287 ht.empty ();
13289 md5_init_ctx (&ctx);
13290 fold_checksum_tree (op1, &ctx, &ht);
13291 md5_finish_ctx (&ctx, checksum_before_op1);
13292 ht.empty ();
13293 #endif
13295 tem = fold_binary_loc (loc, code, type, op0, op1);
13296 if (!tem)
13297 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13299 #ifdef ENABLE_FOLD_CHECKING
13300 md5_init_ctx (&ctx);
13301 fold_checksum_tree (op0, &ctx, &ht);
13302 md5_finish_ctx (&ctx, checksum_after_op0);
13303 ht.empty ();
13305 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13306 fold_check_failed (op0, tem);
13308 md5_init_ctx (&ctx);
13309 fold_checksum_tree (op1, &ctx, &ht);
13310 md5_finish_ctx (&ctx, checksum_after_op1);
13312 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13313 fold_check_failed (op1, tem);
13314 #endif
13315 return tem;
13318 /* Fold a ternary tree expression with code CODE of type TYPE with
13319 operands OP0, OP1, and OP2. Return a folded expression if
13320 successful. Otherwise, return a tree expression with code CODE of
13321 type TYPE with operands OP0, OP1, and OP2. */
13323 tree
13324 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13325 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13327 tree tem;
13328 #ifdef ENABLE_FOLD_CHECKING
13329 unsigned char checksum_before_op0[16],
13330 checksum_before_op1[16],
13331 checksum_before_op2[16],
13332 checksum_after_op0[16],
13333 checksum_after_op1[16],
13334 checksum_after_op2[16];
13335 struct md5_ctx ctx;
13336 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13338 md5_init_ctx (&ctx);
13339 fold_checksum_tree (op0, &ctx, &ht);
13340 md5_finish_ctx (&ctx, checksum_before_op0);
13341 ht.empty ();
13343 md5_init_ctx (&ctx);
13344 fold_checksum_tree (op1, &ctx, &ht);
13345 md5_finish_ctx (&ctx, checksum_before_op1);
13346 ht.empty ();
13348 md5_init_ctx (&ctx);
13349 fold_checksum_tree (op2, &ctx, &ht);
13350 md5_finish_ctx (&ctx, checksum_before_op2);
13351 ht.empty ();
13352 #endif
13354 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13355 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13356 if (!tem)
13357 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13359 #ifdef ENABLE_FOLD_CHECKING
13360 md5_init_ctx (&ctx);
13361 fold_checksum_tree (op0, &ctx, &ht);
13362 md5_finish_ctx (&ctx, checksum_after_op0);
13363 ht.empty ();
13365 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13366 fold_check_failed (op0, tem);
13368 md5_init_ctx (&ctx);
13369 fold_checksum_tree (op1, &ctx, &ht);
13370 md5_finish_ctx (&ctx, checksum_after_op1);
13371 ht.empty ();
13373 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13374 fold_check_failed (op1, tem);
13376 md5_init_ctx (&ctx);
13377 fold_checksum_tree (op2, &ctx, &ht);
13378 md5_finish_ctx (&ctx, checksum_after_op2);
13380 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13381 fold_check_failed (op2, tem);
13382 #endif
13383 return tem;
13386 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13387 arguments in ARGARRAY, and a null static chain.
13388 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13389 of type TYPE from the given operands as constructed by build_call_array. */
13391 tree
13392 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13393 int nargs, tree *argarray)
13395 tree tem;
13396 #ifdef ENABLE_FOLD_CHECKING
13397 unsigned char checksum_before_fn[16],
13398 checksum_before_arglist[16],
13399 checksum_after_fn[16],
13400 checksum_after_arglist[16];
13401 struct md5_ctx ctx;
13402 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13403 int i;
13405 md5_init_ctx (&ctx);
13406 fold_checksum_tree (fn, &ctx, &ht);
13407 md5_finish_ctx (&ctx, checksum_before_fn);
13408 ht.empty ();
13410 md5_init_ctx (&ctx);
13411 for (i = 0; i < nargs; i++)
13412 fold_checksum_tree (argarray[i], &ctx, &ht);
13413 md5_finish_ctx (&ctx, checksum_before_arglist);
13414 ht.empty ();
13415 #endif
13417 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13418 if (!tem)
13419 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13421 #ifdef ENABLE_FOLD_CHECKING
13422 md5_init_ctx (&ctx);
13423 fold_checksum_tree (fn, &ctx, &ht);
13424 md5_finish_ctx (&ctx, checksum_after_fn);
13425 ht.empty ();
13427 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13428 fold_check_failed (fn, tem);
13430 md5_init_ctx (&ctx);
13431 for (i = 0; i < nargs; i++)
13432 fold_checksum_tree (argarray[i], &ctx, &ht);
13433 md5_finish_ctx (&ctx, checksum_after_arglist);
13435 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13436 fold_check_failed (NULL_TREE, tem);
13437 #endif
13438 return tem;
13441 /* Perform constant folding and related simplification of initializer
13442 expression EXPR. These behave identically to "fold_buildN" but ignore
13443 potential run-time traps and exceptions that fold must preserve. */
13445 #define START_FOLD_INIT \
13446 int saved_signaling_nans = flag_signaling_nans;\
13447 int saved_trapping_math = flag_trapping_math;\
13448 int saved_rounding_math = flag_rounding_math;\
13449 int saved_trapv = flag_trapv;\
13450 int saved_folding_initializer = folding_initializer;\
13451 flag_signaling_nans = 0;\
13452 flag_trapping_math = 0;\
13453 flag_rounding_math = 0;\
13454 flag_trapv = 0;\
13455 folding_initializer = 1;
13457 #define END_FOLD_INIT \
13458 flag_signaling_nans = saved_signaling_nans;\
13459 flag_trapping_math = saved_trapping_math;\
13460 flag_rounding_math = saved_rounding_math;\
13461 flag_trapv = saved_trapv;\
13462 folding_initializer = saved_folding_initializer;
13464 tree
13465 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13466 tree type, tree op)
13468 tree result;
13469 START_FOLD_INIT;
13471 result = fold_build1_loc (loc, code, type, op);
13473 END_FOLD_INIT;
13474 return result;
13477 tree
13478 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13479 tree type, tree op0, tree op1)
13481 tree result;
13482 START_FOLD_INIT;
13484 result = fold_build2_loc (loc, code, type, op0, op1);
13486 END_FOLD_INIT;
13487 return result;
13490 tree
13491 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
13492 int nargs, tree *argarray)
13494 tree result;
13495 START_FOLD_INIT;
13497 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
13499 END_FOLD_INIT;
13500 return result;
13503 #undef START_FOLD_INIT
13504 #undef END_FOLD_INIT
13506 /* Determine if first argument is a multiple of second argument. Return 0 if
13507 it is not, or we cannot easily determined it to be.
13509 An example of the sort of thing we care about (at this point; this routine
13510 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13511 fold cases do now) is discovering that
13513 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13515 is a multiple of
13517 SAVE_EXPR (J * 8)
13519 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13521 This code also handles discovering that
13523 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13525 is a multiple of 8 so we don't have to worry about dealing with a
13526 possible remainder.
13528 Note that we *look* inside a SAVE_EXPR only to determine how it was
13529 calculated; it is not safe for fold to do much of anything else with the
13530 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13531 at run time. For example, the latter example above *cannot* be implemented
13532 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13533 evaluation time of the original SAVE_EXPR is not necessarily the same at
13534 the time the new expression is evaluated. The only optimization of this
13535 sort that would be valid is changing
13537 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13539 divided by 8 to
13541 SAVE_EXPR (I) * SAVE_EXPR (J)
13543 (where the same SAVE_EXPR (J) is used in the original and the
13544 transformed version). */
13547 multiple_of_p (tree type, const_tree top, const_tree bottom)
13549 gimple *stmt;
13550 tree t1, op1, op2;
13552 if (operand_equal_p (top, bottom, 0))
13553 return 1;
13555 if (TREE_CODE (type) != INTEGER_TYPE)
13556 return 0;
13558 switch (TREE_CODE (top))
13560 case BIT_AND_EXPR:
13561 /* Bitwise and provides a power of two multiple. If the mask is
13562 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13563 if (!integer_pow2p (bottom))
13564 return 0;
13565 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13566 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13568 case MULT_EXPR:
13569 if (TREE_CODE (bottom) == INTEGER_CST)
13571 op1 = TREE_OPERAND (top, 0);
13572 op2 = TREE_OPERAND (top, 1);
13573 if (TREE_CODE (op1) == INTEGER_CST)
13574 std::swap (op1, op2);
13575 if (TREE_CODE (op2) == INTEGER_CST)
13577 if (multiple_of_p (type, op2, bottom))
13578 return 1;
13579 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
13580 if (multiple_of_p (type, bottom, op2))
13582 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
13583 wi::to_widest (op2));
13584 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
13586 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
13587 return multiple_of_p (type, op1, op2);
13590 return multiple_of_p (type, op1, bottom);
13593 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13594 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13596 case MINUS_EXPR:
13597 /* It is impossible to prove if op0 - op1 is multiple of bottom
13598 precisely, so be conservative here checking if both op0 and op1
13599 are multiple of bottom. Note we check the second operand first
13600 since it's usually simpler. */
13601 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13602 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13604 case PLUS_EXPR:
13605 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
13606 as op0 - 3 if the expression has unsigned type. For example,
13607 (X / 3) + 0xfffffffd is multiple of 3, but 0xfffffffd is not. */
13608 op1 = TREE_OPERAND (top, 1);
13609 if (TYPE_UNSIGNED (type)
13610 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
13611 op1 = fold_build1 (NEGATE_EXPR, type, op1);
13612 return (multiple_of_p (type, op1, bottom)
13613 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13615 case LSHIFT_EXPR:
13616 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13618 op1 = TREE_OPERAND (top, 1);
13619 /* const_binop may not detect overflow correctly,
13620 so check for it explicitly here. */
13621 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
13622 wi::to_wide (op1))
13623 && (t1 = fold_convert (type,
13624 const_binop (LSHIFT_EXPR, size_one_node,
13625 op1))) != 0
13626 && !TREE_OVERFLOW (t1))
13627 return multiple_of_p (type, t1, bottom);
13629 return 0;
13631 case NOP_EXPR:
13632 /* Can't handle conversions from non-integral or wider integral type. */
13633 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13634 || (TYPE_PRECISION (type)
13635 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13636 return 0;
13638 /* fall through */
13640 case SAVE_EXPR:
13641 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13643 case COND_EXPR:
13644 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13645 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
13647 case INTEGER_CST:
13648 if (TREE_CODE (bottom) != INTEGER_CST
13649 || integer_zerop (bottom)
13650 || (TYPE_UNSIGNED (type)
13651 && (tree_int_cst_sgn (top) < 0
13652 || tree_int_cst_sgn (bottom) < 0)))
13653 return 0;
13654 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
13655 SIGNED);
13657 case SSA_NAME:
13658 if (TREE_CODE (bottom) == INTEGER_CST
13659 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
13660 && gimple_code (stmt) == GIMPLE_ASSIGN)
13662 enum tree_code code = gimple_assign_rhs_code (stmt);
13664 /* Check for special cases to see if top is defined as multiple
13665 of bottom:
13667 top = (X & ~(bottom - 1) ; bottom is power of 2
13671 Y = X % bottom
13672 top = X - Y. */
13673 if (code == BIT_AND_EXPR
13674 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13675 && TREE_CODE (op2) == INTEGER_CST
13676 && integer_pow2p (bottom)
13677 && wi::multiple_of_p (wi::to_widest (op2),
13678 wi::to_widest (bottom), UNSIGNED))
13679 return 1;
13681 op1 = gimple_assign_rhs1 (stmt);
13682 if (code == MINUS_EXPR
13683 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13684 && TREE_CODE (op2) == SSA_NAME
13685 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
13686 && gimple_code (stmt) == GIMPLE_ASSIGN
13687 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
13688 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
13689 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
13690 return 1;
13693 /* fall through */
13695 default:
13696 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
13697 return multiple_p (wi::to_poly_widest (top),
13698 wi::to_poly_widest (bottom));
13700 return 0;
13704 #define tree_expr_nonnegative_warnv_p(X, Y) \
13705 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13707 #define RECURSE(X) \
13708 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
13710 /* Return true if CODE or TYPE is known to be non-negative. */
13712 static bool
13713 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13715 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13716 && truth_value_p (code))
13717 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13718 have a signed:1 type (where the value is -1 and 0). */
13719 return true;
13720 return false;
13723 /* Return true if (CODE OP0) is known to be non-negative. If the return
13724 value is based on the assumption that signed overflow is undefined,
13725 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13726 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13728 bool
13729 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13730 bool *strict_overflow_p, int depth)
13732 if (TYPE_UNSIGNED (type))
13733 return true;
13735 switch (code)
13737 case ABS_EXPR:
13738 /* We can't return 1 if flag_wrapv is set because
13739 ABS_EXPR<INT_MIN> = INT_MIN. */
13740 if (!ANY_INTEGRAL_TYPE_P (type))
13741 return true;
13742 if (TYPE_OVERFLOW_UNDEFINED (type))
13744 *strict_overflow_p = true;
13745 return true;
13747 break;
13749 case NON_LVALUE_EXPR:
13750 case FLOAT_EXPR:
13751 case FIX_TRUNC_EXPR:
13752 return RECURSE (op0);
13754 CASE_CONVERT:
13756 tree inner_type = TREE_TYPE (op0);
13757 tree outer_type = type;
13759 if (TREE_CODE (outer_type) == REAL_TYPE)
13761 if (TREE_CODE (inner_type) == REAL_TYPE)
13762 return RECURSE (op0);
13763 if (INTEGRAL_TYPE_P (inner_type))
13765 if (TYPE_UNSIGNED (inner_type))
13766 return true;
13767 return RECURSE (op0);
13770 else if (INTEGRAL_TYPE_P (outer_type))
13772 if (TREE_CODE (inner_type) == REAL_TYPE)
13773 return RECURSE (op0);
13774 if (INTEGRAL_TYPE_P (inner_type))
13775 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13776 && TYPE_UNSIGNED (inner_type);
13779 break;
13781 default:
13782 return tree_simple_nonnegative_warnv_p (code, type);
13785 /* We don't know sign of `t', so be conservative and return false. */
13786 return false;
13789 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13790 value is based on the assumption that signed overflow is undefined,
13791 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13792 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13794 bool
13795 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13796 tree op1, bool *strict_overflow_p,
13797 int depth)
13799 if (TYPE_UNSIGNED (type))
13800 return true;
13802 switch (code)
13804 case POINTER_PLUS_EXPR:
13805 case PLUS_EXPR:
13806 if (FLOAT_TYPE_P (type))
13807 return RECURSE (op0) && RECURSE (op1);
13809 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13810 both unsigned and at least 2 bits shorter than the result. */
13811 if (TREE_CODE (type) == INTEGER_TYPE
13812 && TREE_CODE (op0) == NOP_EXPR
13813 && TREE_CODE (op1) == NOP_EXPR)
13815 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13816 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13817 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13818 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13820 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13821 TYPE_PRECISION (inner2)) + 1;
13822 return prec < TYPE_PRECISION (type);
13825 break;
13827 case MULT_EXPR:
13828 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
13830 /* x * x is always non-negative for floating point x
13831 or without overflow. */
13832 if (operand_equal_p (op0, op1, 0)
13833 || (RECURSE (op0) && RECURSE (op1)))
13835 if (ANY_INTEGRAL_TYPE_P (type)
13836 && TYPE_OVERFLOW_UNDEFINED (type))
13837 *strict_overflow_p = true;
13838 return true;
13842 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13843 both unsigned and their total bits is shorter than the result. */
13844 if (TREE_CODE (type) == INTEGER_TYPE
13845 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
13846 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
13848 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
13849 ? TREE_TYPE (TREE_OPERAND (op0, 0))
13850 : TREE_TYPE (op0);
13851 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
13852 ? TREE_TYPE (TREE_OPERAND (op1, 0))
13853 : TREE_TYPE (op1);
13855 bool unsigned0 = TYPE_UNSIGNED (inner0);
13856 bool unsigned1 = TYPE_UNSIGNED (inner1);
13858 if (TREE_CODE (op0) == INTEGER_CST)
13859 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
13861 if (TREE_CODE (op1) == INTEGER_CST)
13862 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
13864 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
13865 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
13867 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
13868 ? tree_int_cst_min_precision (op0, UNSIGNED)
13869 : TYPE_PRECISION (inner0);
13871 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
13872 ? tree_int_cst_min_precision (op1, UNSIGNED)
13873 : TYPE_PRECISION (inner1);
13875 return precision0 + precision1 < TYPE_PRECISION (type);
13878 return false;
13880 case BIT_AND_EXPR:
13881 case MAX_EXPR:
13882 return RECURSE (op0) || RECURSE (op1);
13884 case BIT_IOR_EXPR:
13885 case BIT_XOR_EXPR:
13886 case MIN_EXPR:
13887 case RDIV_EXPR:
13888 case TRUNC_DIV_EXPR:
13889 case CEIL_DIV_EXPR:
13890 case FLOOR_DIV_EXPR:
13891 case ROUND_DIV_EXPR:
13892 return RECURSE (op0) && RECURSE (op1);
13894 case TRUNC_MOD_EXPR:
13895 return RECURSE (op0);
13897 case FLOOR_MOD_EXPR:
13898 return RECURSE (op1);
13900 case CEIL_MOD_EXPR:
13901 case ROUND_MOD_EXPR:
13902 default:
13903 return tree_simple_nonnegative_warnv_p (code, type);
13906 /* We don't know sign of `t', so be conservative and return false. */
13907 return false;
13910 /* Return true if T is known to be non-negative. If the return
13911 value is based on the assumption that signed overflow is undefined,
13912 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13913 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13915 bool
13916 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13918 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13919 return true;
13921 switch (TREE_CODE (t))
13923 case INTEGER_CST:
13924 return tree_int_cst_sgn (t) >= 0;
13926 case REAL_CST:
13927 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13929 case FIXED_CST:
13930 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13932 case COND_EXPR:
13933 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13935 case SSA_NAME:
13936 /* Limit the depth of recursion to avoid quadratic behavior.
13937 This is expected to catch almost all occurrences in practice.
13938 If this code misses important cases that unbounded recursion
13939 would not, passes that need this information could be revised
13940 to provide it through dataflow propagation. */
13941 return (!name_registered_for_update_p (t)
13942 && depth < param_max_ssa_name_query_depth
13943 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
13944 strict_overflow_p, depth));
13946 default:
13947 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13951 /* Return true if T is known to be non-negative. If the return
13952 value is based on the assumption that signed overflow is undefined,
13953 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13954 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13956 bool
13957 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13958 bool *strict_overflow_p, int depth)
13960 switch (fn)
13962 CASE_CFN_ACOS:
13963 CASE_CFN_ACOSH:
13964 CASE_CFN_CABS:
13965 CASE_CFN_COSH:
13966 CASE_CFN_ERFC:
13967 CASE_CFN_EXP:
13968 CASE_CFN_EXP10:
13969 CASE_CFN_EXP2:
13970 CASE_CFN_FABS:
13971 CASE_CFN_FDIM:
13972 CASE_CFN_HYPOT:
13973 CASE_CFN_POW10:
13974 CASE_CFN_FFS:
13975 CASE_CFN_PARITY:
13976 CASE_CFN_POPCOUNT:
13977 CASE_CFN_CLZ:
13978 CASE_CFN_CLRSB:
13979 case CFN_BUILT_IN_BSWAP16:
13980 case CFN_BUILT_IN_BSWAP32:
13981 case CFN_BUILT_IN_BSWAP64:
13982 case CFN_BUILT_IN_BSWAP128:
13983 /* Always true. */
13984 return true;
13986 CASE_CFN_SQRT:
13987 CASE_CFN_SQRT_FN:
13988 /* sqrt(-0.0) is -0.0. */
13989 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13990 return true;
13991 return RECURSE (arg0);
13993 CASE_CFN_ASINH:
13994 CASE_CFN_ATAN:
13995 CASE_CFN_ATANH:
13996 CASE_CFN_CBRT:
13997 CASE_CFN_CEIL:
13998 CASE_CFN_CEIL_FN:
13999 CASE_CFN_ERF:
14000 CASE_CFN_EXPM1:
14001 CASE_CFN_FLOOR:
14002 CASE_CFN_FLOOR_FN:
14003 CASE_CFN_FMOD:
14004 CASE_CFN_FREXP:
14005 CASE_CFN_ICEIL:
14006 CASE_CFN_IFLOOR:
14007 CASE_CFN_IRINT:
14008 CASE_CFN_IROUND:
14009 CASE_CFN_LCEIL:
14010 CASE_CFN_LDEXP:
14011 CASE_CFN_LFLOOR:
14012 CASE_CFN_LLCEIL:
14013 CASE_CFN_LLFLOOR:
14014 CASE_CFN_LLRINT:
14015 CASE_CFN_LLROUND:
14016 CASE_CFN_LRINT:
14017 CASE_CFN_LROUND:
14018 CASE_CFN_MODF:
14019 CASE_CFN_NEARBYINT:
14020 CASE_CFN_NEARBYINT_FN:
14021 CASE_CFN_RINT:
14022 CASE_CFN_RINT_FN:
14023 CASE_CFN_ROUND:
14024 CASE_CFN_ROUND_FN:
14025 CASE_CFN_ROUNDEVEN:
14026 CASE_CFN_ROUNDEVEN_FN:
14027 CASE_CFN_SCALB:
14028 CASE_CFN_SCALBLN:
14029 CASE_CFN_SCALBN:
14030 CASE_CFN_SIGNBIT:
14031 CASE_CFN_SIGNIFICAND:
14032 CASE_CFN_SINH:
14033 CASE_CFN_TANH:
14034 CASE_CFN_TRUNC:
14035 CASE_CFN_TRUNC_FN:
14036 /* True if the 1st argument is nonnegative. */
14037 return RECURSE (arg0);
14039 CASE_CFN_FMAX:
14040 CASE_CFN_FMAX_FN:
14041 /* True if the 1st OR 2nd arguments are nonnegative. */
14042 return RECURSE (arg0) || RECURSE (arg1);
14044 CASE_CFN_FMIN:
14045 CASE_CFN_FMIN_FN:
14046 /* True if the 1st AND 2nd arguments are nonnegative. */
14047 return RECURSE (arg0) && RECURSE (arg1);
14049 CASE_CFN_COPYSIGN:
14050 CASE_CFN_COPYSIGN_FN:
14051 /* True if the 2nd argument is nonnegative. */
14052 return RECURSE (arg1);
14054 CASE_CFN_POWI:
14055 /* True if the 1st argument is nonnegative or the second
14056 argument is an even integer. */
14057 if (TREE_CODE (arg1) == INTEGER_CST
14058 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14059 return true;
14060 return RECURSE (arg0);
14062 CASE_CFN_POW:
14063 /* True if the 1st argument is nonnegative or the second
14064 argument is an even integer valued real. */
14065 if (TREE_CODE (arg1) == REAL_CST)
14067 REAL_VALUE_TYPE c;
14068 HOST_WIDE_INT n;
14070 c = TREE_REAL_CST (arg1);
14071 n = real_to_integer (&c);
14072 if ((n & 1) == 0)
14074 REAL_VALUE_TYPE cint;
14075 real_from_integer (&cint, VOIDmode, n, SIGNED);
14076 if (real_identical (&c, &cint))
14077 return true;
14080 return RECURSE (arg0);
14082 default:
14083 break;
14085 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
14088 /* Return true if T is known to be non-negative. If the return
14089 value is based on the assumption that signed overflow is undefined,
14090 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14091 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14093 static bool
14094 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14096 enum tree_code code = TREE_CODE (t);
14097 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14098 return true;
14100 switch (code)
14102 case TARGET_EXPR:
14104 tree temp = TARGET_EXPR_SLOT (t);
14105 t = TARGET_EXPR_INITIAL (t);
14107 /* If the initializer is non-void, then it's a normal expression
14108 that will be assigned to the slot. */
14109 if (!VOID_TYPE_P (t))
14110 return RECURSE (t);
14112 /* Otherwise, the initializer sets the slot in some way. One common
14113 way is an assignment statement at the end of the initializer. */
14114 while (1)
14116 if (TREE_CODE (t) == BIND_EXPR)
14117 t = expr_last (BIND_EXPR_BODY (t));
14118 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14119 || TREE_CODE (t) == TRY_CATCH_EXPR)
14120 t = expr_last (TREE_OPERAND (t, 0));
14121 else if (TREE_CODE (t) == STATEMENT_LIST)
14122 t = expr_last (t);
14123 else
14124 break;
14126 if (TREE_CODE (t) == MODIFY_EXPR
14127 && TREE_OPERAND (t, 0) == temp)
14128 return RECURSE (TREE_OPERAND (t, 1));
14130 return false;
14133 case CALL_EXPR:
14135 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14136 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14138 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14139 get_call_combined_fn (t),
14140 arg0,
14141 arg1,
14142 strict_overflow_p, depth);
14144 case COMPOUND_EXPR:
14145 case MODIFY_EXPR:
14146 return RECURSE (TREE_OPERAND (t, 1));
14148 case BIND_EXPR:
14149 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
14151 case SAVE_EXPR:
14152 return RECURSE (TREE_OPERAND (t, 0));
14154 default:
14155 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14159 #undef RECURSE
14160 #undef tree_expr_nonnegative_warnv_p
14162 /* Return true if T is known to be non-negative. If the return
14163 value is based on the assumption that signed overflow is undefined,
14164 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14165 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14167 bool
14168 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14170 enum tree_code code;
14171 if (t == error_mark_node)
14172 return false;
14174 code = TREE_CODE (t);
14175 switch (TREE_CODE_CLASS (code))
14177 case tcc_binary:
14178 case tcc_comparison:
14179 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14180 TREE_TYPE (t),
14181 TREE_OPERAND (t, 0),
14182 TREE_OPERAND (t, 1),
14183 strict_overflow_p, depth);
14185 case tcc_unary:
14186 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14187 TREE_TYPE (t),
14188 TREE_OPERAND (t, 0),
14189 strict_overflow_p, depth);
14191 case tcc_constant:
14192 case tcc_declaration:
14193 case tcc_reference:
14194 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14196 default:
14197 break;
14200 switch (code)
14202 case TRUTH_AND_EXPR:
14203 case TRUTH_OR_EXPR:
14204 case TRUTH_XOR_EXPR:
14205 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14206 TREE_TYPE (t),
14207 TREE_OPERAND (t, 0),
14208 TREE_OPERAND (t, 1),
14209 strict_overflow_p, depth);
14210 case TRUTH_NOT_EXPR:
14211 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14212 TREE_TYPE (t),
14213 TREE_OPERAND (t, 0),
14214 strict_overflow_p, depth);
14216 case COND_EXPR:
14217 case CONSTRUCTOR:
14218 case OBJ_TYPE_REF:
14219 case ASSERT_EXPR:
14220 case ADDR_EXPR:
14221 case WITH_SIZE_EXPR:
14222 case SSA_NAME:
14223 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14225 default:
14226 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
14230 /* Return true if `t' is known to be non-negative. Handle warnings
14231 about undefined signed overflow. */
14233 bool
14234 tree_expr_nonnegative_p (tree t)
14236 bool ret, strict_overflow_p;
14238 strict_overflow_p = false;
14239 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14240 if (strict_overflow_p)
14241 fold_overflow_warning (("assuming signed overflow does not occur when "
14242 "determining that expression is always "
14243 "non-negative"),
14244 WARN_STRICT_OVERFLOW_MISC);
14245 return ret;
14249 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14250 For floating point we further ensure that T is not denormal.
14251 Similar logic is present in nonzero_address in rtlanal.h.
14253 If the return value is based on the assumption that signed overflow
14254 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14255 change *STRICT_OVERFLOW_P. */
14257 bool
14258 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14259 bool *strict_overflow_p)
14261 switch (code)
14263 case ABS_EXPR:
14264 return tree_expr_nonzero_warnv_p (op0,
14265 strict_overflow_p);
14267 case NOP_EXPR:
14269 tree inner_type = TREE_TYPE (op0);
14270 tree outer_type = type;
14272 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14273 && tree_expr_nonzero_warnv_p (op0,
14274 strict_overflow_p));
14276 break;
14278 case NON_LVALUE_EXPR:
14279 return tree_expr_nonzero_warnv_p (op0,
14280 strict_overflow_p);
14282 default:
14283 break;
14286 return false;
14289 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14290 For floating point we further ensure that T is not denormal.
14291 Similar logic is present in nonzero_address in rtlanal.h.
14293 If the return value is based on the assumption that signed overflow
14294 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14295 change *STRICT_OVERFLOW_P. */
14297 bool
14298 tree_binary_nonzero_warnv_p (enum tree_code code,
14299 tree type,
14300 tree op0,
14301 tree op1, bool *strict_overflow_p)
14303 bool sub_strict_overflow_p;
14304 switch (code)
14306 case POINTER_PLUS_EXPR:
14307 case PLUS_EXPR:
14308 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
14310 /* With the presence of negative values it is hard
14311 to say something. */
14312 sub_strict_overflow_p = false;
14313 if (!tree_expr_nonnegative_warnv_p (op0,
14314 &sub_strict_overflow_p)
14315 || !tree_expr_nonnegative_warnv_p (op1,
14316 &sub_strict_overflow_p))
14317 return false;
14318 /* One of operands must be positive and the other non-negative. */
14319 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14320 overflows, on a twos-complement machine the sum of two
14321 nonnegative numbers can never be zero. */
14322 return (tree_expr_nonzero_warnv_p (op0,
14323 strict_overflow_p)
14324 || tree_expr_nonzero_warnv_p (op1,
14325 strict_overflow_p));
14327 break;
14329 case MULT_EXPR:
14330 if (TYPE_OVERFLOW_UNDEFINED (type))
14332 if (tree_expr_nonzero_warnv_p (op0,
14333 strict_overflow_p)
14334 && tree_expr_nonzero_warnv_p (op1,
14335 strict_overflow_p))
14337 *strict_overflow_p = true;
14338 return true;
14341 break;
14343 case MIN_EXPR:
14344 sub_strict_overflow_p = false;
14345 if (tree_expr_nonzero_warnv_p (op0,
14346 &sub_strict_overflow_p)
14347 && tree_expr_nonzero_warnv_p (op1,
14348 &sub_strict_overflow_p))
14350 if (sub_strict_overflow_p)
14351 *strict_overflow_p = true;
14353 break;
14355 case MAX_EXPR:
14356 sub_strict_overflow_p = false;
14357 if (tree_expr_nonzero_warnv_p (op0,
14358 &sub_strict_overflow_p))
14360 if (sub_strict_overflow_p)
14361 *strict_overflow_p = true;
14363 /* When both operands are nonzero, then MAX must be too. */
14364 if (tree_expr_nonzero_warnv_p (op1,
14365 strict_overflow_p))
14366 return true;
14368 /* MAX where operand 0 is positive is positive. */
14369 return tree_expr_nonnegative_warnv_p (op0,
14370 strict_overflow_p);
14372 /* MAX where operand 1 is positive is positive. */
14373 else if (tree_expr_nonzero_warnv_p (op1,
14374 &sub_strict_overflow_p)
14375 && tree_expr_nonnegative_warnv_p (op1,
14376 &sub_strict_overflow_p))
14378 if (sub_strict_overflow_p)
14379 *strict_overflow_p = true;
14380 return true;
14382 break;
14384 case BIT_IOR_EXPR:
14385 return (tree_expr_nonzero_warnv_p (op1,
14386 strict_overflow_p)
14387 || tree_expr_nonzero_warnv_p (op0,
14388 strict_overflow_p));
14390 default:
14391 break;
14394 return false;
14397 /* Return true when T is an address and is known to be nonzero.
14398 For floating point we further ensure that T is not denormal.
14399 Similar logic is present in nonzero_address in rtlanal.h.
14401 If the return value is based on the assumption that signed overflow
14402 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14403 change *STRICT_OVERFLOW_P. */
14405 bool
14406 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14408 bool sub_strict_overflow_p;
14409 switch (TREE_CODE (t))
14411 case INTEGER_CST:
14412 return !integer_zerop (t);
14414 case ADDR_EXPR:
14416 tree base = TREE_OPERAND (t, 0);
14418 if (!DECL_P (base))
14419 base = get_base_address (base);
14421 if (base && TREE_CODE (base) == TARGET_EXPR)
14422 base = TARGET_EXPR_SLOT (base);
14424 if (!base)
14425 return false;
14427 /* For objects in symbol table check if we know they are non-zero.
14428 Don't do anything for variables and functions before symtab is built;
14429 it is quite possible that they will be declared weak later. */
14430 int nonzero_addr = maybe_nonzero_address (base);
14431 if (nonzero_addr >= 0)
14432 return nonzero_addr;
14434 /* Constants are never weak. */
14435 if (CONSTANT_CLASS_P (base))
14436 return true;
14438 return false;
14441 case COND_EXPR:
14442 sub_strict_overflow_p = false;
14443 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14444 &sub_strict_overflow_p)
14445 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14446 &sub_strict_overflow_p))
14448 if (sub_strict_overflow_p)
14449 *strict_overflow_p = true;
14450 return true;
14452 break;
14454 case SSA_NAME:
14455 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
14456 break;
14457 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
14459 default:
14460 break;
14462 return false;
14465 #define integer_valued_real_p(X) \
14466 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14468 #define RECURSE(X) \
14469 ((integer_valued_real_p) (X, depth + 1))
14471 /* Return true if the floating point result of (CODE OP0) has an
14472 integer value. We also allow +Inf, -Inf and NaN to be considered
14473 integer values. Return false for signaling NaN.
14475 DEPTH is the current nesting depth of the query. */
14477 bool
14478 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
14480 switch (code)
14482 case FLOAT_EXPR:
14483 return true;
14485 case ABS_EXPR:
14486 return RECURSE (op0);
14488 CASE_CONVERT:
14490 tree type = TREE_TYPE (op0);
14491 if (TREE_CODE (type) == INTEGER_TYPE)
14492 return true;
14493 if (TREE_CODE (type) == REAL_TYPE)
14494 return RECURSE (op0);
14495 break;
14498 default:
14499 break;
14501 return false;
14504 /* Return true if the floating point result of (CODE OP0 OP1) has an
14505 integer value. We also allow +Inf, -Inf and NaN to be considered
14506 integer values. Return false for signaling NaN.
14508 DEPTH is the current nesting depth of the query. */
14510 bool
14511 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
14513 switch (code)
14515 case PLUS_EXPR:
14516 case MINUS_EXPR:
14517 case MULT_EXPR:
14518 case MIN_EXPR:
14519 case MAX_EXPR:
14520 return RECURSE (op0) && RECURSE (op1);
14522 default:
14523 break;
14525 return false;
14528 /* Return true if the floating point result of calling FNDECL with arguments
14529 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
14530 considered integer values. Return false for signaling NaN. If FNDECL
14531 takes fewer than 2 arguments, the remaining ARGn are null.
14533 DEPTH is the current nesting depth of the query. */
14535 bool
14536 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
14538 switch (fn)
14540 CASE_CFN_CEIL:
14541 CASE_CFN_CEIL_FN:
14542 CASE_CFN_FLOOR:
14543 CASE_CFN_FLOOR_FN:
14544 CASE_CFN_NEARBYINT:
14545 CASE_CFN_NEARBYINT_FN:
14546 CASE_CFN_RINT:
14547 CASE_CFN_RINT_FN:
14548 CASE_CFN_ROUND:
14549 CASE_CFN_ROUND_FN:
14550 CASE_CFN_ROUNDEVEN:
14551 CASE_CFN_ROUNDEVEN_FN:
14552 CASE_CFN_TRUNC:
14553 CASE_CFN_TRUNC_FN:
14554 return true;
14556 CASE_CFN_FMIN:
14557 CASE_CFN_FMIN_FN:
14558 CASE_CFN_FMAX:
14559 CASE_CFN_FMAX_FN:
14560 return RECURSE (arg0) && RECURSE (arg1);
14562 default:
14563 break;
14565 return false;
14568 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
14569 has an integer value. We also allow +Inf, -Inf and NaN to be
14570 considered integer values. Return false for signaling NaN.
14572 DEPTH is the current nesting depth of the query. */
14574 bool
14575 integer_valued_real_single_p (tree t, int depth)
14577 switch (TREE_CODE (t))
14579 case REAL_CST:
14580 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
14582 case COND_EXPR:
14583 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14585 case SSA_NAME:
14586 /* Limit the depth of recursion to avoid quadratic behavior.
14587 This is expected to catch almost all occurrences in practice.
14588 If this code misses important cases that unbounded recursion
14589 would not, passes that need this information could be revised
14590 to provide it through dataflow propagation. */
14591 return (!name_registered_for_update_p (t)
14592 && depth < param_max_ssa_name_query_depth
14593 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
14594 depth));
14596 default:
14597 break;
14599 return false;
14602 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
14603 has an integer value. We also allow +Inf, -Inf and NaN to be
14604 considered integer values. Return false for signaling NaN.
14606 DEPTH is the current nesting depth of the query. */
14608 static bool
14609 integer_valued_real_invalid_p (tree t, int depth)
14611 switch (TREE_CODE (t))
14613 case COMPOUND_EXPR:
14614 case MODIFY_EXPR:
14615 case BIND_EXPR:
14616 return RECURSE (TREE_OPERAND (t, 1));
14618 case SAVE_EXPR:
14619 return RECURSE (TREE_OPERAND (t, 0));
14621 default:
14622 break;
14624 return false;
14627 #undef RECURSE
14628 #undef integer_valued_real_p
14630 /* Return true if the floating point expression T has an integer value.
14631 We also allow +Inf, -Inf and NaN to be considered integer values.
14632 Return false for signaling NaN.
14634 DEPTH is the current nesting depth of the query. */
14636 bool
14637 integer_valued_real_p (tree t, int depth)
14639 if (t == error_mark_node)
14640 return false;
14642 STRIP_ANY_LOCATION_WRAPPER (t);
14644 tree_code code = TREE_CODE (t);
14645 switch (TREE_CODE_CLASS (code))
14647 case tcc_binary:
14648 case tcc_comparison:
14649 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
14650 TREE_OPERAND (t, 1), depth);
14652 case tcc_unary:
14653 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
14655 case tcc_constant:
14656 case tcc_declaration:
14657 case tcc_reference:
14658 return integer_valued_real_single_p (t, depth);
14660 default:
14661 break;
14664 switch (code)
14666 case COND_EXPR:
14667 case SSA_NAME:
14668 return integer_valued_real_single_p (t, depth);
14670 case CALL_EXPR:
14672 tree arg0 = (call_expr_nargs (t) > 0
14673 ? CALL_EXPR_ARG (t, 0)
14674 : NULL_TREE);
14675 tree arg1 = (call_expr_nargs (t) > 1
14676 ? CALL_EXPR_ARG (t, 1)
14677 : NULL_TREE);
14678 return integer_valued_real_call_p (get_call_combined_fn (t),
14679 arg0, arg1, depth);
14682 default:
14683 return integer_valued_real_invalid_p (t, depth);
14687 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14688 attempt to fold the expression to a constant without modifying TYPE,
14689 OP0 or OP1.
14691 If the expression could be simplified to a constant, then return
14692 the constant. If the expression would not be simplified to a
14693 constant, then return NULL_TREE. */
14695 tree
14696 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14698 tree tem = fold_binary (code, type, op0, op1);
14699 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14702 /* Given the components of a unary expression CODE, TYPE and OP0,
14703 attempt to fold the expression to a constant without modifying
14704 TYPE or OP0.
14706 If the expression could be simplified to a constant, then return
14707 the constant. If the expression would not be simplified to a
14708 constant, then return NULL_TREE. */
14710 tree
14711 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14713 tree tem = fold_unary (code, type, op0);
14714 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14717 /* If EXP represents referencing an element in a constant string
14718 (either via pointer arithmetic or array indexing), return the
14719 tree representing the value accessed, otherwise return NULL. */
14721 tree
14722 fold_read_from_constant_string (tree exp)
14724 if ((TREE_CODE (exp) == INDIRECT_REF
14725 || TREE_CODE (exp) == ARRAY_REF)
14726 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14728 tree exp1 = TREE_OPERAND (exp, 0);
14729 tree index;
14730 tree string;
14731 location_t loc = EXPR_LOCATION (exp);
14733 if (TREE_CODE (exp) == INDIRECT_REF)
14734 string = string_constant (exp1, &index, NULL, NULL);
14735 else
14737 tree low_bound = array_ref_low_bound (exp);
14738 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
14740 /* Optimize the special-case of a zero lower bound.
14742 We convert the low_bound to sizetype to avoid some problems
14743 with constant folding. (E.g. suppose the lower bound is 1,
14744 and its mode is QI. Without the conversion,l (ARRAY
14745 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14746 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14747 if (! integer_zerop (low_bound))
14748 index = size_diffop_loc (loc, index,
14749 fold_convert_loc (loc, sizetype, low_bound));
14751 string = exp1;
14754 scalar_int_mode char_mode;
14755 if (string
14756 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14757 && TREE_CODE (string) == STRING_CST
14758 && TREE_CODE (index) == INTEGER_CST
14759 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14760 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
14761 &char_mode)
14762 && GET_MODE_SIZE (char_mode) == 1)
14763 return build_int_cst_type (TREE_TYPE (exp),
14764 (TREE_STRING_POINTER (string)
14765 [TREE_INT_CST_LOW (index)]));
14767 return NULL;
14770 /* Folds a read from vector element at IDX of vector ARG. */
14772 tree
14773 fold_read_from_vector (tree arg, poly_uint64 idx)
14775 unsigned HOST_WIDE_INT i;
14776 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
14777 && known_ge (idx, 0u)
14778 && idx.is_constant (&i))
14780 if (TREE_CODE (arg) == VECTOR_CST)
14781 return VECTOR_CST_ELT (arg, i);
14782 else if (TREE_CODE (arg) == CONSTRUCTOR)
14784 if (i >= CONSTRUCTOR_NELTS (arg))
14785 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
14786 return CONSTRUCTOR_ELT (arg, i)->value;
14789 return NULL_TREE;
14792 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14793 an integer constant, real, or fixed-point constant.
14795 TYPE is the type of the result. */
14797 static tree
14798 fold_negate_const (tree arg0, tree type)
14800 tree t = NULL_TREE;
14802 switch (TREE_CODE (arg0))
14804 case REAL_CST:
14805 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14806 break;
14808 case FIXED_CST:
14810 FIXED_VALUE_TYPE f;
14811 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14812 &(TREE_FIXED_CST (arg0)), NULL,
14813 TYPE_SATURATING (type));
14814 t = build_fixed (type, f);
14815 /* Propagate overflow flags. */
14816 if (overflow_p | TREE_OVERFLOW (arg0))
14817 TREE_OVERFLOW (t) = 1;
14818 break;
14821 default:
14822 if (poly_int_tree_p (arg0))
14824 wi::overflow_type overflow;
14825 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
14826 t = force_fit_type (type, res, 1,
14827 (overflow && ! TYPE_UNSIGNED (type))
14828 || TREE_OVERFLOW (arg0));
14829 break;
14832 gcc_unreachable ();
14835 return t;
14838 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14839 an integer constant or real constant.
14841 TYPE is the type of the result. */
14843 tree
14844 fold_abs_const (tree arg0, tree type)
14846 tree t = NULL_TREE;
14848 switch (TREE_CODE (arg0))
14850 case INTEGER_CST:
14852 /* If the value is unsigned or non-negative, then the absolute value
14853 is the same as the ordinary value. */
14854 wide_int val = wi::to_wide (arg0);
14855 wi::overflow_type overflow = wi::OVF_NONE;
14856 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
14859 /* If the value is negative, then the absolute value is
14860 its negation. */
14861 else
14862 val = wi::neg (val, &overflow);
14864 /* Force to the destination type, set TREE_OVERFLOW for signed
14865 TYPE only. */
14866 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
14868 break;
14870 case REAL_CST:
14871 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14872 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14873 else
14874 t = arg0;
14875 break;
14877 default:
14878 gcc_unreachable ();
14881 return t;
14884 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14885 constant. TYPE is the type of the result. */
14887 static tree
14888 fold_not_const (const_tree arg0, tree type)
14890 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14892 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
14895 /* Given CODE, a relational operator, the target type, TYPE and two
14896 constant operands OP0 and OP1, return the result of the
14897 relational operation. If the result is not a compile time
14898 constant, then return NULL_TREE. */
14900 static tree
14901 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14903 int result, invert;
14905 /* From here on, the only cases we handle are when the result is
14906 known to be a constant. */
14908 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14910 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14911 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14913 /* Handle the cases where either operand is a NaN. */
14914 if (real_isnan (c0) || real_isnan (c1))
14916 switch (code)
14918 case EQ_EXPR:
14919 case ORDERED_EXPR:
14920 result = 0;
14921 break;
14923 case NE_EXPR:
14924 case UNORDERED_EXPR:
14925 case UNLT_EXPR:
14926 case UNLE_EXPR:
14927 case UNGT_EXPR:
14928 case UNGE_EXPR:
14929 case UNEQ_EXPR:
14930 result = 1;
14931 break;
14933 case LT_EXPR:
14934 case LE_EXPR:
14935 case GT_EXPR:
14936 case GE_EXPR:
14937 case LTGT_EXPR:
14938 if (flag_trapping_math)
14939 return NULL_TREE;
14940 result = 0;
14941 break;
14943 default:
14944 gcc_unreachable ();
14947 return constant_boolean_node (result, type);
14950 return constant_boolean_node (real_compare (code, c0, c1), type);
14953 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14955 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14956 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14957 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14960 /* Handle equality/inequality of complex constants. */
14961 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14963 tree rcond = fold_relational_const (code, type,
14964 TREE_REALPART (op0),
14965 TREE_REALPART (op1));
14966 tree icond = fold_relational_const (code, type,
14967 TREE_IMAGPART (op0),
14968 TREE_IMAGPART (op1));
14969 if (code == EQ_EXPR)
14970 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14971 else if (code == NE_EXPR)
14972 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14973 else
14974 return NULL_TREE;
14977 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14979 if (!VECTOR_TYPE_P (type))
14981 /* Have vector comparison with scalar boolean result. */
14982 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
14983 && known_eq (VECTOR_CST_NELTS (op0),
14984 VECTOR_CST_NELTS (op1)));
14985 unsigned HOST_WIDE_INT nunits;
14986 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
14987 return NULL_TREE;
14988 for (unsigned i = 0; i < nunits; i++)
14990 tree elem0 = VECTOR_CST_ELT (op0, i);
14991 tree elem1 = VECTOR_CST_ELT (op1, i);
14992 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
14993 if (tmp == NULL_TREE)
14994 return NULL_TREE;
14995 if (integer_zerop (tmp))
14996 return constant_boolean_node (code == NE_EXPR, type);
14998 return constant_boolean_node (code == EQ_EXPR, type);
15000 tree_vector_builder elts;
15001 if (!elts.new_binary_operation (type, op0, op1, false))
15002 return NULL_TREE;
15003 unsigned int count = elts.encoded_nelts ();
15004 for (unsigned i = 0; i < count; i++)
15006 tree elem_type = TREE_TYPE (type);
15007 tree elem0 = VECTOR_CST_ELT (op0, i);
15008 tree elem1 = VECTOR_CST_ELT (op1, i);
15010 tree tem = fold_relational_const (code, elem_type,
15011 elem0, elem1);
15013 if (tem == NULL_TREE)
15014 return NULL_TREE;
15016 elts.quick_push (build_int_cst (elem_type,
15017 integer_zerop (tem) ? 0 : -1));
15020 return elts.build ();
15023 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15025 To compute GT, swap the arguments and do LT.
15026 To compute GE, do LT and invert the result.
15027 To compute LE, swap the arguments, do LT and invert the result.
15028 To compute NE, do EQ and invert the result.
15030 Therefore, the code below must handle only EQ and LT. */
15032 if (code == LE_EXPR || code == GT_EXPR)
15034 std::swap (op0, op1);
15035 code = swap_tree_comparison (code);
15038 /* Note that it is safe to invert for real values here because we
15039 have already handled the one case that it matters. */
15041 invert = 0;
15042 if (code == NE_EXPR || code == GE_EXPR)
15044 invert = 1;
15045 code = invert_tree_comparison (code, false);
15048 /* Compute a result for LT or EQ if args permit;
15049 Otherwise return T. */
15050 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15052 if (code == EQ_EXPR)
15053 result = tree_int_cst_equal (op0, op1);
15054 else
15055 result = tree_int_cst_lt (op0, op1);
15057 else
15058 return NULL_TREE;
15060 if (invert)
15061 result ^= 1;
15062 return constant_boolean_node (result, type);
15065 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15066 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15067 itself. */
15069 tree
15070 fold_build_cleanup_point_expr (tree type, tree expr)
15072 /* If the expression does not have side effects then we don't have to wrap
15073 it with a cleanup point expression. */
15074 if (!TREE_SIDE_EFFECTS (expr))
15075 return expr;
15077 /* If the expression is a return, check to see if the expression inside the
15078 return has no side effects or the right hand side of the modify expression
15079 inside the return. If either don't have side effects set we don't need to
15080 wrap the expression in a cleanup point expression. Note we don't check the
15081 left hand side of the modify because it should always be a return decl. */
15082 if (TREE_CODE (expr) == RETURN_EXPR)
15084 tree op = TREE_OPERAND (expr, 0);
15085 if (!op || !TREE_SIDE_EFFECTS (op))
15086 return expr;
15087 op = TREE_OPERAND (op, 1);
15088 if (!TREE_SIDE_EFFECTS (op))
15089 return expr;
15092 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
15095 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15096 of an indirection through OP0, or NULL_TREE if no simplification is
15097 possible. */
15099 tree
15100 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15102 tree sub = op0;
15103 tree subtype;
15104 poly_uint64 const_op01;
15106 STRIP_NOPS (sub);
15107 subtype = TREE_TYPE (sub);
15108 if (!POINTER_TYPE_P (subtype)
15109 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
15110 return NULL_TREE;
15112 if (TREE_CODE (sub) == ADDR_EXPR)
15114 tree op = TREE_OPERAND (sub, 0);
15115 tree optype = TREE_TYPE (op);
15117 /* *&CONST_DECL -> to the value of the const decl. */
15118 if (TREE_CODE (op) == CONST_DECL)
15119 return DECL_INITIAL (op);
15120 /* *&p => p; make sure to handle *&"str"[cst] here. */
15121 if (type == optype)
15123 tree fop = fold_read_from_constant_string (op);
15124 if (fop)
15125 return fop;
15126 else
15127 return op;
15129 /* *(foo *)&fooarray => fooarray[0] */
15130 else if (TREE_CODE (optype) == ARRAY_TYPE
15131 && type == TREE_TYPE (optype)
15132 && (!in_gimple_form
15133 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15135 tree type_domain = TYPE_DOMAIN (optype);
15136 tree min_val = size_zero_node;
15137 if (type_domain && TYPE_MIN_VALUE (type_domain))
15138 min_val = TYPE_MIN_VALUE (type_domain);
15139 if (in_gimple_form
15140 && TREE_CODE (min_val) != INTEGER_CST)
15141 return NULL_TREE;
15142 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15143 NULL_TREE, NULL_TREE);
15145 /* *(foo *)&complexfoo => __real__ complexfoo */
15146 else if (TREE_CODE (optype) == COMPLEX_TYPE
15147 && type == TREE_TYPE (optype))
15148 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15149 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15150 else if (VECTOR_TYPE_P (optype)
15151 && type == TREE_TYPE (optype))
15153 tree part_width = TYPE_SIZE (type);
15154 tree index = bitsize_int (0);
15155 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
15156 index);
15160 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15161 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
15163 tree op00 = TREE_OPERAND (sub, 0);
15164 tree op01 = TREE_OPERAND (sub, 1);
15166 STRIP_NOPS (op00);
15167 if (TREE_CODE (op00) == ADDR_EXPR)
15169 tree op00type;
15170 op00 = TREE_OPERAND (op00, 0);
15171 op00type = TREE_TYPE (op00);
15173 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15174 if (VECTOR_TYPE_P (op00type)
15175 && type == TREE_TYPE (op00type)
15176 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
15177 but we want to treat offsets with MSB set as negative.
15178 For the code below negative offsets are invalid and
15179 TYPE_SIZE of the element is something unsigned, so
15180 check whether op01 fits into poly_int64, which implies
15181 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
15182 then just use poly_uint64 because we want to treat the
15183 value as unsigned. */
15184 && tree_fits_poly_int64_p (op01))
15186 tree part_width = TYPE_SIZE (type);
15187 poly_uint64 max_offset
15188 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
15189 * TYPE_VECTOR_SUBPARTS (op00type));
15190 if (known_lt (const_op01, max_offset))
15192 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
15193 return fold_build3_loc (loc,
15194 BIT_FIELD_REF, type, op00,
15195 part_width, index);
15198 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15199 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15200 && type == TREE_TYPE (op00type))
15202 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
15203 const_op01))
15204 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15206 /* ((foo *)&fooarray)[1] => fooarray[1] */
15207 else if (TREE_CODE (op00type) == ARRAY_TYPE
15208 && type == TREE_TYPE (op00type))
15210 tree type_domain = TYPE_DOMAIN (op00type);
15211 tree min_val = size_zero_node;
15212 if (type_domain && TYPE_MIN_VALUE (type_domain))
15213 min_val = TYPE_MIN_VALUE (type_domain);
15214 poly_uint64 type_size, index;
15215 if (poly_int_tree_p (min_val)
15216 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
15217 && multiple_p (const_op01, type_size, &index))
15219 poly_offset_int off = index + wi::to_poly_offset (min_val);
15220 op01 = wide_int_to_tree (sizetype, off);
15221 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15222 NULL_TREE, NULL_TREE);
15228 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15229 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15230 && type == TREE_TYPE (TREE_TYPE (subtype))
15231 && (!in_gimple_form
15232 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15234 tree type_domain;
15235 tree min_val = size_zero_node;
15236 sub = build_fold_indirect_ref_loc (loc, sub);
15237 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15238 if (type_domain && TYPE_MIN_VALUE (type_domain))
15239 min_val = TYPE_MIN_VALUE (type_domain);
15240 if (in_gimple_form
15241 && TREE_CODE (min_val) != INTEGER_CST)
15242 return NULL_TREE;
15243 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15244 NULL_TREE);
15247 return NULL_TREE;
15250 /* Builds an expression for an indirection through T, simplifying some
15251 cases. */
15253 tree
15254 build_fold_indirect_ref_loc (location_t loc, tree t)
15256 tree type = TREE_TYPE (TREE_TYPE (t));
15257 tree sub = fold_indirect_ref_1 (loc, type, t);
15259 if (sub)
15260 return sub;
15262 return build1_loc (loc, INDIRECT_REF, type, t);
15265 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15267 tree
15268 fold_indirect_ref_loc (location_t loc, tree t)
15270 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15272 if (sub)
15273 return sub;
15274 else
15275 return t;
15278 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15279 whose result is ignored. The type of the returned tree need not be
15280 the same as the original expression. */
15282 tree
15283 fold_ignored_result (tree t)
15285 if (!TREE_SIDE_EFFECTS (t))
15286 return integer_zero_node;
15288 for (;;)
15289 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15291 case tcc_unary:
15292 t = TREE_OPERAND (t, 0);
15293 break;
15295 case tcc_binary:
15296 case tcc_comparison:
15297 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15298 t = TREE_OPERAND (t, 0);
15299 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15300 t = TREE_OPERAND (t, 1);
15301 else
15302 return t;
15303 break;
15305 case tcc_expression:
15306 switch (TREE_CODE (t))
15308 case COMPOUND_EXPR:
15309 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15310 return t;
15311 t = TREE_OPERAND (t, 0);
15312 break;
15314 case COND_EXPR:
15315 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15316 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15317 return t;
15318 t = TREE_OPERAND (t, 0);
15319 break;
15321 default:
15322 return t;
15324 break;
15326 default:
15327 return t;
15331 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15333 tree
15334 round_up_loc (location_t loc, tree value, unsigned int divisor)
15336 tree div = NULL_TREE;
15338 if (divisor == 1)
15339 return value;
15341 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15342 have to do anything. Only do this when we are not given a const,
15343 because in that case, this check is more expensive than just
15344 doing it. */
15345 if (TREE_CODE (value) != INTEGER_CST)
15347 div = build_int_cst (TREE_TYPE (value), divisor);
15349 if (multiple_of_p (TREE_TYPE (value), value, div))
15350 return value;
15353 /* If divisor is a power of two, simplify this to bit manipulation. */
15354 if (pow2_or_zerop (divisor))
15356 if (TREE_CODE (value) == INTEGER_CST)
15358 wide_int val = wi::to_wide (value);
15359 bool overflow_p;
15361 if ((val & (divisor - 1)) == 0)
15362 return value;
15364 overflow_p = TREE_OVERFLOW (value);
15365 val += divisor - 1;
15366 val &= (int) -divisor;
15367 if (val == 0)
15368 overflow_p = true;
15370 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
15372 else
15374 tree t;
15376 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15377 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15378 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
15379 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15382 else
15384 if (!div)
15385 div = build_int_cst (TREE_TYPE (value), divisor);
15386 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15387 value = size_binop_loc (loc, MULT_EXPR, value, div);
15390 return value;
15393 /* Likewise, but round down. */
15395 tree
15396 round_down_loc (location_t loc, tree value, int divisor)
15398 tree div = NULL_TREE;
15400 gcc_assert (divisor > 0);
15401 if (divisor == 1)
15402 return value;
15404 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15405 have to do anything. Only do this when we are not given a const,
15406 because in that case, this check is more expensive than just
15407 doing it. */
15408 if (TREE_CODE (value) != INTEGER_CST)
15410 div = build_int_cst (TREE_TYPE (value), divisor);
15412 if (multiple_of_p (TREE_TYPE (value), value, div))
15413 return value;
15416 /* If divisor is a power of two, simplify this to bit manipulation. */
15417 if (pow2_or_zerop (divisor))
15419 tree t;
15421 t = build_int_cst (TREE_TYPE (value), -divisor);
15422 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15424 else
15426 if (!div)
15427 div = build_int_cst (TREE_TYPE (value), divisor);
15428 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15429 value = size_binop_loc (loc, MULT_EXPR, value, div);
15432 return value;
15435 /* Returns the pointer to the base of the object addressed by EXP and
15436 extracts the information about the offset of the access, storing it
15437 to PBITPOS and POFFSET. */
15439 static tree
15440 split_address_to_core_and_offset (tree exp,
15441 poly_int64_pod *pbitpos, tree *poffset)
15443 tree core;
15444 machine_mode mode;
15445 int unsignedp, reversep, volatilep;
15446 poly_int64 bitsize;
15447 location_t loc = EXPR_LOCATION (exp);
15449 if (TREE_CODE (exp) == ADDR_EXPR)
15451 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15452 poffset, &mode, &unsignedp, &reversep,
15453 &volatilep);
15454 core = build_fold_addr_expr_loc (loc, core);
15456 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
15458 core = TREE_OPERAND (exp, 0);
15459 STRIP_NOPS (core);
15460 *pbitpos = 0;
15461 *poffset = TREE_OPERAND (exp, 1);
15462 if (poly_int_tree_p (*poffset))
15464 poly_offset_int tem
15465 = wi::sext (wi::to_poly_offset (*poffset),
15466 TYPE_PRECISION (TREE_TYPE (*poffset)));
15467 tem <<= LOG2_BITS_PER_UNIT;
15468 if (tem.to_shwi (pbitpos))
15469 *poffset = NULL_TREE;
15472 else
15474 core = exp;
15475 *pbitpos = 0;
15476 *poffset = NULL_TREE;
15479 return core;
15482 /* Returns true if addresses of E1 and E2 differ by a constant, false
15483 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15485 bool
15486 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
15488 tree core1, core2;
15489 poly_int64 bitpos1, bitpos2;
15490 tree toffset1, toffset2, tdiff, type;
15492 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15493 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15495 poly_int64 bytepos1, bytepos2;
15496 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
15497 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
15498 || !operand_equal_p (core1, core2, 0))
15499 return false;
15501 if (toffset1 && toffset2)
15503 type = TREE_TYPE (toffset1);
15504 if (type != TREE_TYPE (toffset2))
15505 toffset2 = fold_convert (type, toffset2);
15507 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15508 if (!cst_and_fits_in_hwi (tdiff))
15509 return false;
15511 *diff = int_cst_value (tdiff);
15513 else if (toffset1 || toffset2)
15515 /* If only one of the offsets is non-constant, the difference cannot
15516 be a constant. */
15517 return false;
15519 else
15520 *diff = 0;
15522 *diff += bytepos1 - bytepos2;
15523 return true;
15526 /* Return OFF converted to a pointer offset type suitable as offset for
15527 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
15528 tree
15529 convert_to_ptrofftype_loc (location_t loc, tree off)
15531 if (ptrofftype_p (TREE_TYPE (off)))
15532 return off;
15533 return fold_convert_loc (loc, sizetype, off);
15536 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15537 tree
15538 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
15540 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15541 ptr, convert_to_ptrofftype_loc (loc, off));
15544 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15545 tree
15546 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
15548 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15549 ptr, size_int (off));
15552 /* Return a pointer to a NUL-terminated string containing the sequence
15553 of bytes corresponding to the representation of the object referred to
15554 by SRC (or a subsequence of such bytes within it if SRC is a reference
15555 to an initialized constant array plus some constant offset).
15556 Set *STRSIZE the number of bytes in the constant sequence including
15557 the terminating NUL byte. *STRSIZE is equal to sizeof(A) - OFFSET
15558 where A is the array that stores the constant sequence that SRC points
15559 to and OFFSET is the byte offset of SRC from the beginning of A. SRC
15560 need not point to a string or even an array of characters but may point
15561 to an object of any type. */
15563 const char *
15564 getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
15566 /* The offset into the array A storing the string, and A's byte size. */
15567 tree offset_node;
15568 tree mem_size;
15570 if (strsize)
15571 *strsize = 0;
15573 if (strsize)
15574 src = byte_representation (src, &offset_node, &mem_size, NULL);
15575 else
15576 src = string_constant (src, &offset_node, &mem_size, NULL);
15577 if (!src)
15578 return NULL;
15580 unsigned HOST_WIDE_INT offset = 0;
15581 if (offset_node != NULL_TREE)
15583 if (!tree_fits_uhwi_p (offset_node))
15584 return NULL;
15585 else
15586 offset = tree_to_uhwi (offset_node);
15589 if (!tree_fits_uhwi_p (mem_size))
15590 return NULL;
15592 /* ARRAY_SIZE is the byte size of the array the constant sequence
15593 is stored in and equal to sizeof A. INIT_BYTES is the number
15594 of bytes in the constant sequence used to initialize the array,
15595 including any embedded NULs as well as the terminating NUL (for
15596 strings), but not including any trailing zeros/NULs past
15597 the terminating one appended implicitly to a string literal to
15598 zero out the remainder of the array it's stored in. For example,
15599 given:
15600 const char a[7] = "abc\0d";
15601 n = strlen (a + 1);
15602 ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1. For a valid
15603 (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
15604 is equal to strlen (A) + 1. */
15605 const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
15606 unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
15607 const char *string = TREE_STRING_POINTER (src);
15609 /* Ideally this would turn into a gcc_checking_assert over time. */
15610 if (init_bytes > array_size)
15611 init_bytes = array_size;
15613 if (init_bytes == 0 || offset >= array_size)
15614 return NULL;
15616 if (strsize)
15618 /* Compute and store the number of characters from the beginning
15619 of the substring at OFFSET to the end, including the terminating
15620 nul. Offsets past the initial length refer to null strings. */
15621 if (offset < init_bytes)
15622 *strsize = init_bytes - offset;
15623 else
15624 *strsize = 1;
15626 else
15628 tree eltype = TREE_TYPE (TREE_TYPE (src));
15629 /* Support only properly NUL-terminated single byte strings. */
15630 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
15631 return NULL;
15632 if (string[init_bytes - 1] != '\0')
15633 return NULL;
15636 return offset < init_bytes ? string + offset : "";
15639 /* Return a pointer to a NUL-terminated string corresponding to
15640 the expression STR referencing a constant string, possibly
15641 involving a constant offset. Return null if STR either doesn't
15642 reference a constant string or if it involves a nonconstant
15643 offset. */
15645 const char *
15646 c_getstr (tree str)
15648 return getbyterep (str, NULL);
15651 /* Given a tree T, compute which bits in T may be nonzero. */
15653 wide_int
15654 tree_nonzero_bits (const_tree t)
15656 switch (TREE_CODE (t))
15658 case INTEGER_CST:
15659 return wi::to_wide (t);
15660 case SSA_NAME:
15661 return get_nonzero_bits (t);
15662 case NON_LVALUE_EXPR:
15663 case SAVE_EXPR:
15664 return tree_nonzero_bits (TREE_OPERAND (t, 0));
15665 case BIT_AND_EXPR:
15666 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15667 tree_nonzero_bits (TREE_OPERAND (t, 1)));
15668 case BIT_IOR_EXPR:
15669 case BIT_XOR_EXPR:
15670 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15671 tree_nonzero_bits (TREE_OPERAND (t, 1)));
15672 case COND_EXPR:
15673 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
15674 tree_nonzero_bits (TREE_OPERAND (t, 2)));
15675 CASE_CONVERT:
15676 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15677 TYPE_PRECISION (TREE_TYPE (t)),
15678 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
15679 case PLUS_EXPR:
15680 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
15682 wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
15683 wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
15684 if (wi::bit_and (nzbits1, nzbits2) == 0)
15685 return wi::bit_or (nzbits1, nzbits2);
15687 break;
15688 case LSHIFT_EXPR:
15689 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
15691 tree type = TREE_TYPE (t);
15692 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
15693 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
15694 TYPE_PRECISION (type));
15695 return wi::neg_p (arg1)
15696 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
15697 : wi::lshift (nzbits, arg1);
15699 break;
15700 case RSHIFT_EXPR:
15701 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
15703 tree type = TREE_TYPE (t);
15704 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
15705 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
15706 TYPE_PRECISION (type));
15707 return wi::neg_p (arg1)
15708 ? wi::lshift (nzbits, -arg1)
15709 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
15711 break;
15712 default:
15713 break;
15716 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
15719 #if CHECKING_P
15721 namespace selftest {
15723 /* Helper functions for writing tests of folding trees. */
15725 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
15727 static void
15728 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
15729 tree constant)
15731 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
15734 /* Verify that the binary op (LHS CODE RHS) folds to an NON_LVALUE_EXPR
15735 wrapping WRAPPED_EXPR. */
15737 static void
15738 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
15739 tree wrapped_expr)
15741 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
15742 ASSERT_NE (wrapped_expr, result);
15743 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
15744 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
15747 /* Verify that various arithmetic binary operations are folded
15748 correctly. */
15750 static void
15751 test_arithmetic_folding ()
15753 tree type = integer_type_node;
15754 tree x = create_tmp_var_raw (type, "x");
15755 tree zero = build_zero_cst (type);
15756 tree one = build_int_cst (type, 1);
15758 /* Addition. */
15759 /* 1 <-- (0 + 1) */
15760 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
15761 one);
15762 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
15763 one);
15765 /* (nonlvalue)x <-- (x + 0) */
15766 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
15769 /* Subtraction. */
15770 /* 0 <-- (x - x) */
15771 assert_binop_folds_to_const (x, MINUS_EXPR, x,
15772 zero);
15773 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
15776 /* Multiplication. */
15777 /* 0 <-- (x * 0) */
15778 assert_binop_folds_to_const (x, MULT_EXPR, zero,
15779 zero);
15781 /* (nonlvalue)x <-- (x * 1) */
15782 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
15786 /* Verify that various binary operations on vectors are folded
15787 correctly. */
15789 static void
15790 test_vector_folding ()
15792 tree inner_type = integer_type_node;
15793 tree type = build_vector_type (inner_type, 4);
15794 tree zero = build_zero_cst (type);
15795 tree one = build_one_cst (type);
15796 tree index = build_index_vector (type, 0, 1);
15798 /* Verify equality tests that return a scalar boolean result. */
15799 tree res_type = boolean_type_node;
15800 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
15801 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
15802 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
15803 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
15804 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
15805 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
15806 index, one)));
15807 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
15808 index, index)));
15809 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
15810 index, index)));
15813 /* Verify folding of VEC_DUPLICATE_EXPRs. */
15815 static void
15816 test_vec_duplicate_folding ()
15818 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
15819 machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
15820 /* This will be 1 if VEC_MODE isn't a vector mode. */
15821 poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
15823 tree type = build_vector_type (ssizetype, nunits);
15824 tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
15825 tree dup5_cst = build_vector_from_val (type, ssize_int (5));
15826 ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
15829 /* Run all of the selftests within this file. */
15831 void
15832 fold_const_c_tests ()
15834 test_arithmetic_folding ();
15835 test_vector_folding ();
15836 test_vec_duplicate_folding ();
15839 } // namespace selftest
15841 #endif /* CHECKING_P */