/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"
77 #include "stringpool.h"
78 #include "tree-vrp.h"
79 #include "tree-ssanames.h"
80 #include "selftest.h"
81 #include "stringpool.h"
82 #include "attribs.h"
83 #include "tree-vector-builder.h"
84 #include "vec-perm-indices.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
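
/* Illustration (editorial, not in the original sources): the low three
   bits encode LT, EQ and GT, and bit 3 encodes UNORD, so the composite
   codes are bitwise ORs of the primitive ones:

     COMPCODE_LE   == COMPCODE_LT | COMPCODE_EQ                   (1 | 2 == 3)
     COMPCODE_LTGT == COMPCODE_LT | COMPCODE_GT                   (1 | 4 == 5)
     COMPCODE_NE   == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD  (13)

   With this encoding, folding "(a < b) || (a == b)" reduces to the
   comparison whose compcode is the bitwise OR of the operands' codes,
   here COMPCODE_LE, i.e. a <= b.  */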

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static bool twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static bool simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modifies X in place;
   if the location can and needs to be set, X is unshared first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
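
/* For example (illustrative only):
     div_if_zero_remainder (12, 4)  -> 3
     div_if_zero_remainder (13, 4)  -> NULL_TREE  (nonzero remainder)
   where 12, 4 and 13 stand for INTEGER_CST trees of those values.  */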

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
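
/* Typical use of the deferral machinery above (a hypothetical caller,
   not code from this file):

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     fold_undefer_overflow_warnings (result_is_used, stmt, 0);

   Passing 0 as CODE means "always use the deferred code", so the
   warning level recorded by fold_overflow_warning wins.  */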

/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
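
/* For instance, sin is odd (sin (-x) == -sin (x)), so CASE_CFN_SIN is
   listed above, while cos is even (cos (-x) == cos (x)) and is not.
   The rint/nearbyint family is odd only when -frounding-math is off,
   because under a directed dynamic rounding mode (say, toward +inf)
   rint (-x) need not equal -rint (x).  */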

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
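
/* Example: in a 32-bit signed type the only failing value is
   INT_MIN (0x80000000), since -INT_MIN is not representable;
   wi::only_sign_bit_p is precisely that test.  */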

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        /* Steps don't prevent negation.  */
        unsigned int count = vector_cst_encoded_nelts (t);
        for (unsigned int i = 0; i < count; ++i)
          if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          || HONOR_SIGNED_ZEROS (element_mode (type))
          || (ANY_INTEGRAL_TYPE_P (type)
              && ! TYPE_OVERFLOW_WRAPS (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
             && !HONOR_SIGNED_ZEROS (element_mode (type))
             && (! ANY_INTEGRAL_TYPE_P (type)
                 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      /* INT_MIN/n * n doesn't overflow, but negating one of its
         operands does if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
          && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                 && (wi::popcount
                     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
                || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
                    && (wi::popcount
                        (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
         B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
          && negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
          || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
          || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
              && ! integer_onep (TREE_OPERAND (t, 1))))
        return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
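
/* As a concrete case of the MINUS_EXPR rule above: -(a - b) may be
   rewritten as b - a for integers with wrapping overflow, but not for
   IEEE floats with signed zeros honored, since for a == b the rewrite
   would turn a result of -0.0 into +0.0.  */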

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || (ANY_INTEGRAL_TYPE_P (type)
              && !TYPE_OVERFLOW_TRAPS (type)
              && TYPE_OVERFLOW_WRAPS (type))
          || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
        tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
        if (rpart && ipart)
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        tree_vector_builder elts;
        elts.new_unary_operation (type, t, true);
        unsigned int count = elts.encoded_nelts ();
        for (unsigned int i = 0; i < count; ++i)
          {
            tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elt == NULL_TREE)
              return NULL_TREE;
            elts.quick_push (elt);
          }

        return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
        return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
         B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
          && negate_expr_p (TREE_OPERAND (t, 0)))
        return fold_build2_loc (loc, TREE_CODE (t), type,
                                negate_expr (TREE_OPERAND (t, 0)),
                                TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
           || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
           || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
               && ! integer_onep (TREE_OPERAND (t, 1))))
          && negate_expr_p (TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, TREE_CODE (t), type,
                                TREE_OPERAND (t, 0),
                                negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
            tree *minus_varp, tree *conp, tree *minus_conp,
            tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
                   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR
                       && (TREE_CODE (in) == PLUS_EXPR
                           || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
        *minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
        *minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
         when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      if (*conp)
        *minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
        *conp = *minus_conp, *minus_conp = 0;
      if (var)
        *minus_varp = var, var = 0;
      else if (*minus_varp)
        var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
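
/* Worked example (illustrative): splitting IN = x + 3 with CODE ==
   PLUS_EXPR stores the INTEGER_CST 3 in *LITP and returns x as the
   variable part, with *CONP left null.  Splitting IN = x - 3 the same
   way stores 3 in *MINUS_LITP instead, since the literal was
   subtracted.  */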

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
   a new constant in RES.  Return FALSE if we don't know how to
   evaluate CODE at compile-time.  */

bool
wide_int_binop (wide_int &res,
                enum tree_code code, const wide_int &arg1, const wide_int &arg2,
                signop sign, wi::overflow_type *overflow)
{
  wide_int tmp;
  *overflow = wi::OVF_NONE;
  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          tmp = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }
      else
        tmp = arg2;

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, tmp, sign);
      else
        res = wi::lshift (arg1, tmp);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          tmp = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }
      else
        tmp = arg2;

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, tmp);
      else
        res = wi::lrotate (arg1, tmp);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return false;
      res = wi::div_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return false;
      res = wi::div_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return false;
      res = wi::div_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return false;
      res = wi::div_round (arg1, arg2, sign, overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return false;
      res = wi::mod_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return false;
      res = wi::mod_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return false;
      res = wi::mod_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return false;
      res = wi::mod_round (arg1, arg2, sign, overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return false;
    }
  return true;
}
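
/* Note the shift/rotate normalization above: a negative count flips the
   direction, so e.g. arg1 << -2 is evaluated as arg1 >> 2, keeping the
   wi:: shift routines, which assume a non-negative count, well-defined.  */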

/* Combine two poly_int constants ARG1 and ARG2 under operation CODE to
   produce a new constant in RES.  Return FALSE if we don't know how
   to evaluate CODE at compile-time.  */

static bool
poly_int_binop (poly_wide_int &res, enum tree_code code,
                const_tree arg1, const_tree arg2,
                signop sign, wi::overflow_type *overflow)
{
  gcc_assert (NUM_POLY_INT_COEFFS != 1);
  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
  switch (code)
    {
    case PLUS_EXPR:
      res = wi::add (wi::to_poly_wide (arg1),
                     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (wi::to_poly_wide (arg1),
                     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MULT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
        res = wi::mul (wi::to_poly_wide (arg1),
                       wi::to_wide (arg2), sign, overflow);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        res = wi::mul (wi::to_poly_wide (arg2),
                       wi::to_wide (arg1), sign, overflow);
      else
        return false;
      break;

    case LSHIFT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
        res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
      else
        return false;
      break;

    case BIT_IOR_EXPR:
      if (TREE_CODE (arg2) != INTEGER_CST
          || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
                         &res))
        return false;
      break;

    default:
      return false;
    }
  return true;
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new constant.  Return NULL_TREE if we don't know how to
   evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
                 int overflowable)
{
  poly_wide_int poly_res;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      wide_int warg1 = wi::to_wide (arg1), res;
      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
      if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
        return NULL_TREE;
      poly_res = res;
    }
  else if (!poly_int_tree_p (arg1)
           || !poly_int_tree_p (arg2)
           || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
    return NULL_TREE;
  return force_fit_type (type, poly_res, overflowable,
                         (((sign == SIGNED || overflowable == -1)
                           && overflow)
                          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
}
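
/* For example (illustrative, for a 32-bit int type):
   int_const_binop (PLUS_EXPR, 2, 3) yields the INTEGER_CST 5, while
   INT_MAX + 1 yields a constant with TREE_OVERFLOW set, because for
   signed arguments the overflow is reported to force_fit_type.  */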

/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
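
/* Rationale sketch (editorial illustration): (a + b) << c equals
   (a << c) + (b << c) in modular arithmetic, so LSHIFT_EXPR distributes
   over addition in operand 1; it does not distribute in operand 2,
   since x << (a + b) equals (x << a) << b, not (x << a) + (x << b).  */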

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
        return int_const_binop (PLUS_EXPR,
                                arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISSIGNALING_NAN (d1)
              || REAL_VALUE_ISSIGNALING_NAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && real_equal (&d2, &dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        {
          /* Make the resulting NaN value a qNaN when flag_signaling_nans
             is off.  */
          d1.signalling = 0;
          t = build_real (type, d1);
          return t;
        }
      else if (REAL_VALUE_ISNAN (d2))
        {
          /* Make the resulting NaN value a qNaN when flag_signaling_nans
             is off.  */
          d2.signalling = 0;
          t = build_real (type, d2);
          return t;
        }

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          if (TREE_CODE (arg2) != FIXED_CST)
            return NULL_TREE;
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            if (TREE_CODE (arg2) != INTEGER_CST)
              return NULL_TREE;
            wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
            f2.data.high = w2.elt (1);
            f2.data.low = w2.ulow ();
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru.  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
                   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
          && VECTOR_CST_STEPPED_P (arg2))
        /* We can operate directly on the encoding if:

             a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
           implies
             (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

           Addition and subtraction are the supported operators
           for which this is true.  */
        step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
        /* We can operate directly on stepped encodings if:

             a3 - a2 == a2 - a1
           implies:
             (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

           which is true if (x -> x op c) distributes over addition.  */
        step_ok_p = distributes_over_addition_p (code, 1);
      else
        /* Similarly in reverse.  */
        step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
        return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          tree elt = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elt == NULL_TREE)
            return NULL_TREE;
          elts.quick_push (elt);
        }

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
        return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);

          tree elt = const_binop (code, elem1, arg2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elt == NULL_TREE)
            return NULL_TREE;
          elts.quick_push (elt);
        }

      return elts.build ();
    }
  return NULL_TREE;
}
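
/* Example of the stepped-vector reasoning above (illustrative): adding
   the stepped encodings {1, 2, 3, ...} and {10, 20, 30, ...} elementwise
   gives {11, 22, 33, ...}, again a stepped sequence, so PLUS_EXPR can
   work on the encoded elements alone; multiplying them gives
   {10, 40, 90, ...}, which is not stepped, so MULT_EXPR of two stepped
   vectors gets step_ok_p == false.  */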

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case VEC_SERIES_EXPR:
      if (CONSTANT_CLASS_P (arg1)
          && CONSTANT_CLASS_P (arg2))
        return build_vec_series (type, arg1, arg2);
      return NULL_TREE;

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
           && TREE_CODE (arg2) == REAL_CST)
          || (TREE_CODE (arg1) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST))
        return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
        {
          poly_offset_int res = (wi::to_poly_offset (arg1)
                                 - wi::to_poly_offset (arg2));
          return force_fit_type (type, res, 1,
                                 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
        }
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
      {
        unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;

        if (TREE_CODE (arg1) != VECTOR_CST
            || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
          return NULL_TREE;

        out_nelts = in_nelts * 2;
        gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
                    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

        tree_vector_builder elts (type, out_nelts, 1);
        for (i = 0; i < out_nelts; i++)
          {
            tree elt = (i < in_nelts
                        ? VECTOR_CST_ELT (arg1, i)
                        : VECTOR_CST_ELT (arg2, i - in_nelts));
            elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
                                      ? NOP_EXPR
                                      : code == VEC_PACK_FLOAT_EXPR
                                      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
                                      TREE_TYPE (type), elt);
            if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
              return NULL_TREE;
            elts.quick_push (elt);
          }

        return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
        unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;

        if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
          return NULL_TREE;
        out_nelts = in_nelts / 2;
        gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
                    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

        if (code == VEC_WIDEN_MULT_LO_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
        else if (code == VEC_WIDEN_MULT_HI_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
        else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
          scale = 1, ofs = 0;
        else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
          scale = 1, ofs = 1;

        tree_vector_builder elts (type, out_nelts, 1);
        for (out = 0; out < out_nelts; out++)
          {
            unsigned int in = (out << scale) + ofs;
            tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                          VECTOR_CST_ELT (arg1, in));
            tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                          VECTOR_CST_ELT (arg2, in));

            if (t1 == NULL_TREE || t2 == NULL_TREE)
              return NULL_TREE;
            tree elt = const_binop (MULT_EXPR, t1, t2);
            if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
              return NULL_TREE;
            elts.quick_push (elt);
          }

        return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
                       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}

/* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR
      && code != ABSU_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
         cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
          && !(targetm.addr_space.zero_address_valid
               (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
        return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
        /* Can't call fold_negate_const directly here as that doesn't
           handle all cases and we might not be able to negate some
           constants.  */
        tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
        if (tem && CONSTANT_CLASS_P (tem))
          return tem;
        break;
      }

    case ABS_EXPR:
    case ABSU_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
                                          TREE_TYPE (type));
          return build_complex (type, TREE_REALPART (arg0), ipart);
        }
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      else if (POLY_INT_CST_P (arg0))
        return wide_int_to_tree (type, -poly_int_cst_value (arg0));
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree elem;

          /* This can cope with stepped encodings because ~x == -1 - x.  */
          tree_vector_builder elements;
          elements.new_unary_operation (type, arg0, true);
          unsigned int i, count = elements.encoded_nelts ();
          for (i = 0; i < count; ++i)
            {
              elem = VECTOR_CST_ELT (arg0, i);
              elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
              if (elem == NULL_TREE)
                break;
              elements.quick_push (elem);
            }
          if (i == count)
            return elements.build ();
        }
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
      {
        unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
        enum tree_code subcode;

        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;

        if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
          return NULL_TREE;
        out_nelts = in_nelts / 2;
        gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

        unsigned int offset = 0;
        if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
                                   || code == VEC_UNPACK_FLOAT_LO_EXPR
                                   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
          offset = out_nelts;

        if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
          subcode = NOP_EXPR;
        else if (code == VEC_UNPACK_FLOAT_LO_EXPR
                 || code == VEC_UNPACK_FLOAT_HI_EXPR)
          subcode = FLOAT_EXPR;
        else
          subcode = FIX_TRUNC_EXPR;

        tree_vector_builder elts (type, out_nelts, 1);
        for (i = 0; i < out_nelts; i++)
          {
            tree elt = fold_convert_const (subcode, TREE_TYPE (type),
                                           VECTOR_CST_ELT (arg0, i + offset));
            if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
              return NULL_TREE;
            elts.quick_push (elt);
          }

        return elts.build ();
      }

    case VEC_DUPLICATE_EXPR:
      if (CONSTANT_CLASS_P (arg0))
        return build_vector_from_val (type, arg0);
      return NULL_TREE;

    default:
      break;
    }

  return NULL_TREE;
}

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (poly_int64 number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two poly_int constants faster.  */
  if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0)
              && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
            return arg1;
          if (integer_zerop (arg1)
              && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1)
              && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0)
              && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      tree res = int_const_binop (code, arg0, arg1, -1);
      if (res != NULL_TREE)
        return res;
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
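
/* Usage sketch (a hypothetical caller, not code from this file):
   size_binop is the usual way to do size arithmetic during layout, e.g.

     tree bytes = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (elt_type),
                              size_int (nelts));

   Both operands must already agree per int_binop_types_match_p.  */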
1946 /* Given two values, either both of sizetype or both of bitsizetype,
1947 compute the difference between the two values. Return the value
1948 in signed type corresponding to the type of the operands. */
1950 tree
1951 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1953 tree type = TREE_TYPE (arg0);
1954 tree ctype;
1956 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1957 TREE_TYPE (arg1)));
1959 /* If the type is already signed, just do the simple thing. */
1960 if (!TYPE_UNSIGNED (type))
1961 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1963 if (type == sizetype)
1964 ctype = ssizetype;
1965 else if (type == bitsizetype)
1966 ctype = sbitsizetype;
1967 else
1968 ctype = signed_type_for (type);
1970 /* If either operand is not a constant, do the conversions to the signed
1971 type and subtract. The hardware will do the right thing with any
1972 overflow in the subtraction. */
1973 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1974 return size_binop_loc (loc, MINUS_EXPR,
1975 fold_convert_loc (loc, ctype, arg0),
1976 fold_convert_loc (loc, ctype, arg1));
1978 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1979 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1980 overflow) and negate (which can't either). Special-case a result
1981 of zero while we're here. */
1982 if (tree_int_cst_equal (arg0, arg1))
1983 return build_int_cst (ctype, 0);
1984 else if (tree_int_cst_lt (arg1, arg0))
1985 return fold_convert_loc (loc, ctype,
1986 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1987 else
1988 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1989 fold_convert_loc (loc, ctype,
1990 size_binop_loc (loc,
1991 MINUS_EXPR,
1992 arg1, arg0)));
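/* Illustrative sketch, assuming two sizetype constants and the
   size_diffop wrapper macro:

     tree d = size_diffop (size_int (4), size_int (12));

   Since 12 > 4, the code above computes 12 - 4 in sizetype (which
   cannot overflow), converts the result to ssizetype, and negates it
   by subtracting from zero, yielding the ssizetype constant -8.  */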
1995 /* A subroutine of fold_convert_const handling conversions of an
1996 INTEGER_CST to another integer type. */
1998 static tree
1999 fold_convert_const_int_from_int (tree type, const_tree arg1)
2001 /* Given an integer constant, make new constant with new type,
2002 appropriately sign-extended or truncated. Use widest_int
2003 so that any extension is done according to ARG1's type. */
2004 return force_fit_type (type, wi::to_widest (arg1),
2005 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2006 TREE_OVERFLOW (arg1));
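/* Worked example: converting the INTEGER_CST 300 to a type with
   8-bit precision truncates through force_fit_type, so an unsigned
   char target yields the value 44 (300 mod 256).  */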
2009 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2010 to an integer type. */
2012 static tree
2013 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2015 bool overflow = false;
2016 tree t;
2018 /* The following code implements the floating point to integer
2019 conversion rules required by the Java Language Specification,
2020 that IEEE NaNs are mapped to zero and values that overflow
2021 the target precision saturate, i.e. values greater than
2022 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2023 are mapped to INT_MIN. These semantics are allowed by the
2024 C and C++ standards that simply state that the behavior of
2025 FP-to-integer conversion is unspecified upon overflow. */
2027 wide_int val;
2028 REAL_VALUE_TYPE r;
2029 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2031 switch (code)
2033 case FIX_TRUNC_EXPR:
2034 real_trunc (&r, VOIDmode, &x);
2035 break;
2037 default:
2038 gcc_unreachable ();
2041 /* If R is NaN, return zero and show we have an overflow. */
2042 if (REAL_VALUE_ISNAN (r))
2044 overflow = true;
2045 val = wi::zero (TYPE_PRECISION (type));
2048 /* See if R is less than the lower bound or greater than the
2049 upper bound. */
2051 if (! overflow)
2053 tree lt = TYPE_MIN_VALUE (type);
2054 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2055 if (real_less (&r, &l))
2057 overflow = true;
2058 val = wi::to_wide (lt);
2062 if (! overflow)
2064 tree ut = TYPE_MAX_VALUE (type);
2065 if (ut)
2067 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2068 if (real_less (&u, &r))
2070 overflow = true;
2071 val = wi::to_wide (ut);
2076 if (! overflow)
2077 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2079 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2080 return t;
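/* Worked example of the saturation rules above, for a 32-bit signed
   target type:

     3.9      ->  3           (FIX_TRUNC_EXPR truncates toward zero)
     1.0e30   ->  2147483647  (TYPE_MAX_VALUE, TREE_OVERFLOW set)
     -1.0e30  -> -2147483648  (TYPE_MIN_VALUE, TREE_OVERFLOW set)
     NaN      ->  0           (TREE_OVERFLOW set)  */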
2083 /* A subroutine of fold_convert_const handling conversions of a
2084 FIXED_CST to an integer type. */
2086 static tree
2087 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2089 tree t;
2090 double_int temp, temp_trunc;
2091 scalar_mode mode;
2093 /* Right shift FIXED_CST to temp by fbit. */
2094 temp = TREE_FIXED_CST (arg1).data;
2095 mode = TREE_FIXED_CST (arg1).mode;
2096 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2098 temp = temp.rshift (GET_MODE_FBIT (mode),
2099 HOST_BITS_PER_DOUBLE_INT,
2100 SIGNED_FIXED_POINT_MODE_P (mode));
2102 /* Left shift temp to temp_trunc by fbit. */
2103 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2104 HOST_BITS_PER_DOUBLE_INT,
2105 SIGNED_FIXED_POINT_MODE_P (mode));
2107 else
2109 temp = double_int_zero;
2110 temp_trunc = double_int_zero;
2113 /* If FIXED_CST is negative, we need to round the value toward 0.
2114 We do this by adding 1 to temp when the fractional bits are nonzero. */
2115 if (SIGNED_FIXED_POINT_MODE_P (mode)
2116 && temp_trunc.is_negative ()
2117 && TREE_FIXED_CST (arg1).data != temp_trunc)
2118 temp += double_int_one;
2120 /* Given a fixed-point constant, make new constant with new type,
2121 appropriately sign-extended or truncated. */
2122 t = force_fit_type (type, temp, -1,
2123 (temp.is_negative ()
2124 && (TYPE_UNSIGNED (type)
2125 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2126 | TREE_OVERFLOW (arg1));
2128 return t;
2131 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2132 to another floating point type. */
2134 static tree
2135 fold_convert_const_real_from_real (tree type, const_tree arg1)
2137 REAL_VALUE_TYPE value;
2138 tree t;
2140 /* Don't perform the operation if flag_signaling_nans is on
2141 and the operand is a signaling NaN. */
2142 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2143 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2144 return NULL_TREE;
2146 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2147 t = build_real (type, value);
2149 /* If converting an infinity or NAN to a representation that doesn't
2150 have one, set the overflow bit so that we can produce some kind of
2151 error message at the appropriate point if necessary. It's not the
2152 most user-friendly message, but it's better than nothing. */
2153 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2154 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2155 TREE_OVERFLOW (t) = 1;
2156 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2157 && !MODE_HAS_NANS (TYPE_MODE (type)))
2158 TREE_OVERFLOW (t) = 1;
2159 /* Regular overflow, conversion produced an infinity in a mode that
2160 can't represent them. */
2161 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2162 && REAL_VALUE_ISINF (value)
2163 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2164 TREE_OVERFLOW (t) = 1;
2165 else
2166 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2167 return t;
2170 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2171 to a floating point type. */
2173 static tree
2174 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2176 REAL_VALUE_TYPE value;
2177 tree t;
2179 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2180 &TREE_FIXED_CST (arg1));
2181 t = build_real (type, value);
2183 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2184 return t;
2187 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2188 to another fixed-point type. */
2190 static tree
2191 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2193 FIXED_VALUE_TYPE value;
2194 tree t;
2195 bool overflow_p;
2197 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2198 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2199 t = build_fixed (type, value);
2201 /* Propagate overflow flags. */
2202 if (overflow_p | TREE_OVERFLOW (arg1))
2203 TREE_OVERFLOW (t) = 1;
2204 return t;
2207 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2208 to a fixed-point type. */
2210 static tree
2211 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2213 FIXED_VALUE_TYPE value;
2214 tree t;
2215 bool overflow_p;
2216 double_int di;
2218 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2220 di.low = TREE_INT_CST_ELT (arg1, 0);
2221 if (TREE_INT_CST_NUNITS (arg1) == 1)
2222 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2223 else
2224 di.high = TREE_INT_CST_ELT (arg1, 1);
2226 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2227 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2228 TYPE_SATURATING (type));
2229 t = build_fixed (type, value);
2231 /* Propagate overflow flags. */
2232 if (overflow_p | TREE_OVERFLOW (arg1))
2233 TREE_OVERFLOW (t) = 1;
2234 return t;
2237 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2238 to a fixed-point type. */
2240 static tree
2241 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2243 FIXED_VALUE_TYPE value;
2244 tree t;
2245 bool overflow_p;
2247 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2248 &TREE_REAL_CST (arg1),
2249 TYPE_SATURATING (type));
2250 t = build_fixed (type, value);
2252 /* Propagate overflow flags. */
2253 if (overflow_p | TREE_OVERFLOW (arg1))
2254 TREE_OVERFLOW (t) = 1;
2255 return t;
2258 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2259 type TYPE. If no simplification can be done return NULL_TREE. */
2261 static tree
2262 fold_convert_const (enum tree_code code, tree type, tree arg1)
2264 tree arg_type = TREE_TYPE (arg1);
2265 if (arg_type == type)
2266 return arg1;
2268 /* We can't widen types, since the runtime value could overflow the
2269 original type before being extended to the new type. */
2270 if (POLY_INT_CST_P (arg1)
2271 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2272 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2273 return build_poly_int_cst (type,
2274 poly_wide_int::from (poly_int_cst_value (arg1),
2275 TYPE_PRECISION (type),
2276 TYPE_SIGN (arg_type)));
2278 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2279 || TREE_CODE (type) == OFFSET_TYPE)
2281 if (TREE_CODE (arg1) == INTEGER_CST)
2282 return fold_convert_const_int_from_int (type, arg1);
2283 else if (TREE_CODE (arg1) == REAL_CST)
2284 return fold_convert_const_int_from_real (code, type, arg1);
2285 else if (TREE_CODE (arg1) == FIXED_CST)
2286 return fold_convert_const_int_from_fixed (type, arg1);
2288 else if (TREE_CODE (type) == REAL_TYPE)
2290 if (TREE_CODE (arg1) == INTEGER_CST)
2291 return build_real_from_int_cst (type, arg1);
2292 else if (TREE_CODE (arg1) == REAL_CST)
2293 return fold_convert_const_real_from_real (type, arg1);
2294 else if (TREE_CODE (arg1) == FIXED_CST)
2295 return fold_convert_const_real_from_fixed (type, arg1);
2297 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2299 if (TREE_CODE (arg1) == FIXED_CST)
2300 return fold_convert_const_fixed_from_fixed (type, arg1);
2301 else if (TREE_CODE (arg1) == INTEGER_CST)
2302 return fold_convert_const_fixed_from_int (type, arg1);
2303 else if (TREE_CODE (arg1) == REAL_CST)
2304 return fold_convert_const_fixed_from_real (type, arg1);
2306 else if (TREE_CODE (type) == VECTOR_TYPE)
2308 if (TREE_CODE (arg1) == VECTOR_CST
2309 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2311 tree elttype = TREE_TYPE (type);
2312 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2313 /* We can't handle steps directly when extending, since the
2314 values need to wrap at the original precision first. */
2315 bool step_ok_p
2316 = (INTEGRAL_TYPE_P (elttype)
2317 && INTEGRAL_TYPE_P (arg1_elttype)
2318 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2319 tree_vector_builder v;
2320 if (!v.new_unary_operation (type, arg1, step_ok_p))
2321 return NULL_TREE;
2322 unsigned int len = v.encoded_nelts ();
2323 for (unsigned int i = 0; i < len; ++i)
2325 tree elt = VECTOR_CST_ELT (arg1, i);
2326 tree cvt = fold_convert_const (code, elttype, elt);
2327 if (cvt == NULL_TREE)
2328 return NULL_TREE;
2329 v.quick_push (cvt);
2331 return v.build ();
2334 return NULL_TREE;
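/* Illustrative sketch of this entry point; the operands are
   assumptions chosen for the example:

     tree five = build_int_cst (integer_type_node, 5);
     tree d = fold_convert_const (FLOAT_EXPR, double_type_node, five);

   Here D is the REAL_CST 5.0, built via build_real_from_int_cst.
   A NULL_TREE result simply means no constant folding applies.  */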
2337 /* Construct a vector of zero elements of vector type TYPE. */
2339 static tree
2340 build_zero_vector (tree type)
2342 tree t;
2344 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2345 return build_vector_from_val (type, t);
2350 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2350 bool
2351 fold_convertible_p (const_tree type, const_tree arg)
2353 tree orig = TREE_TYPE (arg);
2355 if (type == orig)
2356 return true;
2358 if (TREE_CODE (arg) == ERROR_MARK
2359 || TREE_CODE (type) == ERROR_MARK
2360 || TREE_CODE (orig) == ERROR_MARK)
2361 return false;
2363 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2364 return true;
2366 switch (TREE_CODE (type))
2368 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2369 case POINTER_TYPE: case REFERENCE_TYPE:
2370 case OFFSET_TYPE:
2371 return (INTEGRAL_TYPE_P (orig)
2372 || (POINTER_TYPE_P (orig)
2373 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2374 || TREE_CODE (orig) == OFFSET_TYPE);
2376 case REAL_TYPE:
2377 case FIXED_POINT_TYPE:
2378 case VOID_TYPE:
2379 return TREE_CODE (type) == TREE_CODE (orig);
2381 case VECTOR_TYPE:
2382 return (VECTOR_TYPE_P (orig)
2383 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2384 TYPE_VECTOR_SUBPARTS (orig))
2385 && fold_convertible_p (TREE_TYPE (type), TREE_TYPE (orig)));
2387 default:
2388 return false;
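/* Illustrative results of the predicate above:

     int expr    to long    : true  (integral conversions are NOPs)
     float expr  to double  : true  (REAL_TYPE to REAL_TYPE)
     double expr to int     : false (needs FIX_TRUNC_EXPR rather
				     than a NOP_EXPR)  */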
2392 /* Convert expression ARG to type TYPE. Used by the middle-end for
2393 simple conversions in preference to calling the front-end's convert. */
2395 tree
2396 fold_convert_loc (location_t loc, tree type, tree arg)
2398 tree orig = TREE_TYPE (arg);
2399 tree tem;
2401 if (type == orig)
2402 return arg;
2404 if (TREE_CODE (arg) == ERROR_MARK
2405 || TREE_CODE (type) == ERROR_MARK
2406 || TREE_CODE (orig) == ERROR_MARK)
2407 return error_mark_node;
2409 switch (TREE_CODE (type))
2411 case POINTER_TYPE:
2412 case REFERENCE_TYPE:
2413 /* Handle conversions between pointers to different address spaces. */
2414 if (POINTER_TYPE_P (orig)
2415 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2416 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2417 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2418 /* fall through */
2420 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2421 case OFFSET_TYPE:
2422 if (TREE_CODE (arg) == INTEGER_CST)
2424 tem = fold_convert_const (NOP_EXPR, type, arg);
2425 if (tem != NULL_TREE)
2426 return tem;
2428 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2429 || TREE_CODE (orig) == OFFSET_TYPE)
2430 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2431 if (TREE_CODE (orig) == COMPLEX_TYPE)
2432 return fold_convert_loc (loc, type,
2433 fold_build1_loc (loc, REALPART_EXPR,
2434 TREE_TYPE (orig), arg));
2435 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2436 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2437 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2439 case REAL_TYPE:
2440 if (TREE_CODE (arg) == INTEGER_CST)
2442 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2443 if (tem != NULL_TREE)
2444 return tem;
2446 else if (TREE_CODE (arg) == REAL_CST)
2448 tem = fold_convert_const (NOP_EXPR, type, arg);
2449 if (tem != NULL_TREE)
2450 return tem;
2452 else if (TREE_CODE (arg) == FIXED_CST)
2454 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2455 if (tem != NULL_TREE)
2456 return tem;
2459 switch (TREE_CODE (orig))
2461 case INTEGER_TYPE:
2462 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2463 case POINTER_TYPE: case REFERENCE_TYPE:
2464 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2466 case REAL_TYPE:
2467 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2469 case FIXED_POINT_TYPE:
2470 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2472 case COMPLEX_TYPE:
2473 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2474 return fold_convert_loc (loc, type, tem);
2476 default:
2477 gcc_unreachable ();
2480 case FIXED_POINT_TYPE:
2481 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2482 || TREE_CODE (arg) == REAL_CST)
2484 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2485 if (tem != NULL_TREE)
2486 goto fold_convert_exit;
2489 switch (TREE_CODE (orig))
2491 case FIXED_POINT_TYPE:
2492 case INTEGER_TYPE:
2493 case ENUMERAL_TYPE:
2494 case BOOLEAN_TYPE:
2495 case REAL_TYPE:
2496 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2498 case COMPLEX_TYPE:
2499 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2500 return fold_convert_loc (loc, type, tem);
2502 default:
2503 gcc_unreachable ();
2506 case COMPLEX_TYPE:
2507 switch (TREE_CODE (orig))
2509 case INTEGER_TYPE:
2510 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2511 case POINTER_TYPE: case REFERENCE_TYPE:
2512 case REAL_TYPE:
2513 case FIXED_POINT_TYPE:
2514 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2515 fold_convert_loc (loc, TREE_TYPE (type), arg),
2516 fold_convert_loc (loc, TREE_TYPE (type),
2517 integer_zero_node));
2518 case COMPLEX_TYPE:
2520 tree rpart, ipart;
2522 if (TREE_CODE (arg) == COMPLEX_EXPR)
2524 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2525 TREE_OPERAND (arg, 0));
2526 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2527 TREE_OPERAND (arg, 1));
2528 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2531 arg = save_expr (arg);
2532 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2533 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2534 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2535 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2536 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2539 default:
2540 gcc_unreachable ();
2543 case VECTOR_TYPE:
2544 if (integer_zerop (arg))
2545 return build_zero_vector (type);
2546 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2547 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2548 || TREE_CODE (orig) == VECTOR_TYPE);
2549 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2551 case VOID_TYPE:
2552 tem = fold_ignored_result (arg);
2553 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2555 default:
2556 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2557 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2558 gcc_unreachable ();
2560 fold_convert_exit:
2561 protected_set_expr_location_unshare (tem, loc);
2562 return tem;
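/* Illustrative sketch; the fold_convert macro supplies
   UNKNOWN_LOCATION:

     tree widened = fold_convert (long_long_integer_type_node, expr);

   Constant operands fold immediately through fold_convert_const;
   anything else is wrapped in the appropriate NOP_EXPR, FLOAT_EXPR,
   FIXED_CONVERT_EXPR or VIEW_CONVERT_EXPR node.  */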
2565 /* Return false if expr can be assumed not to be an lvalue, true
2566 otherwise. */
2568 static bool
2569 maybe_lvalue_p (const_tree x)
2571 /* We only need to wrap lvalue tree codes. */
2572 switch (TREE_CODE (x))
2574 case VAR_DECL:
2575 case PARM_DECL:
2576 case RESULT_DECL:
2577 case LABEL_DECL:
2578 case FUNCTION_DECL:
2579 case SSA_NAME:
2581 case COMPONENT_REF:
2582 case MEM_REF:
2583 case INDIRECT_REF:
2584 case ARRAY_REF:
2585 case ARRAY_RANGE_REF:
2586 case BIT_FIELD_REF:
2587 case OBJ_TYPE_REF:
2589 case REALPART_EXPR:
2590 case IMAGPART_EXPR:
2591 case PREINCREMENT_EXPR:
2592 case PREDECREMENT_EXPR:
2593 case SAVE_EXPR:
2594 case TRY_CATCH_EXPR:
2595 case WITH_CLEANUP_EXPR:
2596 case COMPOUND_EXPR:
2597 case MODIFY_EXPR:
2598 case TARGET_EXPR:
2599 case COND_EXPR:
2600 case BIND_EXPR:
2601 case VIEW_CONVERT_EXPR:
2602 break;
2604 default:
2605 /* Assume the worst for front-end tree codes. */
2606 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2607 break;
2608 return false;
2611 return true;
2614 /* Return an expr equal to X but certainly not valid as an lvalue. */
2616 tree
2617 non_lvalue_loc (location_t loc, tree x)
2619 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2620 us. */
2621 if (in_gimple_form)
2622 return x;
2624 if (! maybe_lvalue_p (x))
2625 return x;
2626 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2629 /* When pedantic, return an expr equal to X but certainly not valid as a
2630 pedantic lvalue. Otherwise, return X. */
2632 static tree
2633 pedantic_non_lvalue_loc (location_t loc, tree x)
2635 return protected_set_expr_location_unshare (x, loc);
2638 /* Given a tree comparison code, return the code that is the logical inverse.
2639 It is generally not safe to do this for floating-point comparisons, except
2640 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2641 ERROR_MARK in this case. */
2643 enum tree_code
2644 invert_tree_comparison (enum tree_code code, bool honor_nans)
2646 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2647 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2648 return ERROR_MARK;
2650 switch (code)
2652 case EQ_EXPR:
2653 return NE_EXPR;
2654 case NE_EXPR:
2655 return EQ_EXPR;
2656 case GT_EXPR:
2657 return honor_nans ? UNLE_EXPR : LE_EXPR;
2658 case GE_EXPR:
2659 return honor_nans ? UNLT_EXPR : LT_EXPR;
2660 case LT_EXPR:
2661 return honor_nans ? UNGE_EXPR : GE_EXPR;
2662 case LE_EXPR:
2663 return honor_nans ? UNGT_EXPR : GT_EXPR;
2664 case LTGT_EXPR:
2665 return UNEQ_EXPR;
2666 case UNEQ_EXPR:
2667 return LTGT_EXPR;
2668 case UNGT_EXPR:
2669 return LE_EXPR;
2670 case UNGE_EXPR:
2671 return LT_EXPR;
2672 case UNLT_EXPR:
2673 return GE_EXPR;
2674 case UNLE_EXPR:
2675 return GT_EXPR;
2676 case ORDERED_EXPR:
2677 return UNORDERED_EXPR;
2678 case UNORDERED_EXPR:
2679 return ORDERED_EXPR;
2680 default:
2681 gcc_unreachable ();
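/* Illustrative results of the inversion:

     invert_tree_comparison (LT_EXPR, false) -> GE_EXPR
     invert_tree_comparison (LT_EXPR, true)  -> UNGE_EXPR

   the latter assuming flag_trapping_math is clear; with it set, the
   result is ERROR_MARK, because the inverted comparison would trap
   on different operands.  */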
2685 /* Similar, but return the comparison that results if the operands are
2686 swapped. This is safe for floating-point. */
2688 enum tree_code
2689 swap_tree_comparison (enum tree_code code)
2691 switch (code)
2693 case EQ_EXPR:
2694 case NE_EXPR:
2695 case ORDERED_EXPR:
2696 case UNORDERED_EXPR:
2697 case LTGT_EXPR:
2698 case UNEQ_EXPR:
2699 return code;
2700 case GT_EXPR:
2701 return LT_EXPR;
2702 case GE_EXPR:
2703 return LE_EXPR;
2704 case LT_EXPR:
2705 return GT_EXPR;
2706 case LE_EXPR:
2707 return GE_EXPR;
2708 case UNGT_EXPR:
2709 return UNLT_EXPR;
2710 case UNGE_EXPR:
2711 return UNLE_EXPR;
2712 case UNLT_EXPR:
2713 return UNGT_EXPR;
2714 case UNLE_EXPR:
2715 return UNGE_EXPR;
2716 default:
2717 gcc_unreachable ();
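/* Illustrative results: a < b tests the same thing as b > a, so

     swap_tree_comparison (LT_EXPR)   -> GT_EXPR
     swap_tree_comparison (UNLE_EXPR) -> UNGE_EXPR

   and, unlike inversion, this is always safe for NaNs.  */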
2722 /* Convert a comparison tree code from an enum tree_code representation
2723 into a compcode bit-based encoding. This function is the inverse of
2724 compcode_to_comparison. */
2726 static enum comparison_code
2727 comparison_to_compcode (enum tree_code code)
2729 switch (code)
2731 case LT_EXPR:
2732 return COMPCODE_LT;
2733 case EQ_EXPR:
2734 return COMPCODE_EQ;
2735 case LE_EXPR:
2736 return COMPCODE_LE;
2737 case GT_EXPR:
2738 return COMPCODE_GT;
2739 case NE_EXPR:
2740 return COMPCODE_NE;
2741 case GE_EXPR:
2742 return COMPCODE_GE;
2743 case ORDERED_EXPR:
2744 return COMPCODE_ORD;
2745 case UNORDERED_EXPR:
2746 return COMPCODE_UNORD;
2747 case UNLT_EXPR:
2748 return COMPCODE_UNLT;
2749 case UNEQ_EXPR:
2750 return COMPCODE_UNEQ;
2751 case UNLE_EXPR:
2752 return COMPCODE_UNLE;
2753 case UNGT_EXPR:
2754 return COMPCODE_UNGT;
2755 case LTGT_EXPR:
2756 return COMPCODE_LTGT;
2757 case UNGE_EXPR:
2758 return COMPCODE_UNGE;
2759 default:
2760 gcc_unreachable ();
2764 /* Convert a compcode bit-based encoding of a comparison operator back
2765 to GCC's enum tree_code representation. This function is the
2766 inverse of comparison_to_compcode. */
2768 static enum tree_code
2769 compcode_to_comparison (enum comparison_code code)
2771 switch (code)
2773 case COMPCODE_LT:
2774 return LT_EXPR;
2775 case COMPCODE_EQ:
2776 return EQ_EXPR;
2777 case COMPCODE_LE:
2778 return LE_EXPR;
2779 case COMPCODE_GT:
2780 return GT_EXPR;
2781 case COMPCODE_NE:
2782 return NE_EXPR;
2783 case COMPCODE_GE:
2784 return GE_EXPR;
2785 case COMPCODE_ORD:
2786 return ORDERED_EXPR;
2787 case COMPCODE_UNORD:
2788 return UNORDERED_EXPR;
2789 case COMPCODE_UNLT:
2790 return UNLT_EXPR;
2791 case COMPCODE_UNEQ:
2792 return UNEQ_EXPR;
2793 case COMPCODE_UNLE:
2794 return UNLE_EXPR;
2795 case COMPCODE_UNGT:
2796 return UNGT_EXPR;
2797 case COMPCODE_LTGT:
2798 return LTGT_EXPR;
2799 case COMPCODE_UNGE:
2800 return UNGE_EXPR;
2801 default:
2802 gcc_unreachable ();
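/* A note on the encoding: COMPCODE_LT, COMPCODE_EQ and COMPCODE_GT
   occupy distinct bits, so boolean combinations of comparisons
   reduce to bitwise arithmetic, e.g.

     COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE   (1 | 2 == 3)
     COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ   (3 & 6 == 2)

   which is exactly what combine_comparisons below relies on.  */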
2806 /* Return true if COND1 tests the opposite condition of COND2. */
2808 bool
2809 inverse_conditions_p (const_tree cond1, const_tree cond2)
2811 return (COMPARISON_CLASS_P (cond1)
2812 && COMPARISON_CLASS_P (cond2)
2813 && (invert_tree_comparison
2814 (TREE_CODE (cond1),
2815 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2816 && operand_equal_p (TREE_OPERAND (cond1, 0),
2817 TREE_OPERAND (cond2, 0), 0)
2818 && operand_equal_p (TREE_OPERAND (cond1, 1),
2819 TREE_OPERAND (cond2, 1), 0));
2822 /* Return a tree for the comparison which is the combination of
2823 doing the AND or OR (depending on CODE) of the two operations LCODE
2824 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2825 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2826 if this makes the transformation invalid. */
2828 tree
2829 combine_comparisons (location_t loc,
2830 enum tree_code code, enum tree_code lcode,
2831 enum tree_code rcode, tree truth_type,
2832 tree ll_arg, tree lr_arg)
2834 bool honor_nans = HONOR_NANS (ll_arg);
2835 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2836 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2837 int compcode;
2839 switch (code)
2841 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2842 compcode = lcompcode & rcompcode;
2843 break;
2845 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2846 compcode = lcompcode | rcompcode;
2847 break;
2849 default:
2850 return NULL_TREE;
2853 if (!honor_nans)
2855 /* Eliminate unordered comparisons, as well as LTGT and ORD
2856 which are not used unless the mode has NaNs. */
2857 compcode &= ~COMPCODE_UNORD;
2858 if (compcode == COMPCODE_LTGT)
2859 compcode = COMPCODE_NE;
2860 else if (compcode == COMPCODE_ORD)
2861 compcode = COMPCODE_TRUE;
2863 else if (flag_trapping_math)
2865 /* Check that the original operation and the optimized ones will trap
2866 under the same condition. */
2867 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2868 && (lcompcode != COMPCODE_EQ)
2869 && (lcompcode != COMPCODE_ORD);
2870 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2871 && (rcompcode != COMPCODE_EQ)
2872 && (rcompcode != COMPCODE_ORD);
2873 bool trap = (compcode & COMPCODE_UNORD) == 0
2874 && (compcode != COMPCODE_EQ)
2875 && (compcode != COMPCODE_ORD);
2877 /* In a short-circuited boolean expression the LHS might be
2878 such that the RHS, if evaluated, will never trap. For
2879 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2880 if neither x nor y is NaN. (This is a mixed blessing: for
2881 example, the expression above will never trap, hence
2882 optimizing it to x < y would be invalid). */
2883 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2884 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2885 rtrap = false;
2887 /* If the comparison was short-circuited, and only the RHS
2888 trapped, we may now generate a spurious trap. */
2889 if (rtrap && !ltrap
2890 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2891 return NULL_TREE;
2893 /* If we changed the conditions that cause a trap, we lose. */
2894 if ((ltrap || rtrap) != trap)
2895 return NULL_TREE;
2898 if (compcode == COMPCODE_TRUE)
2899 return constant_boolean_node (true, truth_type);
2900 else if (compcode == COMPCODE_FALSE)
2901 return constant_boolean_node (false, truth_type);
2902 else
2904 enum tree_code tcode;
2906 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2907 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
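/* Worked example: folding (x <= y) && (x >= y) for an integer type.
   lcompcode is COMPCODE_LE (3), rcompcode is COMPCODE_GE (6), and
   TRUTH_AND_EXPR computes 3 & 6 == 2, i.e. COMPCODE_EQ, so

     combine_comparisons (loc, TRUTH_AND_EXPR, LE_EXPR, GE_EXPR,
			  boolean_type_node, x, y);

   returns the tree for x == y.  */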
2911 /* Return nonzero if two operands (typically of the same tree node)
2912 are necessarily equal. FLAGS modifies behavior as follows:
2914 If OEP_ONLY_CONST is set, only return nonzero for constants.
2915 This function tests whether the operands are indistinguishable;
2916 it does not test whether they are equal using C's == operation.
2917 The distinction is important for IEEE floating point, because
2918 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2919 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2921 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2922 even though it may hold multiple values during a function.
2923 This is because a GCC tree node guarantees that nothing else is
2924 executed between the evaluation of its "operands" (which may often
2925 be evaluated in arbitrary order). Hence if the operands themselves
2926 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2927 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2928 unset means assuming isochronic (or instantaneous) tree equivalence.
2929 Unless comparing arbitrary expression trees, such as from different
2930 statements, this flag can usually be left unset.
2932 If OEP_PURE_SAME is set, then pure functions with identical arguments
2933 are considered the same. It is used when the caller has other ways
2934 to ensure that global memory is unchanged in between.
2936 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2937 not values of expressions.
2939 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2940 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2942 If OEP_BITWISE is set, then require the values to be bitwise identical
2943 rather than simply numerically equal. Do not take advantage of things
2944 like math-related flags or undefined behavior; only return true for
2945 values that are provably bitwise identical in all circumstances.
2947 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2948 any operand with side effects. This is unnecessarily conservative in the
2949 case we know that arg0 and arg1 are in disjoint code paths (such as in
2950 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2951 addresses with TREE_CONSTANT flag set so we know that &var == &var
2952 even if var is volatile. */
2954 bool
2955 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
2956 unsigned int flags)
2958 bool r;
2959 if (verify_hash_value (arg0, arg1, flags, &r))
2960 return r;
2962 STRIP_ANY_LOCATION_WRAPPER (arg0);
2963 STRIP_ANY_LOCATION_WRAPPER (arg1);
2965 /* If either is ERROR_MARK, they aren't equal. */
2966 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2967 || TREE_TYPE (arg0) == error_mark_node
2968 || TREE_TYPE (arg1) == error_mark_node)
2969 return false;
2971 /* Similarly, if either does not have a type (like a template id),
2972 they aren't equal. */
2973 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2974 return false;
2976 /* Bitwise identity makes no sense if the values have different layouts. */
2977 if ((flags & OEP_BITWISE)
2978 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
2979 return false;
2981 /* We cannot consider pointers to different address space equal. */
2982 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2983 && POINTER_TYPE_P (TREE_TYPE (arg1))
2984 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2985 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2986 return false;
2988 /* Check equality of integer constants before bailing out due to
2989 precision differences. */
2990 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2992 /* Address of INTEGER_CST is not defined; check that we did not forget
2993 to drop the OEP_ADDRESS_OF flags. */
2994 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2995 return tree_int_cst_equal (arg0, arg1);
2998 if (!(flags & OEP_ADDRESS_OF))
3000 /* If both types don't have the same signedness, then we can't consider
3001 them equal. We must check this before the STRIP_NOPS calls
3002 because they may change the signedness of the arguments. As pointers
3003 strictly don't have a signedness, require either two pointers or
3004 two non-pointers as well. */
3005 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3006 || POINTER_TYPE_P (TREE_TYPE (arg0))
3007 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3008 return false;
3010 /* If both types don't have the same precision, then it is not safe
3011 to strip NOPs. */
3012 if (element_precision (TREE_TYPE (arg0))
3013 != element_precision (TREE_TYPE (arg1)))
3014 return false;
3016 STRIP_NOPS (arg0);
3017 STRIP_NOPS (arg1);
3019 #if 0
3020 /* FIXME: the Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
3021 sanity check once the issue is solved. */
3022 else
3023 /* Addresses of conversions and SSA_NAMEs (and many other things)
3024 are not defined. Check that we did not forget to drop the
3025 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3026 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3027 && TREE_CODE (arg0) != SSA_NAME);
3028 #endif
3030 /* In case both args are comparisons but with different comparison
3031 code, try to swap the comparison operands of one arg to produce
3032 a match and compare that variant. */
3033 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3034 && COMPARISON_CLASS_P (arg0)
3035 && COMPARISON_CLASS_P (arg1))
3037 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3039 if (TREE_CODE (arg0) == swap_code)
3040 return operand_equal_p (TREE_OPERAND (arg0, 0),
3041 TREE_OPERAND (arg1, 1), flags)
3042 && operand_equal_p (TREE_OPERAND (arg0, 1),
3043 TREE_OPERAND (arg1, 0), flags);
3046 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3048 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3049 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3051 else if (flags & OEP_ADDRESS_OF)
3053 /* If we are interested in comparing addresses ignore
3054 MEM_REF wrappings of the base that can appear just for
3055 TBAA reasons. */
3056 if (TREE_CODE (arg0) == MEM_REF
3057 && DECL_P (arg1)
3058 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3059 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3060 && integer_zerop (TREE_OPERAND (arg0, 1)))
3061 return true;
3062 else if (TREE_CODE (arg1) == MEM_REF
3063 && DECL_P (arg0)
3064 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3065 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3066 && integer_zerop (TREE_OPERAND (arg1, 1)))
3067 return true;
3068 return false;
3070 else
3071 return false;
3074 /* When not checking addresses, this is needed for conversions and for
3075 COMPONENT_REF. Might as well play it safe and always test this. */
3076 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3077 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3078 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3079 && !(flags & OEP_ADDRESS_OF)))
3080 return false;
3082 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3083 We don't care about side effects in that case because the SAVE_EXPR
3084 takes care of that for us. In all other cases, two expressions are
3085 equal if they have no side effects. If we have two identical
3086 expressions with side effects that should be treated the same due
3087 to the only side effects being identical SAVE_EXPR's, that will
3088 be detected in the recursive calls below.
3089 If we are taking an invariant address of two identical objects
3090 they are necessarily equal as well. */
3091 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3092 && (TREE_CODE (arg0) == SAVE_EXPR
3093 || (flags & OEP_MATCH_SIDE_EFFECTS)
3094 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3095 return true;
3097 /* Next handle constant cases, those for which we can return 1 even
3098 if ONLY_CONST is set. */
3099 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3100 switch (TREE_CODE (arg0))
3102 case INTEGER_CST:
3103 return tree_int_cst_equal (arg0, arg1);
3105 case FIXED_CST:
3106 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3107 TREE_FIXED_CST (arg1));
3109 case REAL_CST:
3110 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3111 return true;
3113 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3115 /* If we do not distinguish between signed and unsigned zero,
3116 consider them equal. */
3117 if (real_zerop (arg0) && real_zerop (arg1))
3118 return true;
3120 return false;
3122 case VECTOR_CST:
3124 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3125 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3126 return false;
3128 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3129 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3130 return false;
3132 unsigned int count = vector_cst_encoded_nelts (arg0);
3133 for (unsigned int i = 0; i < count; ++i)
3134 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3135 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3136 return false;
3137 return true;
3140 case COMPLEX_CST:
3141 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3142 flags)
3143 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3144 flags));
3146 case STRING_CST:
3147 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3148 && ! memcmp (TREE_STRING_POINTER (arg0),
3149 TREE_STRING_POINTER (arg1),
3150 TREE_STRING_LENGTH (arg0)));
3152 case ADDR_EXPR:
3153 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3154 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3155 flags | OEP_ADDRESS_OF
3156 | OEP_MATCH_SIDE_EFFECTS);
3157 case CONSTRUCTOR:
3158 /* In GIMPLE empty constructors are allowed in initializers of
3159 aggregates. */
3160 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3161 default:
3162 break;
3165 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3166 two instances of undefined behavior will give identical results. */
3167 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3168 return false;
3170 /* Define macros to test an operand from arg0 and arg1 for equality and a
3171 variant that allows null and views null as being different from any
3172 non-null value. In the latter case, if either is null, both
3173 must be; otherwise, do the normal comparison. */
3174 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3175 TREE_OPERAND (arg1, N), flags)
3177 #define OP_SAME_WITH_NULL(N) \
3178 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3179 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3181 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3183 case tcc_unary:
3184 /* Two conversions are equal only if signedness and modes match. */
3185 switch (TREE_CODE (arg0))
3187 CASE_CONVERT:
3188 case FIX_TRUNC_EXPR:
3189 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3190 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3191 return false;
3192 break;
3193 default:
3194 break;
3197 return OP_SAME (0);
3200 case tcc_comparison:
3201 case tcc_binary:
3202 if (OP_SAME (0) && OP_SAME (1))
3203 return true;
3205 /* For commutative ops, allow the other order. */
3206 return (commutative_tree_code (TREE_CODE (arg0))
3207 && operand_equal_p (TREE_OPERAND (arg0, 0),
3208 TREE_OPERAND (arg1, 1), flags)
3209 && operand_equal_p (TREE_OPERAND (arg0, 1),
3210 TREE_OPERAND (arg1, 0), flags));
3212 case tcc_reference:
3213 /* If either of the pointer (or reference) expressions we are
3214 dereferencing contain a side effect, these cannot be equal,
3215 but their addresses can be. */
3216 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3217 && (TREE_SIDE_EFFECTS (arg0)
3218 || TREE_SIDE_EFFECTS (arg1)))
3219 return false;
3221 switch (TREE_CODE (arg0))
3223 case INDIRECT_REF:
3224 if (!(flags & OEP_ADDRESS_OF))
3226 if (TYPE_ALIGN (TREE_TYPE (arg0))
3227 != TYPE_ALIGN (TREE_TYPE (arg1)))
3228 return false;
3229 /* Verify that the access types are compatible. */
3230 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3231 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3232 return false;
3234 flags &= ~OEP_ADDRESS_OF;
3235 return OP_SAME (0);
3237 case IMAGPART_EXPR:
3238 /* Require the same offset. */
3239 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3240 TYPE_SIZE (TREE_TYPE (arg1)),
3241 flags & ~OEP_ADDRESS_OF))
3242 return false;
3244 /* Fallthru. */
3245 case REALPART_EXPR:
3246 case VIEW_CONVERT_EXPR:
3247 return OP_SAME (0);
3249 case TARGET_MEM_REF:
3250 case MEM_REF:
3251 if (!(flags & OEP_ADDRESS_OF))
3253 /* Require equal access sizes */
3254 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3255 && (!TYPE_SIZE (TREE_TYPE (arg0))
3256 || !TYPE_SIZE (TREE_TYPE (arg1))
3257 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3258 TYPE_SIZE (TREE_TYPE (arg1)),
3259 flags)))
3260 return false;
3261 /* Verify that access happens in similar types. */
3262 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3263 return false;
3264 /* Verify that accesses are TBAA compatible. */
3265 if (!alias_ptr_types_compatible_p
3266 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3267 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3268 || (MR_DEPENDENCE_CLIQUE (arg0)
3269 != MR_DEPENDENCE_CLIQUE (arg1))
3270 || (MR_DEPENDENCE_BASE (arg0)
3271 != MR_DEPENDENCE_BASE (arg1)))
3272 return false;
3273 /* Verify that alignment is compatible. */
3274 if (TYPE_ALIGN (TREE_TYPE (arg0))
3275 != TYPE_ALIGN (TREE_TYPE (arg1)))
3276 return false;
3278 flags &= ~OEP_ADDRESS_OF;
3279 return (OP_SAME (0) && OP_SAME (1)
3281 /* TARGET_MEM_REFs require equal extra operands. */
3281 && (TREE_CODE (arg0) != TARGET_MEM_REF
3282 || (OP_SAME_WITH_NULL (2)
3283 && OP_SAME_WITH_NULL (3)
3284 && OP_SAME_WITH_NULL (4))));
3286 case ARRAY_REF:
3287 case ARRAY_RANGE_REF:
3288 if (!OP_SAME (0))
3289 return false;
3290 flags &= ~OEP_ADDRESS_OF;
3291 /* Compare the array index by value if it is constant first as we
3292 may have different types but same value here. */
3293 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3294 TREE_OPERAND (arg1, 1))
3295 || OP_SAME (1))
3296 && OP_SAME_WITH_NULL (2)
3297 && OP_SAME_WITH_NULL (3)
3298 /* Compare low bound and element size as with OEP_ADDRESS_OF
3299 we have to account for the offset of the ref. */
3300 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3301 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3302 || (operand_equal_p (array_ref_low_bound
3303 (CONST_CAST_TREE (arg0)),
3304 array_ref_low_bound
3305 (CONST_CAST_TREE (arg1)), flags)
3306 && operand_equal_p (array_ref_element_size
3307 (CONST_CAST_TREE (arg0)),
3308 array_ref_element_size
3309 (CONST_CAST_TREE (arg1)),
3310 flags))));
3312 case COMPONENT_REF:
3313 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3314 may be NULL when we're called to compare MEM_EXPRs. */
3315 if (!OP_SAME_WITH_NULL (0)
3316 || !OP_SAME (1))
3317 return false;
3318 flags &= ~OEP_ADDRESS_OF;
3319 return OP_SAME_WITH_NULL (2);
3321 case BIT_FIELD_REF:
3322 if (!OP_SAME (0))
3323 return false;
3324 flags &= ~OEP_ADDRESS_OF;
3325 return OP_SAME (1) && OP_SAME (2);
3327 /* Virtual table call. */
3328 case OBJ_TYPE_REF:
3330 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3331 OBJ_TYPE_REF_EXPR (arg1), flags))
3332 return false;
3333 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3334 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3335 return false;
3336 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3337 OBJ_TYPE_REF_OBJECT (arg1), flags))
3338 return false;
3339 if (!types_same_for_odr (obj_type_ref_class (arg0),
3340 obj_type_ref_class (arg1)))
3341 return false;
3342 return true;
3345 default:
3346 return false;
3349 case tcc_expression:
3350 switch (TREE_CODE (arg0))
3352 case ADDR_EXPR:
3353 /* Be sure we pass the right ADDRESS_OF flag. */
3354 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3355 return operand_equal_p (TREE_OPERAND (arg0, 0),
3356 TREE_OPERAND (arg1, 0),
3357 flags | OEP_ADDRESS_OF);
3359 case TRUTH_NOT_EXPR:
3360 return OP_SAME (0);
3362 case TRUTH_ANDIF_EXPR:
3363 case TRUTH_ORIF_EXPR:
3364 return OP_SAME (0) && OP_SAME (1);
3366 case WIDEN_MULT_PLUS_EXPR:
3367 case WIDEN_MULT_MINUS_EXPR:
3368 if (!OP_SAME (2))
3369 return false;
3370 /* The multiplication operands are commutative. */
3371 /* FALLTHRU */
3373 case TRUTH_AND_EXPR:
3374 case TRUTH_OR_EXPR:
3375 case TRUTH_XOR_EXPR:
3376 if (OP_SAME (0) && OP_SAME (1))
3377 return true;
3379 /* Otherwise take into account this is a commutative operation. */
3380 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3381 TREE_OPERAND (arg1, 1), flags)
3382 && operand_equal_p (TREE_OPERAND (arg0, 1),
3383 TREE_OPERAND (arg1, 0), flags));
3385 case COND_EXPR:
3386 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3387 return false;
3388 flags &= ~OEP_ADDRESS_OF;
3389 return OP_SAME (0);
3391 case BIT_INSERT_EXPR:
3392 /* BIT_INSERT_EXPR has an implicit operand, the type precision
3393 of op1, so we must check that they are the same. */
3394 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3395 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3396 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3397 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3398 return false;
3399 /* FALLTHRU */
3401 case VEC_COND_EXPR:
3402 case DOT_PROD_EXPR:
3403 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3405 case MODIFY_EXPR:
3406 case INIT_EXPR:
3407 case COMPOUND_EXPR:
3408 case PREDECREMENT_EXPR:
3409 case PREINCREMENT_EXPR:
3410 case POSTDECREMENT_EXPR:
3411 case POSTINCREMENT_EXPR:
3412 if (flags & OEP_LEXICOGRAPHIC)
3413 return OP_SAME (0) && OP_SAME (1);
3414 return false;
3416 case CLEANUP_POINT_EXPR:
3417 case EXPR_STMT:
3418 case SAVE_EXPR:
3419 if (flags & OEP_LEXICOGRAPHIC)
3420 return OP_SAME (0);
3421 return false;
3423 default:
3424 return false;
3427 case tcc_vl_exp:
3428 switch (TREE_CODE (arg0))
3430 case CALL_EXPR:
3431 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3432 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3433 /* If the two CALL_EXPRs are not both internal or both normal
3434 function calls, then they are not equal. */
3435 return false;
3436 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3438 /* If the CALL_EXPRs call different internal functions, then they
3439 are not equal. */
3440 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3441 return false;
3443 else
3445 /* If the CALL_EXPRs call different functions, then they are not
3446 equal. */
3447 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3448 flags))
3449 return false;
3452 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3454 unsigned int cef = call_expr_flags (arg0);
3455 if (flags & OEP_PURE_SAME)
3456 cef &= ECF_CONST | ECF_PURE;
3457 else
3458 cef &= ECF_CONST;
3459 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3460 return false;
3463 /* Now see if all the arguments are the same. */
3465 const_call_expr_arg_iterator iter0, iter1;
3466 const_tree a0, a1;
3467 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3468 a1 = first_const_call_expr_arg (arg1, &iter1);
3469 a0 && a1;
3470 a0 = next_const_call_expr_arg (&iter0),
3471 a1 = next_const_call_expr_arg (&iter1))
3472 if (! operand_equal_p (a0, a1, flags))
3473 return false;
3475 /* If we get here and both argument lists are exhausted
3476 then the CALL_EXPRs are equal. */
3477 return ! (a0 || a1);
3479 default:
3480 return false;
3483 case tcc_declaration:
3484 /* Consider __builtin_sqrt equal to sqrt. */
3485 return (TREE_CODE (arg0) == FUNCTION_DECL
3486 && fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3487 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3488 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3489 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3491 case tcc_exceptional:
3492 if (TREE_CODE (arg0) == CONSTRUCTOR)
3494 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3495 return false;
3497 /* In GIMPLE constructors are used only to build vectors from
3498 elements. Individual elements in the constructor must be
3499 indexed in increasing order and form an initial sequence.
3501 We make no effort to compare constructors in generic.
3502 (see sem_variable::equals in ipa-icf which can do so for
3503 constants). */
3504 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3505 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3506 return false;
3508 /* Be sure that vectors constructed have the same representation.
3509 We have only tested that element precisions and modes match.
3510 Vectors may be BLKmode and thus also check that the number of
3511 parts match. */
3512 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3513 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3514 return false;
3516 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3517 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3518 unsigned int len = vec_safe_length (v0);
3520 if (len != vec_safe_length (v1))
3521 return false;
3523 for (unsigned int i = 0; i < len; i++)
3525 constructor_elt *c0 = &(*v0)[i];
3526 constructor_elt *c1 = &(*v1)[i];
3528 if (!operand_equal_p (c0->value, c1->value, flags)
3529 /* In GIMPLE the indexes can be either NULL or matching i.
3530 Double check this so we won't get false
3531 positives for GENERIC. */
3532 || (c0->index
3533 && (TREE_CODE (c0->index) != INTEGER_CST
3534 || compare_tree_int (c0->index, i)))
3535 || (c1->index
3536 && (TREE_CODE (c1->index) != INTEGER_CST
3537 || compare_tree_int (c1->index, i))))
3538 return false;
3540 return true;
3542 else if (TREE_CODE (arg0) == STATEMENT_LIST
3543 && (flags & OEP_LEXICOGRAPHIC))
3545 /* Compare the STATEMENT_LISTs. */
3546 tree_stmt_iterator tsi1, tsi2;
3547 tree body1 = CONST_CAST_TREE (arg0);
3548 tree body2 = CONST_CAST_TREE (arg1);
3549 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3550 tsi_next (&tsi1), tsi_next (&tsi2))
3552 /* The lists don't have the same number of statements. */
3553 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3554 return false;
3555 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3556 return true;
3557 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3558 flags & (OEP_LEXICOGRAPHIC
3559 | OEP_NO_HASH_CHECK)))
3560 return false;
3563 return false;
3565 case tcc_statement:
3566 switch (TREE_CODE (arg0))
3568 case RETURN_EXPR:
3569 if (flags & OEP_LEXICOGRAPHIC)
3570 return OP_SAME_WITH_NULL (0);
3571 return false;
3572 case DEBUG_BEGIN_STMT:
3573 if (flags & OEP_LEXICOGRAPHIC)
3574 return true;
3575 return false;
3576 default:
3577 return false;
3580 default:
3581 return false;
3584 #undef OP_SAME
3585 #undef OP_SAME_WITH_NULL
3588 /* Generate a hash value for an expression. This can be used iteratively
3589 by passing a previous result as the HSTATE argument. */
3591 void
3592 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3593 unsigned int flags)
3595 int i;
3596 enum tree_code code;
3597 enum tree_code_class tclass;
3599 if (t == NULL_TREE || t == error_mark_node)
3601 hstate.merge_hash (0);
3602 return;
3605 STRIP_ANY_LOCATION_WRAPPER (t);
3607 if (!(flags & OEP_ADDRESS_OF))
3608 STRIP_NOPS (t);
3610 code = TREE_CODE (t);
3612 switch (code)
3614 /* Alas, constants aren't shared, so we can't rely on pointer
3615 identity. */
3616 case VOID_CST:
3617 hstate.merge_hash (0);
3618 return;
3619 case INTEGER_CST:
3620 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3621 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3622 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3623 return;
3624 case REAL_CST:
3626 unsigned int val2;
3627 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3628 val2 = rvc_zero;
3629 else
3630 val2 = real_hash (TREE_REAL_CST_PTR (t));
3631 hstate.merge_hash (val2);
3632 return;
3634 case FIXED_CST:
3636 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3637 hstate.merge_hash (val2);
3638 return;
3640 case STRING_CST:
3641 hstate.add ((const void *) TREE_STRING_POINTER (t),
3642 TREE_STRING_LENGTH (t));
3643 return;
3644 case COMPLEX_CST:
3645 hash_operand (TREE_REALPART (t), hstate, flags);
3646 hash_operand (TREE_IMAGPART (t), hstate, flags);
3647 return;
3648 case VECTOR_CST:
3650 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3651 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3652 unsigned int count = vector_cst_encoded_nelts (t);
3653 for (unsigned int i = 0; i < count; ++i)
3654 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3655 return;
3657 case SSA_NAME:
3658 /* We can just compare by pointer. */
3659 hstate.add_hwi (SSA_NAME_VERSION (t));
3660 return;
3661 case PLACEHOLDER_EXPR:
3662 /* The node itself doesn't matter. */
3663 return;
3664 case BLOCK:
3665 case OMP_CLAUSE:
3666 /* Ignore. */
3667 return;
3668 case TREE_LIST:
3669 /* A list of expressions, for a CALL_EXPR or as the elements of a
3670 VECTOR_CST. */
3671 for (; t; t = TREE_CHAIN (t))
3672 hash_operand (TREE_VALUE (t), hstate, flags);
3673 return;
3674 case CONSTRUCTOR:
3676 unsigned HOST_WIDE_INT idx;
3677 tree field, value;
3678 flags &= ~OEP_ADDRESS_OF;
3679 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3680 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3682 /* In GIMPLE the indexes can be either NULL or matching i. */
3683 if (field == NULL_TREE)
3684 field = bitsize_int (idx);
3685 hash_operand (field, hstate, flags);
3686 hash_operand (value, hstate, flags);
3688 return;
3690 case STATEMENT_LIST:
3692 tree_stmt_iterator i;
3693 for (i = tsi_start (CONST_CAST_TREE (t));
3694 !tsi_end_p (i); tsi_next (&i))
3695 hash_operand (tsi_stmt (i), hstate, flags);
3696 return;
3698 case TREE_VEC:
3699 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3700 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3701 return;
3702 case IDENTIFIER_NODE:
3703 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3704 return;
3705 case FUNCTION_DECL:
3706 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3707 Otherwise nodes that compare equal according to operand_equal_p might
3708 get different hash codes. However, don't do this for machine specific
3709 or front end builtins, since the function code is overloaded in those
3710 cases. */
3711 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3712 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3714 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3715 code = TREE_CODE (t);
3717 /* FALL THROUGH */
3718 default:
3719 if (POLY_INT_CST_P (t))
3721 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3722 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3723 return;
3725 tclass = TREE_CODE_CLASS (code);
3727 if (tclass == tcc_declaration)
3729 /* DECLs have a unique ID. */
3730 hstate.add_hwi (DECL_UID (t));
3732 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3734 /* For comparisons that can be swapped, use the lower
3735 tree code. */
3736 enum tree_code ccode = swap_tree_comparison (code);
3737 if (code < ccode)
3738 ccode = code;
3739 hstate.add_object (ccode);
3740 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3741 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3743 else if (CONVERT_EXPR_CODE_P (code))
3745 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3746 operand_equal_p. */
3747 enum tree_code ccode = NOP_EXPR;
3748 hstate.add_object (ccode);
3750 /* Don't hash the type, that can lead to having nodes which
3751 compare equal according to operand_equal_p, but which
3752 have different hash codes. Make sure to include signedness
3753 in the hash computation. */
3754 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3755 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3757 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3758 else if (code == MEM_REF
3759 && (flags & OEP_ADDRESS_OF) != 0
3760 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3761 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3762 && integer_zerop (TREE_OPERAND (t, 1)))
3763 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3764 hstate, flags);
3765 /* Don't ICE on FE specific trees, or their arguments etc.
3766 during operand_equal_p hash verification. */
3767 else if (!IS_EXPR_CODE_CLASS (tclass))
3768 gcc_assert (flags & OEP_HASH_CHECK);
3769 else
3771 unsigned int sflags = flags;
3773 hstate.add_object (code);
3775 switch (code)
3777 case ADDR_EXPR:
3778 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3779 flags |= OEP_ADDRESS_OF;
3780 sflags = flags;
3781 break;
3783 case INDIRECT_REF:
3784 case MEM_REF:
3785 case TARGET_MEM_REF:
3786 flags &= ~OEP_ADDRESS_OF;
3787 sflags = flags;
3788 break;
3790 case ARRAY_REF:
3791 case ARRAY_RANGE_REF:
3792 case COMPONENT_REF:
3793 case BIT_FIELD_REF:
3794 sflags &= ~OEP_ADDRESS_OF;
3795 break;
3797 case COND_EXPR:
3798 flags &= ~OEP_ADDRESS_OF;
3799 break;
3801 case WIDEN_MULT_PLUS_EXPR:
3802 case WIDEN_MULT_MINUS_EXPR:
3804 /* The multiplication operands are commutative. */
3805 inchash::hash one, two;
3806 hash_operand (TREE_OPERAND (t, 0), one, flags);
3807 hash_operand (TREE_OPERAND (t, 1), two, flags);
3808 hstate.add_commutative (one, two);
3809 hash_operand (TREE_OPERAND (t, 2), two, flags);
3810 return;
3813 case CALL_EXPR:
3814 if (CALL_EXPR_FN (t) == NULL_TREE)
3815 hstate.add_int (CALL_EXPR_IFN (t));
3816 break;
3818 case TARGET_EXPR:
3819 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3820 Usually different TARGET_EXPRs should just use
3821 different temporaries in their slots. */
3822 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3823 return;
3825 /* Virtual table call. */
3826 case OBJ_TYPE_REF:
3827 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3828 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3829 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3830 return;
3831 default:
3832 break;
3835 /* Don't hash the type, that can lead to having nodes which
3836 compare equal according to operand_equal_p, but which
3837 have different hash codes. */
3838 if (code == NON_LVALUE_EXPR)
3840 /* Make sure to include signedness in the hash computation. */
3841 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3842 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3845 else if (commutative_tree_code (code))
3847 /* It's a commutative expression. We want to hash it the same
3848 however it appears. We do this by first hashing both operands
3849 and then rehashing based on the order of their independent
3850 hashes. */
3851 inchash::hash one, two;
3852 hash_operand (TREE_OPERAND (t, 0), one, flags);
3853 hash_operand (TREE_OPERAND (t, 1), two, flags);
3854 hstate.add_commutative (one, two);
3856 else
3857 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
3858 hash_operand (TREE_OPERAND (t, i), hstate,
3859 i == 0 ? flags : sflags);
3861 return;
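/* Editorial sketch (not part of GCC): the order-insensitive combination
   used for commutative operands above can be pictured as hashing each
   operand into its own sub-hash and then feeding the two sub-hashes to
   the state in a canonical order, so that hash (a + b) == hash (b + a).
   The mixing function below is a stand-in, not inchash's real one.  */

static unsigned int
example_mix (unsigned int state, unsigned int v)
{
  /* Any reasonable mixer works for the illustration.  */
  return (state ^ v) * 0x9e3779b1u;
}

static unsigned int
example_add_commutative (unsigned int state, unsigned int h0, unsigned int h1)
{
  /* Feed the smaller sub-hash first so that operand order is erased.  */
  if (h1 < h0)
    {
      unsigned int tmp = h0;
      h0 = h1;
      h1 = tmp;
    }
  return example_mix (example_mix (state, h0), h1);
}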
3865 bool
3866 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
3867 unsigned int flags, bool *ret)
3869 /* When checking, verify at the outermost operand_equal_p call that
3870 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
3871 hash value. */
3872 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
3874 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
3876 if (arg0 != arg1)
3878 inchash::hash hstate0 (0), hstate1 (0);
3879 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
3880 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
3881 hashval_t h0 = hstate0.end ();
3882 hashval_t h1 = hstate1.end ();
3883 gcc_assert (h0 == h1);
3885 *ret = true;
3887 else
3888 *ret = false;
3890 return true;
3893 return false;
3897 static operand_compare default_compare_instance;
3899 /* Convenience wrapper around the operand_compare class, because usually
3900 we do not need to play with the valueizer. */
3902 bool
3903 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3905 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
3908 namespace inchash
3911 /* Generate a hash value for an expression. This can be used iteratively
3912 by passing a previous result as the HSTATE argument.
3914 This function is intended to produce the same hash for expressions which
3915 would compare equal using operand_equal_p. */
3916 void
3917 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
3919 default_compare_instance.hash_operand (t, hstate, flags);
3924 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3925 with a different signedness or a narrower precision. */
3927 static bool
3928 operand_equal_for_comparison_p (tree arg0, tree arg1)
3930 if (operand_equal_p (arg0, arg1, 0))
3931 return true;
3933 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3934 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3935 return false;
3937 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3938 and see if the inner values are the same. This removes any
3939 signedness comparison, which doesn't matter here. */
3940 tree op0 = arg0;
3941 tree op1 = arg1;
3942 STRIP_NOPS (op0);
3943 STRIP_NOPS (op1);
3944 if (operand_equal_p (op0, op1, 0))
3945 return true;
3947 /* Discard a single widening conversion from ARG1 and see if the inner
3948 value is the same as ARG0. */
3949 if (CONVERT_EXPR_P (arg1)
3950 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3951 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3952 < TYPE_PRECISION (TREE_TYPE (arg1))
3953 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3954 return true;
3956 return false;
3959 /* See if ARG is an expression that is either a comparison or is performing
3960 arithmetic on comparisons. The comparisons must only be comparing
3961 two different values, which will be stored in *CVAL1 and *CVAL2; if
3962 they are nonzero it means that some operands have already been found.
3963 No variables may be used anywhere else in the expression except in the
3964 comparisons.
3966 If this holds, return true. Otherwise, return false. */
3968 static bool
3969 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
3971 enum tree_code code = TREE_CODE (arg);
3972 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3974 /* We can handle some of the tcc_expression cases here. */
3975 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3976 tclass = tcc_unary;
3977 else if (tclass == tcc_expression
3978 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3979 || code == COMPOUND_EXPR))
3980 tclass = tcc_binary;
3982 switch (tclass)
3984 case tcc_unary:
3985 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
3987 case tcc_binary:
3988 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3989 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
3991 case tcc_constant:
3992 return true;
3994 case tcc_expression:
3995 if (code == COND_EXPR)
3996 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3997 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
3998 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
3999 return false;
4001 case tcc_comparison:
4002 /* First see if we can handle the first operand, then the second. For
4003 the second operand, we know *CVAL1 can't be zero. It must be that
4004 one side of the comparison is each of the values; test for the
4005 case where this isn't true by failing if the two operands
4006 are the same. */
4008 if (operand_equal_p (TREE_OPERAND (arg, 0),
4009 TREE_OPERAND (arg, 1), 0))
4010 return false;
4012 if (*cval1 == 0)
4013 *cval1 = TREE_OPERAND (arg, 0);
4014 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4016 else if (*cval2 == 0)
4017 *cval2 = TREE_OPERAND (arg, 0);
4018 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4020 else
4021 return false;
4023 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4025 else if (*cval2 == 0)
4026 *cval2 = TREE_OPERAND (arg, 1);
4027 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4029 else
4030 return false;
4032 return true;
4034 default:
4035 return false;
4039 /* ARG is a tree that is known to contain just arithmetic operations and
4040 comparisons. Evaluate the operations in the tree substituting NEW0 for
4041 any occurrence of OLD0 as an operand of a comparison and likewise for
4042 NEW1 and OLD1. */
4044 static tree
4045 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4046 tree old1, tree new1)
4048 tree type = TREE_TYPE (arg);
4049 enum tree_code code = TREE_CODE (arg);
4050 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4052 /* We can handle some of the tcc_expression cases here. */
4053 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4054 tclass = tcc_unary;
4055 else if (tclass == tcc_expression
4056 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4057 tclass = tcc_binary;
4059 switch (tclass)
4061 case tcc_unary:
4062 return fold_build1_loc (loc, code, type,
4063 eval_subst (loc, TREE_OPERAND (arg, 0),
4064 old0, new0, old1, new1));
4066 case tcc_binary:
4067 return fold_build2_loc (loc, code, type,
4068 eval_subst (loc, TREE_OPERAND (arg, 0),
4069 old0, new0, old1, new1),
4070 eval_subst (loc, TREE_OPERAND (arg, 1),
4071 old0, new0, old1, new1));
4073 case tcc_expression:
4074 switch (code)
4076 case SAVE_EXPR:
4077 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4078 old1, new1);
4080 case COMPOUND_EXPR:
4081 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4082 old1, new1);
4084 case COND_EXPR:
4085 return fold_build3_loc (loc, code, type,
4086 eval_subst (loc, TREE_OPERAND (arg, 0),
4087 old0, new0, old1, new1),
4088 eval_subst (loc, TREE_OPERAND (arg, 1),
4089 old0, new0, old1, new1),
4090 eval_subst (loc, TREE_OPERAND (arg, 2),
4091 old0, new0, old1, new1));
4092 default:
4093 break;
4095 /* Fall through - ??? */
4097 case tcc_comparison:
4099 tree arg0 = TREE_OPERAND (arg, 0);
4100 tree arg1 = TREE_OPERAND (arg, 1);
4102 /* We need to check both for exact equality and tree equality. The
4103 former will be true if the operand has a side-effect. In that
4104 case, we know the operand occurred exactly once. */
4106 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4107 arg0 = new0;
4108 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4109 arg0 = new1;
4111 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4112 arg1 = new0;
4113 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4114 arg1 = new1;
4116 return fold_build2_loc (loc, code, type, arg0, arg1);
4119 default:
4120 return arg;
4124 /* Return a tree for the case when the result of an expression is RESULT
4125 converted to TYPE and OMITTED was previously an operand of the expression
4126 but is now not needed (e.g., we folded OMITTED * 0).
4128 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4129 the conversion of RESULT to TYPE. */
4131 tree
4132 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4134 tree t = fold_convert_loc (loc, type, result);
4136 /* If the resulting operand is an empty statement, just return the omitted
4137 statement cast to void. */
4138 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4139 return build1_loc (loc, NOP_EXPR, void_type_node,
4140 fold_ignored_result (omitted));
4142 if (TREE_SIDE_EFFECTS (omitted))
4143 return build2_loc (loc, COMPOUND_EXPR, type,
4144 fold_ignored_result (omitted), t);
4146 return non_lvalue_loc (loc, t);
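/* Editorial sketch (not part of GCC): in C terms, folding f () * 0 with
   omit_one_operand_loc keeps the call for its side effects and yields
   the constant through a comma expression, as below.  The function and
   its counter are hypothetical.  */

static int example_call_count;

static int
example_f (void)
{
  return ++example_call_count;
}

static int
example_omit_one_operand (void)
{
  /* Behaves like the folded form of example_f () * 0: the call still
     happens, the result is the constant.  */
  return (example_f (), 0);
}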
4149 /* Return a tree for the case when the result of an expression is RESULT
4150 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4151 of the expression but are now not needed.
4153 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4154 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4155 evaluated before OMITTED2. Otherwise, if neither has side effects,
4156 just do the conversion of RESULT to TYPE. */
4158 tree
4159 omit_two_operands_loc (location_t loc, tree type, tree result,
4160 tree omitted1, tree omitted2)
4162 tree t = fold_convert_loc (loc, type, result);
4164 if (TREE_SIDE_EFFECTS (omitted2))
4165 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4166 if (TREE_SIDE_EFFECTS (omitted1))
4167 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4169 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4173 /* Return a simplified tree node for the truth-negation of ARG. This
4174 never alters ARG itself. We assume that ARG is an operation that
4175 returns a truth value (0 or 1).
4177 FIXME: one would think we would fold the result, but it causes
4178 problems with the dominator optimizer. */
4180 static tree
4181 fold_truth_not_expr (location_t loc, tree arg)
4183 tree type = TREE_TYPE (arg);
4184 enum tree_code code = TREE_CODE (arg);
4185 location_t loc1, loc2;
4187 /* If this is a comparison, we can simply invert it, except for
4188 floating-point non-equality comparisons, in which case we just
4189 enclose a TRUTH_NOT_EXPR around what we have. */
4191 if (TREE_CODE_CLASS (code) == tcc_comparison)
4193 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4194 if (FLOAT_TYPE_P (op_type)
4195 && flag_trapping_math
4196 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4197 && code != NE_EXPR && code != EQ_EXPR)
4198 return NULL_TREE;
4200 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4201 if (code == ERROR_MARK)
4202 return NULL_TREE;
4204 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4205 TREE_OPERAND (arg, 1));
4206 if (TREE_NO_WARNING (arg))
4207 TREE_NO_WARNING (ret) = 1;
4208 return ret;
4211 switch (code)
4213 case INTEGER_CST:
4214 return constant_boolean_node (integer_zerop (arg), type);
4216 case TRUTH_AND_EXPR:
4217 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4218 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4219 return build2_loc (loc, TRUTH_OR_EXPR, type,
4220 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4221 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4223 case TRUTH_OR_EXPR:
4224 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4225 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4226 return build2_loc (loc, TRUTH_AND_EXPR, type,
4227 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4228 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4230 case TRUTH_XOR_EXPR:
4231 /* Here we can invert either operand. We invert the first operand
4232 unless the second operand is a TRUTH_NOT_EXPR, in which case the
4233 result is the XOR of the first operand with the operand of that
4234 negation. */
4236 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4237 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4238 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4239 else
4240 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4241 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4242 TREE_OPERAND (arg, 1));
4244 case TRUTH_ANDIF_EXPR:
4245 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4246 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4247 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4248 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4249 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4251 case TRUTH_ORIF_EXPR:
4252 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4253 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4254 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4255 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4256 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4258 case TRUTH_NOT_EXPR:
4259 return TREE_OPERAND (arg, 0);
4261 case COND_EXPR:
4263 tree arg1 = TREE_OPERAND (arg, 1);
4264 tree arg2 = TREE_OPERAND (arg, 2);
4266 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4267 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4269 /* A COND_EXPR may have a throw as one operand, which
4270 then has void type. Just leave void operands
4271 as they are. */
4272 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4273 VOID_TYPE_P (TREE_TYPE (arg1))
4274 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4275 VOID_TYPE_P (TREE_TYPE (arg2))
4276 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4279 case COMPOUND_EXPR:
4280 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4281 return build2_loc (loc, COMPOUND_EXPR, type,
4282 TREE_OPERAND (arg, 0),
4283 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4285 case NON_LVALUE_EXPR:
4286 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4287 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4289 CASE_CONVERT:
4290 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4291 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4293 /* fall through */
4295 case FLOAT_EXPR:
4296 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4297 return build1_loc (loc, TREE_CODE (arg), type,
4298 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4300 case BIT_AND_EXPR:
4301 if (!integer_onep (TREE_OPERAND (arg, 1)))
4302 return NULL_TREE;
4303 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4305 case SAVE_EXPR:
4306 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4308 case CLEANUP_POINT_EXPR:
4309 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4310 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4311 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4313 default:
4314 return NULL_TREE;
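/* Editorial illustration (not part of GCC) of the De Morgan rewrites
   performed above for TRUTH_AND_EXPR and TRUTH_OR_EXPR: inverting a
   conjunction yields the disjunction of the inverted operands, and
   vice versa.  */

static int
example_invert_and (int a, int b)
{
  /* !(a && b) is rewritten as (!a || !b).  */
  return !a || !b;
}

static int
example_invert_or (int a, int b)
{
  /* !(a || b) is rewritten as (!a && !b).  */
  return !a && !b;
}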
4318 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4319 assume that ARG is an operation that returns a truth value (0 or 1
4320 for scalars, 0 or -1 for vectors). Return the folded expression if
4321 folding is successful. Otherwise, return NULL_TREE. */
4323 static tree
4324 fold_invert_truthvalue (location_t loc, tree arg)
4326 tree type = TREE_TYPE (arg);
4327 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4328 ? BIT_NOT_EXPR
4329 : TRUTH_NOT_EXPR,
4330 type, arg);
4333 /* Return a simplified tree node for the truth-negation of ARG. This
4334 never alters ARG itself. We assume that ARG is an operation that
4335 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4337 tree
4338 invert_truthvalue_loc (location_t loc, tree arg)
4340 if (TREE_CODE (arg) == ERROR_MARK)
4341 return arg;
4343 tree type = TREE_TYPE (arg);
4344 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4345 ? BIT_NOT_EXPR
4346 : TRUTH_NOT_EXPR,
4347 type, arg);
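/* Editorial note (not part of GCC): vector truth values are 0 or -1 per
   lane, so bitwise NOT is the correct inversion there, which is why
   BIT_NOT_EXPR is chosen for VECTOR_TYPE_P above: in two's complement,
   ~0 == -1 and ~-1 == 0.  */

static int
example_invert_lane (int lane)
{
  /* LANE is assumed to be 0 or -1; the result is the other value.  */
  return ~lane;
}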
4350 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4351 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4352 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4353 is the original memory reference used to preserve the alias set of
4354 the access. */
4356 static tree
4357 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4358 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4359 int unsignedp, int reversep)
4361 tree result, bftype;
4363 /* Attempt not to lose the access path if possible. */
4364 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4366 tree ninner = TREE_OPERAND (orig_inner, 0);
4367 machine_mode nmode;
4368 poly_int64 nbitsize, nbitpos;
4369 tree noffset;
4370 int nunsignedp, nreversep, nvolatilep = 0;
4371 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4372 &noffset, &nmode, &nunsignedp,
4373 &nreversep, &nvolatilep);
4374 if (base == inner
4375 && noffset == NULL_TREE
4376 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4377 && !reversep
4378 && !nreversep
4379 && !nvolatilep)
4381 inner = ninner;
4382 bitpos -= nbitpos;
4386 alias_set_type iset = get_alias_set (orig_inner);
4387 if (iset == 0 && get_alias_set (inner) != iset)
4388 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4389 build_fold_addr_expr (inner),
4390 build_int_cst (ptr_type_node, 0));
4392 if (known_eq (bitpos, 0) && !reversep)
4394 tree size = TYPE_SIZE (TREE_TYPE (inner));
4395 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4396 || POINTER_TYPE_P (TREE_TYPE (inner)))
4397 && tree_fits_shwi_p (size)
4398 && tree_to_shwi (size) == bitsize)
4399 return fold_convert_loc (loc, type, inner);
4402 bftype = type;
4403 if (TYPE_PRECISION (bftype) != bitsize
4404 || TYPE_UNSIGNED (bftype) == !unsignedp)
4405 bftype = build_nonstandard_integer_type (bitsize, 0);
4407 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4408 bitsize_int (bitsize), bitsize_int (bitpos));
4409 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4411 if (bftype != type)
4412 result = fold_convert_loc (loc, type, result);
4414 return result;
4417 /* Optimize a bit-field compare.
4419 There are two cases: the first is a compare against a constant, and
4420 the second is a comparison of two items where the fields are at the same
4421 bit position relative to the start of a chunk (byte, halfword, word)
4422 large enough to contain it. In these cases we can avoid the shift
4423 implicit in bitfield extractions.
4425 For constants, we emit a compare of the shifted constant with the
4426 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4427 compared. For two fields at the same position, we AND each operand
4428 with a similar mask and compare the results of the ANDs.
4430 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4431 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4432 are the left and right operands of the comparison, respectively.
4434 If the optimization described above can be done, we return the resulting
4435 tree. Otherwise we return zero. */
4437 static tree
4438 optimize_bit_field_compare (location_t loc, enum tree_code code,
4439 tree compare_type, tree lhs, tree rhs)
4441 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4442 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4443 tree type = TREE_TYPE (lhs);
4444 tree unsigned_type;
4445 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4446 machine_mode lmode, rmode;
4447 scalar_int_mode nmode;
4448 int lunsignedp, runsignedp;
4449 int lreversep, rreversep;
4450 int lvolatilep = 0, rvolatilep = 0;
4451 tree linner, rinner = NULL_TREE;
4452 tree mask;
4453 tree offset;
4455 /* Get all the information about the extractions being done. If the bit size
4456 is the same as the size of the underlying object, we aren't doing an
4457 extraction at all and so can do nothing. We also don't want to
4458 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4459 then will no longer be able to replace it. */
4460 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4461 &lunsignedp, &lreversep, &lvolatilep);
4462 if (linner == lhs
4463 || !known_size_p (plbitsize)
4464 || !plbitsize.is_constant (&lbitsize)
4465 || !plbitpos.is_constant (&lbitpos)
4466 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4467 || offset != 0
4468 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4469 || lvolatilep)
4470 return 0;
4472 if (const_p)
4473 rreversep = lreversep;
4474 else
4476 /* If this is not a constant, we can only do something if bit positions,
4477 sizes, signedness and storage order are the same. */
4478 rinner
4479 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4480 &runsignedp, &rreversep, &rvolatilep);
4482 if (rinner == rhs
4483 || maybe_ne (lbitpos, rbitpos)
4484 || maybe_ne (lbitsize, rbitsize)
4485 || lunsignedp != runsignedp
4486 || lreversep != rreversep
4487 || offset != 0
4488 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4489 || rvolatilep)
4490 return 0;
4493 /* Honor the C++ memory model and mimic what RTL expansion does. */
4494 poly_uint64 bitstart = 0;
4495 poly_uint64 bitend = 0;
4496 if (TREE_CODE (lhs) == COMPONENT_REF)
4498 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4499 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4500 return 0;
4503 /* See if we can find a mode to refer to this field. We should be able to,
4504 but fail if we can't. */
4505 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4506 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4507 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4508 TYPE_ALIGN (TREE_TYPE (rinner))),
4509 BITS_PER_WORD, false, &nmode))
4510 return 0;
4512 /* Set an unsigned type with the precision of this mode for the
4513 shifts below. */
4514 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4516 /* Compute the bit position and size for the new reference and our offset
4517 within it. If the new reference is the same size as the original, we
4518 won't optimize anything, so return zero. */
4519 nbitsize = GET_MODE_BITSIZE (nmode);
4520 nbitpos = lbitpos & ~ (nbitsize - 1);
4521 lbitpos -= nbitpos;
4522 if (nbitsize == lbitsize)
4523 return 0;
4525 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4526 lbitpos = nbitsize - lbitsize - lbitpos;
4528 /* Make the mask to be used against the extracted field. */
4529 mask = build_int_cst_type (unsigned_type, -1);
4530 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4531 mask = const_binop (RSHIFT_EXPR, mask,
4532 size_int (nbitsize - lbitsize - lbitpos));
4534 if (! const_p)
4536 if (nbitpos < 0)
4537 return 0;
4539 /* If not comparing with constant, just rework the comparison
4540 and return. */
4541 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4542 nbitsize, nbitpos, 1, lreversep);
4543 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4544 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4545 nbitsize, nbitpos, 1, rreversep);
4546 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4547 return fold_build2_loc (loc, code, compare_type, t1, t2);
4550 /* Otherwise, we are handling the constant case. See if the constant is too
4551 big for the field. Warn and return a tree for 0 (false) if so. We do
4552 this not only for its own sake, but to avoid having to test for this
4553 error case below. If we didn't, we might generate wrong code.
4555 For unsigned fields, the constant shifted right by the field length should
4556 be all zero. For signed fields, the high-order bits should agree with
4557 the sign bit. */
4559 if (lunsignedp)
4561 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4563 warning (0, "comparison is always %d due to width of bit-field",
4564 code == NE_EXPR);
4565 return constant_boolean_node (code == NE_EXPR, compare_type);
4568 else
4570 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4571 if (tem != 0 && tem != -1)
4573 warning (0, "comparison is always %d due to width of bit-field",
4574 code == NE_EXPR);
4575 return constant_boolean_node (code == NE_EXPR, compare_type);
4579 if (nbitpos < 0)
4580 return 0;
4582 /* Single-bit compares should always be against zero. */
4583 if (lbitsize == 1 && ! integer_zerop (rhs))
4585 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4586 rhs = build_int_cst (type, 0);
4589 /* Make a new bitfield reference, shift the constant over the
4590 appropriate number of bits and mask it with the computed mask
4591 (in case this was a signed field). If we changed it, make a new one. */
4592 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4593 nbitsize, nbitpos, 1, lreversep);
4595 rhs = const_binop (BIT_AND_EXPR,
4596 const_binop (LSHIFT_EXPR,
4597 fold_convert_loc (loc, unsigned_type, rhs),
4598 size_int (lbitpos)),
4599 mask);
4601 lhs = build2_loc (loc, code, compare_type,
4602 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4603 return lhs;
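/* Editorial sketch (not part of GCC) of the constant case above: for a
   hypothetical 3-bit field stored at bit offset 2 of a byte, the test
   "x.f == 5" can be evaluated without extracting and shifting the
   field, by masking the containing byte and comparing against the
   constant shifted into place.  Offsets assume a little-endian
   layout.  */

static int
example_field_eq_5 (unsigned char byte)
{
  /* mask = 0x07 << 2; shifted constant = 5 << 2.  */
  return (byte & 0x1c) == (5u << 2);
}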
4606 /* Subroutine for fold_truth_andor_1: decode a field reference.
4608 If EXP is a comparison reference, we return the innermost reference.
4610 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4611 set to the starting bit number.
4613 If the innermost field can be completely contained in a mode-sized
4614 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4616 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4617 otherwise it is not changed.
4619 *PUNSIGNEDP is set to the signedness of the field.
4621 *PREVERSEP is set to the storage order of the field.
4623 *PMASK is set to the mask used. This is either contained in a
4624 BIT_AND_EXPR or derived from the width of the field.
4626 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4628 Return 0 if this is not a component reference or is one that we can't
4629 do anything with. */
4631 static tree
4632 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4633 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4634 int *punsignedp, int *preversep, int *pvolatilep,
4635 tree *pmask, tree *pand_mask)
4637 tree exp = *exp_;
4638 tree outer_type = 0;
4639 tree and_mask = 0;
4640 tree mask, inner, offset;
4641 tree unsigned_type;
4642 unsigned int precision;
4644 /* All the optimizations using this function assume integer fields.
4645 There are problems with FP fields since the type_for_size call
4646 below can fail for, e.g., XFmode. */
4647 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4648 return NULL_TREE;
4650 /* We are interested in the bare arrangement of bits, so strip everything
4651 that doesn't affect the machine mode. However, record the type of the
4652 outermost expression if it may matter below. */
4653 if (CONVERT_EXPR_P (exp)
4654 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4655 outer_type = TREE_TYPE (exp);
4656 STRIP_NOPS (exp);
4658 if (TREE_CODE (exp) == BIT_AND_EXPR)
4660 and_mask = TREE_OPERAND (exp, 1);
4661 exp = TREE_OPERAND (exp, 0);
4662 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4663 if (TREE_CODE (and_mask) != INTEGER_CST)
4664 return NULL_TREE;
4667 poly_int64 poly_bitsize, poly_bitpos;
4668 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4669 pmode, punsignedp, preversep, pvolatilep);
4670 if ((inner == exp && and_mask == 0)
4671 || !poly_bitsize.is_constant (pbitsize)
4672 || !poly_bitpos.is_constant (pbitpos)
4673 || *pbitsize < 0
4674 || offset != 0
4675 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4676 /* Reject out-of-bound accesses (PR79731). */
4677 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4678 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4679 *pbitpos + *pbitsize) < 0))
4680 return NULL_TREE;
4682 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4683 if (unsigned_type == NULL_TREE)
4684 return NULL_TREE;
4686 *exp_ = exp;
4688 /* If the number of bits in the reference is the same as the bitsize of
4689 the outer type, then the outer type gives the signedness. Otherwise
4690 (in case of a small bitfield) the signedness is unchanged. */
4691 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4692 *punsignedp = TYPE_UNSIGNED (outer_type);
4694 /* Compute the mask to access the bitfield. */
4695 precision = TYPE_PRECISION (unsigned_type);
4697 mask = build_int_cst_type (unsigned_type, -1);
4699 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4700 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4702 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4703 if (and_mask != 0)
4704 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4705 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4707 *pmask = mask;
4708 *pand_mask = and_mask;
4709 return inner;
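/* Editorial sketch (not part of GCC) of the mask computation above: for
   an 8-bit unsigned type and a 3-bit field, shifting an all-ones value
   left and then right by precision - bitsize leaves exactly bitsize
   low-order ones.  */

static unsigned char
example_low_order_mask (void)
{
  unsigned char mask = (unsigned char) -1;   /* 0xff */
  mask = (unsigned char) (mask << (8 - 3));  /* 0xe0 */
  mask = mask >> (8 - 3);                    /* 0x07 */
  return mask;
}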
4712 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4713 bit positions and MASK is SIGNED. */
4715 static bool
4716 all_ones_mask_p (const_tree mask, unsigned int size)
4718 tree type = TREE_TYPE (mask);
4719 unsigned int precision = TYPE_PRECISION (type);
4721 /* If this function returns true when the type of the mask is
4722 UNSIGNED, then there will be errors. In particular see
4723 gcc.c-torture/execute/990326-1.c. There does not appear to be
4724 any documentation paper trail as to why this is so. But the
4725 pre-wide-int code worked with that restriction, and it has been
4726 preserved here. */
4727 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4728 return false;
4730 return wi::mask (size, false, precision) == wi::to_wide (mask);
4733 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4734 represents the sign bit of EXP's type. If EXP represents a sign
4735 or zero extension, also test VAL against the unextended type.
4736 The return value is the (sub)expression whose sign bit is VAL,
4737 or NULL_TREE otherwise. */
4739 tree
4740 sign_bit_p (tree exp, const_tree val)
4742 int width;
4743 tree t;
4745 /* Tree EXP must have an integral type. */
4746 t = TREE_TYPE (exp);
4747 if (! INTEGRAL_TYPE_P (t))
4748 return NULL_TREE;
4750 /* Tree VAL must be an integer constant. */
4751 if (TREE_CODE (val) != INTEGER_CST
4752 || TREE_OVERFLOW (val))
4753 return NULL_TREE;
4755 width = TYPE_PRECISION (t);
4756 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4757 return exp;
4759 /* Handle extension from a narrower type. */
4760 if (TREE_CODE (exp) == NOP_EXPR
4761 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4762 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4764 return NULL_TREE;
4767 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4768 to be evaluated unconditionally. */
4770 static bool
4771 simple_operand_p (const_tree exp)
4773 /* Strip any conversions that don't change the machine mode. */
4774 STRIP_NOPS (exp);
4776 return (CONSTANT_CLASS_P (exp)
4777 || TREE_CODE (exp) == SSA_NAME
4778 || (DECL_P (exp)
4779 && ! TREE_ADDRESSABLE (exp)
4780 && ! TREE_THIS_VOLATILE (exp)
4781 && ! DECL_NONLOCAL (exp)
4782 /* Don't regard global variables as simple. They may be
4783 allocated in ways unknown to the compiler (shared memory,
4784 #pragma weak, etc). */
4785 && ! TREE_PUBLIC (exp)
4786 && ! DECL_EXTERNAL (exp)
4787 /* Weakrefs are not safe to be read, since they can be NULL.
4788 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4789 have DECL_WEAK flag set. */
4790 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4791 /* Loading a static variable is unduly expensive, but global
4792 registers aren't expensive. */
4793 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4796 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4797 to be evaluated unconditionally.
4798 In addition to simple_operand_p, we assume that comparisons, conversions,
4799 and logic-not operations are simple, if their operands are simple, too. */
4801 static bool
4802 simple_operand_p_2 (tree exp)
4804 enum tree_code code;
4806 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4807 return false;
4809 while (CONVERT_EXPR_P (exp))
4810 exp = TREE_OPERAND (exp, 0);
4812 code = TREE_CODE (exp);
4814 if (TREE_CODE_CLASS (code) == tcc_comparison)
4815 return (simple_operand_p (TREE_OPERAND (exp, 0))
4816 && simple_operand_p (TREE_OPERAND (exp, 1)));
4818 if (code == TRUTH_NOT_EXPR)
4819 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4821 return simple_operand_p (exp);
4825 /* The following functions are subroutines to fold_range_test and allow it to
4826 try to change a logical combination of comparisons into a range test.
4828 For example, both
4829 X == 2 || X == 3 || X == 4 || X == 5
4830 and
4831 X >= 2 && X <= 5
4832 are converted to
4833 (unsigned) (X - 2) <= 3
4835 We describe each set of comparisons as being either inside or outside
4836 a range, using a variable named like IN_P, and then describe the
4837 range with a lower and upper bound. If one of the bounds is omitted,
4838 it represents either the highest or lowest value of the type.
4840 In the comments below, we represent a range by two numbers in brackets
4841 preceded by a "+" to designate being inside that range, or a "-" to
4842 designate being outside that range, so the condition can be inverted by
4843 flipping the prefix. An omitted bound is represented by a "-". For
4844 example, "- [-, 10]" means being outside the range starting at the lowest
4845 possible value and ending at 10, in other words, being greater than 10.
4846 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4847 always false.
4849 We set up things so that the missing bounds are handled in a consistent
4850 manner so neither a missing bound nor "true" and "false" need to be
4851 handled using a special case. */
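/* Editorial illustration (not part of GCC) of the rewrite described
   above, in well-defined unsigned C arithmetic: both chained forms are
   equivalent to the single unsigned comparison, because values below 2
   wrap around to very large numbers after the subtraction.  */

static int
example_range_test (int x)
{
  /* Same as x == 2 || x == 3 || x == 4 || x == 5,
     i.e. x >= 2 && x <= 5.  */
  return (unsigned int) x - 2u <= 3u;
}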
4853 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4854 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4855 and UPPER1_P are nonzero if the respective argument is an upper bound
4856 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4857 must be specified for a comparison. ARG1 will be converted to ARG0's
4858 type if both are specified. */
4860 static tree
4861 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4862 tree arg1, int upper1_p)
4864 tree tem;
4865 int result;
4866 int sgn0, sgn1;
4868 /* If neither arg represents infinity, do the normal operation.
4869 Else, if not a comparison, return infinity. Else handle the special
4870 comparison rules. Note that most of the cases below won't occur, but
4871 are handled for consistency. */
4873 if (arg0 != 0 && arg1 != 0)
4875 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4876 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4877 STRIP_NOPS (tem);
4878 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4881 if (TREE_CODE_CLASS (code) != tcc_comparison)
4882 return 0;
4884 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4885 for neither. In real maths, we cannot assume open ended ranges are
4886 the same. But, this is computer arithmetic, where numbers are finite.
4887 We can therefore make the transformation of any unbounded range with
4888 the value Z, Z being greater than any representable number. This permits
4889 us to treat unbounded ranges as equal. */
4890 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4891 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4892 switch (code)
4894 case EQ_EXPR:
4895 result = sgn0 == sgn1;
4896 break;
4897 case NE_EXPR:
4898 result = sgn0 != sgn1;
4899 break;
4900 case LT_EXPR:
4901 result = sgn0 < sgn1;
4902 break;
4903 case LE_EXPR:
4904 result = sgn0 <= sgn1;
4905 break;
4906 case GT_EXPR:
4907 result = sgn0 > sgn1;
4908 break;
4909 case GE_EXPR:
4910 result = sgn0 >= sgn1;
4911 break;
4912 default:
4913 gcc_unreachable ();
4916 return constant_boolean_node (result, type);
4919 /* Helper routine for make_range. Perform one step for it; return the
4920 new expression if the loop should continue, or NULL_TREE if it should
4921 stop. */
4923 tree
4924 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4925 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4926 bool *strict_overflow_p)
4928 tree arg0_type = TREE_TYPE (arg0);
4929 tree n_low, n_high, low = *p_low, high = *p_high;
4930 int in_p = *p_in_p, n_in_p;
4932 switch (code)
4934 case TRUTH_NOT_EXPR:
4935 /* We can only do something if the range is testing for zero. */
4936 if (low == NULL_TREE || high == NULL_TREE
4937 || ! integer_zerop (low) || ! integer_zerop (high))
4938 return NULL_TREE;
4939 *p_in_p = ! in_p;
4940 return arg0;
4942 case EQ_EXPR: case NE_EXPR:
4943 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4944 /* We can only do something if the range is testing for zero
4945 and if the second operand is an integer constant. Note that
4946 saying something is "in" the range we make is done by
4947 complementing IN_P, since IN_P is set for the initial case of
4948 being not equal to zero; "out" is leaving it alone. */
4949 if (low == NULL_TREE || high == NULL_TREE
4950 || ! integer_zerop (low) || ! integer_zerop (high)
4951 || TREE_CODE (arg1) != INTEGER_CST)
4952 return NULL_TREE;
4954 switch (code)
4956 case NE_EXPR: /* - [c, c] */
4957 low = high = arg1;
4958 break;
4959 case EQ_EXPR: /* + [c, c] */
4960 in_p = ! in_p, low = high = arg1;
4961 break;
4962 case GT_EXPR: /* - [-, c] */
4963 low = 0, high = arg1;
4964 break;
4965 case GE_EXPR: /* + [c, -] */
4966 in_p = ! in_p, low = arg1, high = 0;
4967 break;
4968 case LT_EXPR: /* - [c, -] */
4969 low = arg1, high = 0;
4970 break;
4971 case LE_EXPR: /* + [-, c] */
4972 in_p = ! in_p, low = 0, high = arg1;
4973 break;
4974 default:
4975 gcc_unreachable ();
4978 /* If this is an unsigned comparison, we also know that EXP is
4979 greater than or equal to zero. We base the range tests we make
4980 on that fact, so we record it here so we can parse existing
4981 range tests. We test arg0_type since often the return type
4982 of, e.g. EQ_EXPR, is boolean. */
4983 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4985 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4986 in_p, low, high, 1,
4987 build_int_cst (arg0_type, 0),
4988 NULL_TREE))
4989 return NULL_TREE;
4991 in_p = n_in_p, low = n_low, high = n_high;
4993 /* If the high bound is missing, but we have a nonzero low
4994 bound, reverse the range so it goes from zero to the low bound
4995 minus 1. */
4996 if (high == 0 && low && ! integer_zerop (low))
4998 in_p = ! in_p;
4999 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5000 build_int_cst (TREE_TYPE (low), 1), 0);
5001 low = build_int_cst (arg0_type, 0);
5005 *p_low = low;
5006 *p_high = high;
5007 *p_in_p = in_p;
5008 return arg0;
5010 case NEGATE_EXPR:
5011 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5012 low and high are non-NULL, then normalize will DTRT. */
5013 if (!TYPE_UNSIGNED (arg0_type)
5014 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5016 if (low == NULL_TREE)
5017 low = TYPE_MIN_VALUE (arg0_type);
5018 if (high == NULL_TREE)
5019 high = TYPE_MAX_VALUE (arg0_type);
5022 /* (-x) IN [a,b] -> x in [-b, -a] */
5023 n_low = range_binop (MINUS_EXPR, exp_type,
5024 build_int_cst (exp_type, 0),
5025 0, high, 1);
5026 n_high = range_binop (MINUS_EXPR, exp_type,
5027 build_int_cst (exp_type, 0),
5028 0, low, 0);
5029 if (n_high != 0 && TREE_OVERFLOW (n_high))
5030 return NULL_TREE;
5031 goto normalize;
5033 case BIT_NOT_EXPR:
5034 /* ~ X -> -X - 1 */
5035 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5036 build_int_cst (exp_type, 1));
5038 case PLUS_EXPR:
5039 case MINUS_EXPR:
5040 if (TREE_CODE (arg1) != INTEGER_CST)
5041 return NULL_TREE;
5043 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5044 move a constant to the other side. */
5045 if (!TYPE_UNSIGNED (arg0_type)
5046 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5047 return NULL_TREE;
5049 /* If EXP is signed, any overflow in the computation is undefined,
5050 so we don't worry about it so long as our computations on
5051 the bounds don't overflow. For unsigned, overflow is defined
5052 and this is exactly the right thing. */
5053 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5054 arg0_type, low, 0, arg1, 0);
5055 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5056 arg0_type, high, 1, arg1, 0);
5057 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5058 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5059 return NULL_TREE;
5061 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5062 *strict_overflow_p = true;
5064 normalize:
5065 /* Check for an unsigned range which has wrapped around the maximum
5066 value thus making n_high < n_low, and normalize it. */
5067 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5069 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5070 build_int_cst (TREE_TYPE (n_high), 1), 0);
5071 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5072 build_int_cst (TREE_TYPE (n_low), 1), 0);
5074 /* If the range is of the form +/- [ x+1, x ], we won't
5075 be able to normalize it. But then, it represents the
5076 whole range or the empty set, so make it
5077 +/- [ -, - ]. */
5078 if (tree_int_cst_equal (n_low, low)
5079 && tree_int_cst_equal (n_high, high))
5080 low = high = 0;
5081 else
5082 in_p = ! in_p;
5084 else
5085 low = n_low, high = n_high;
5087 *p_low = low;
5088 *p_high = high;
5089 *p_in_p = in_p;
5090 return arg0;
5092 CASE_CONVERT:
5093 case NON_LVALUE_EXPR:
5094 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5095 return NULL_TREE;
5097 if (! INTEGRAL_TYPE_P (arg0_type)
5098 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5099 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5100 return NULL_TREE;
5102 n_low = low, n_high = high;
5104 if (n_low != 0)
5105 n_low = fold_convert_loc (loc, arg0_type, n_low);
5107 if (n_high != 0)
5108 n_high = fold_convert_loc (loc, arg0_type, n_high);
5110 /* If we're converting arg0, which has an unsigned type, to the
5111 signed type of exp, we will be doing the comparison as unsigned.
5112 The tests above have already verified that LOW and HIGH
5113 are both positive.
5115 So we have to ensure that we will handle large unsigned
5116 values the same way that the current signed bounds treat
5117 negative values. */
5119 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5121 tree high_positive;
5122 tree equiv_type;
5123 /* For fixed-point modes, we need to pass the saturating flag
5124 as the 2nd parameter. */
5125 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5126 equiv_type
5127 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5128 TYPE_SATURATING (arg0_type));
5129 else
5130 equiv_type
5131 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5133 /* A range without an upper bound is, naturally, unbounded.
5134 Since convert would have cropped a very large value, use
5135 the max value for the destination type. */
5136 high_positive
5137 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5138 : TYPE_MAX_VALUE (arg0_type);
5140 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5141 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5142 fold_convert_loc (loc, arg0_type,
5143 high_positive),
5144 build_int_cst (arg0_type, 1));
5146 /* If the low bound is specified, "and" the range with the
5147 range for which the original unsigned value will be
5148 positive. */
5149 if (low != 0)
5151 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5152 1, fold_convert_loc (loc, arg0_type,
5153 integer_zero_node),
5154 high_positive))
5155 return NULL_TREE;
5157 in_p = (n_in_p == in_p);
5159 else
5161 /* Otherwise, "or" the range with the range of the input
5162 that will be interpreted as negative. */
5163 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5164 1, fold_convert_loc (loc, arg0_type,
5165 integer_zero_node),
5166 high_positive))
5167 return NULL_TREE;
5169 in_p = (in_p != n_in_p);
5173 *p_low = n_low;
5174 *p_high = n_high;
5175 *p_in_p = in_p;
5176 return arg0;
5178 default:
5179 return NULL_TREE;
5183 /* Given EXP, a logical expression, set the range it is testing into
5184 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5185 actually being tested. *PLOW and *PHIGH will be made of the same
5186 type as the returned expression. If EXP is not a comparison, we
5187 will most likely not be returning a useful value and range. Set
5188 *STRICT_OVERFLOW_P to true if the return value is only valid
5189 because signed overflow is undefined; otherwise, do not change
5190 *STRICT_OVERFLOW_P. */
5192 tree
5193 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5194 bool *strict_overflow_p)
5196 enum tree_code code;
5197 tree arg0, arg1 = NULL_TREE;
5198 tree exp_type, nexp;
5199 int in_p;
5200 tree low, high;
5201 location_t loc = EXPR_LOCATION (exp);
5203 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5204 and see if we can refine the range. Some of the cases below may not
5205 happen, but it doesn't seem worth worrying about this. We "continue"
5206 the outer loop when we've changed something; otherwise we "break"
5207 the switch, which will "break" the while. */
5209 in_p = 0;
5210 low = high = build_int_cst (TREE_TYPE (exp), 0);
5212 while (1)
5214 code = TREE_CODE (exp);
5215 exp_type = TREE_TYPE (exp);
5216 arg0 = NULL_TREE;
5218 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5220 if (TREE_OPERAND_LENGTH (exp) > 0)
5221 arg0 = TREE_OPERAND (exp, 0);
5222 if (TREE_CODE_CLASS (code) == tcc_binary
5223 || TREE_CODE_CLASS (code) == tcc_comparison
5224 || (TREE_CODE_CLASS (code) == tcc_expression
5225 && TREE_OPERAND_LENGTH (exp) > 1))
5226 arg1 = TREE_OPERAND (exp, 1);
5228 if (arg0 == NULL_TREE)
5229 break;
5231 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5232 &high, &in_p, strict_overflow_p);
5233 if (nexp == NULL_TREE)
5234 break;
5235 exp = nexp;
5238 /* If EXP is a constant, we can evaluate whether this is true or false. */
5239 if (TREE_CODE (exp) == INTEGER_CST)
5241 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5242 exp, 0, low, 0))
5243 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5244 exp, 1, high, 1)));
5245 low = high = 0;
5246 exp = 0;
5249 *pin_p = in_p, *plow = low, *phigh = high;
5250 return exp;
5253 /* Returns TRUE if the [LOW, HIGH] range check can be optimized to
5254 a bitwise check, i.e. when
5255 LOW == 0xXX...X00...0
5256 HIGH == 0xXX...X11...1
5257 Return the corresponding mask in MASK and stem in VALUE. */
5259 static bool
5260 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5261 tree *value)
5263 if (TREE_CODE (low) != INTEGER_CST
5264 || TREE_CODE (high) != INTEGER_CST)
5265 return false;
5267 unsigned prec = TYPE_PRECISION (type);
5268 wide_int lo = wi::to_wide (low, prec);
5269 wide_int hi = wi::to_wide (high, prec);
5271 wide_int end_mask = lo ^ hi;
5272 if ((end_mask & (end_mask + 1)) != 0
5273 || (lo & end_mask) != 0)
5274 return false;
5276 wide_int stem_mask = ~end_mask;
5277 wide_int stem = lo & stem_mask;
5278 if (stem != (hi & stem_mask))
5279 return false;
5281 *mask = wide_int_to_tree (type, stem_mask);
5282 *value = wide_int_to_tree (type, stem);
5284 return true;
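/* Editorial example (not part of GCC): for LOW == 0x30 and HIGH == 0x3f
   we get end_mask == low ^ high == 0x0f, a block of trailing ones, and
   stem == 0x30, so the range check collapses to the single masked
   compare below.  */

static int
example_maskable_range (unsigned int x)
{
  /* Same as 0x30 <= x && x <= 0x3f.  */
  return (x & ~0x0fu) == 0x30u;
}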
5287 /* Helper routine for build_range_check and match.pd. Return the type to
5288 perform the check or NULL if it shouldn't be optimized. */
5290 tree
5291 range_check_type (tree etype)
5293 /* First make sure that arithmetic in this type is valid, then make sure
5294 that it wraps around. */
5295 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5296 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5298 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5300 tree utype, minv, maxv;
5302 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5303 for the type in question, as we rely on this here. */
5304 utype = unsigned_type_for (etype);
5305 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5306 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5307 build_int_cst (TREE_TYPE (maxv), 1), 1);
5308 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5310 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5311 minv, 1, maxv, 1)))
5312 etype = utype;
5313 else
5314 return NULL_TREE;
5316 else if (POINTER_TYPE_P (etype))
5317 etype = unsigned_type_for (etype);
5318 return etype;
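/* Editorial check (not part of GCC) of the wrap-around property relied
   on above: viewed as unsigned, the signed maximum plus one must equal
   the signed minimum, as it does in two's complement.  INT_MAX and
   INT_MIN come from <limits.h>, already pulled in via system.h.  */

static int
example_wraps_around (void)
{
  return (unsigned int) INT_MAX + 1u == (unsigned int) INT_MIN;  /* 1 */
}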
5321 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5322 type, TYPE, return an expression to test if EXP is in (or out of, depending
5323 on IN_P) the range. Return 0 if the test couldn't be created. */
5325 tree
5326 build_range_check (location_t loc, tree type, tree exp, int in_p,
5327 tree low, tree high)
5329 tree etype = TREE_TYPE (exp), mask, value;
5331 /* Disable this optimization for function pointer expressions
5332 on targets that require function pointer canonicalization. */
5333 if (targetm.have_canonicalize_funcptr_for_compare ()
5334 && POINTER_TYPE_P (etype)
5335 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5336 return NULL_TREE;
5338 if (! in_p)
5340 value = build_range_check (loc, type, exp, 1, low, high);
5341 if (value != 0)
5342 return invert_truthvalue_loc (loc, value);
5344 return 0;
5347 if (low == 0 && high == 0)
5348 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5350 if (low == 0)
5351 return fold_build2_loc (loc, LE_EXPR, type, exp,
5352 fold_convert_loc (loc, etype, high));
5354 if (high == 0)
5355 return fold_build2_loc (loc, GE_EXPR, type, exp,
5356 fold_convert_loc (loc, etype, low));
5358 if (operand_equal_p (low, high, 0))
5359 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5360 fold_convert_loc (loc, etype, low));
5362 if (TREE_CODE (exp) == BIT_AND_EXPR
5363 && maskable_range_p (low, high, etype, &mask, &value))
5364 return fold_build2_loc (loc, EQ_EXPR, type,
5365 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5366 exp, mask),
5367 value);
5369 if (integer_zerop (low))
5371 if (! TYPE_UNSIGNED (etype))
5373 etype = unsigned_type_for (etype);
5374 high = fold_convert_loc (loc, etype, high);
5375 exp = fold_convert_loc (loc, etype, exp);
5377 return build_range_check (loc, type, exp, 1, 0, high);
5380 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5381 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5383 int prec = TYPE_PRECISION (etype);
5385 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5387 if (TYPE_UNSIGNED (etype))
5389 tree signed_etype = signed_type_for (etype);
5390 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5391 etype
5392 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5393 else
5394 etype = signed_etype;
5395 exp = fold_convert_loc (loc, etype, exp);
5397 return fold_build2_loc (loc, GT_EXPR, type, exp,
5398 build_int_cst (etype, 0));
5402 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5403 This requires wrap-around arithmetic for the type of the expression. */
5404 etype = range_check_type (etype);
5405 if (etype == NULL_TREE)
5406 return NULL_TREE;
5408 high = fold_convert_loc (loc, etype, high);
5409 low = fold_convert_loc (loc, etype, low);
5410 exp = fold_convert_loc (loc, etype, exp);
5412 value = const_binop (MINUS_EXPR, high, low);
5414 if (value != 0 && !TREE_OVERFLOW (value))
5415 return build_range_check (loc, type,
5416 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5417 1, build_int_cst (etype, 0), value);
5419 return 0;
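/* Editorial illustration (not part of GCC) of the special case handled
   above for (c >= 1) && (c <= 127): with an 8-bit unsigned value, the
   same bits viewed as a signed char are positive exactly for 1..127
   (assuming the usual two's complement conversion).  */

static int
example_signed_view (unsigned char c)
{
  /* Same as c >= 1 && c <= 127.  */
  return (signed char) c > 0;
}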
5422 /* Return the predecessor of VAL in its type, handling the infinite case. */
5424 static tree
5425 range_predecessor (tree val)
5427 tree type = TREE_TYPE (val);
5429 if (INTEGRAL_TYPE_P (type)
5430 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5431 return 0;
5432 else
5433 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5434 build_int_cst (TREE_TYPE (val), 1), 0);
5437 /* Return the successor of VAL in its type, handling the infinite case. */
5439 static tree
5440 range_successor (tree val)
5442 tree type = TREE_TYPE (val);
5444 if (INTEGRAL_TYPE_P (type)
5445 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5446 return 0;
5447 else
5448 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5449 build_int_cst (TREE_TYPE (val), 1), 0);
5452 /* Given two ranges, see if we can merge them into one. Return 1 if we
5453 can, 0 if we can't. Set the output range into the specified parameters. */
5455 bool
5456 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5457 tree high0, int in1_p, tree low1, tree high1)
5459 int no_overlap;
5460 int subset;
5461 int temp;
5462 tree tem;
5463 int in_p;
5464 tree low, high;
5465 int lowequal = ((low0 == 0 && low1 == 0)
5466 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5467 low0, 0, low1, 0)));
5468 int highequal = ((high0 == 0 && high1 == 0)
5469 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5470 high0, 1, high1, 1)));
5472 /* Make range 0 be the range that starts first, or ends last if they
5473 start at the same value. Swap them if that isn't the case. */
5474 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5475 low0, 0, low1, 0))
5476 || (lowequal
5477 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5478 high1, 1, high0, 1))))
5480 temp = in0_p, in0_p = in1_p, in1_p = temp;
5481 tem = low0, low0 = low1, low1 = tem;
5482 tem = high0, high0 = high1, high1 = tem;
5485 /* If the second range is != high1, where high1 is the maximum value
5486 of its type, first try merging with the < high1 range. */
5487 if (low1
5488 && high1
5489 && TREE_CODE (low1) == INTEGER_CST
5490 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5491 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5492 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5493 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5494 && operand_equal_p (low1, high1, 0))
5496 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5497 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5498 !in1_p, NULL_TREE, range_predecessor (low1)))
5499 return true;
5500 /* Similarly, if the second range is != low1, where low1 is the minimum
5501 value of its type, first try merging with the > low1 range. */
5502 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5503 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5504 !in1_p, range_successor (low1), NULL_TREE))
5505 return true;
5508 /* Now flag two cases, whether the ranges are disjoint or whether the
5509 second range is totally subsumed in the first. Note that the tests
5510 below are simplified by the ones above. */
5511 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5512 high0, 1, low1, 0));
5513 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5514 high1, 1, high0, 1));
5516 /* We now have four cases, depending on whether we are including or
5517 excluding the two ranges. */
5518 if (in0_p && in1_p)
5520 /* If they don't overlap, the result is false. If the second range
5521 is a subset it is the result. Otherwise, the range is from the start
5522 of the second to the end of the first. */
5523 if (no_overlap)
5524 in_p = 0, low = high = 0;
5525 else if (subset)
5526 in_p = 1, low = low1, high = high1;
5527 else
5528 in_p = 1, low = low1, high = high0;
5531 else if (in0_p && ! in1_p)
5533 /* If they don't overlap, the result is the first range. If they are
5534 equal, the result is false. If the second range is a subset of the
5535 first, and the ranges begin at the same place, we go from just after
5536 the end of the second range to the end of the first. If the second
5537 range is not a subset of the first, or if it is a subset and both
5538 ranges end at the same place, the range starts at the start of the
5539 first range and ends just before the second range.
5540 Otherwise, we can't describe this as a single range. */
5541 if (no_overlap)
5542 in_p = 1, low = low0, high = high0;
5543 else if (lowequal && highequal)
5544 in_p = 0, low = high = 0;
5545 else if (subset && lowequal)
5547 low = range_successor (high1);
5548 high = high0;
5549 in_p = 1;
5550 if (low == 0)
5552 /* We are in the weird situation where high0 > high1 but
5553 high1 has no successor. Punt. */
5554 return 0;
5557 else if (! subset || highequal)
5559 low = low0;
5560 high = range_predecessor (low1);
5561 in_p = 1;
5562 if (high == 0)
5564 /* low0 < low1 but low1 has no predecessor. Punt. */
5565 return 0;
5568 else
5569 return 0;
5572 else if (! in0_p && in1_p)
5574 /* If they don't overlap, the result is the second range. If the second
5575 is a subset of the first, the result is false. Otherwise,
5576 the range starts just after the first range and ends at the
5577 end of the second. */
5578 if (no_overlap)
5579 in_p = 1, low = low1, high = high1;
5580 else if (subset || highequal)
5581 in_p = 0, low = high = 0;
5582 else
5584 low = range_successor (high0);
5585 high = high1;
5586 in_p = 1;
5587 if (low == 0)
5589 /* high1 > high0 but high0 has no successor. Punt. */
5590 return 0;
5595 else
5597 /* The case where we are excluding both ranges. Here the complex case
5598 is if they don't overlap. In that case, the only time we have a
5599 range is if they are adjacent. If the second is a subset of the
5600 first, the result is the first. Otherwise, the range to exclude
5601 starts at the beginning of the first range and ends at the end of the
5602 second. */
5603 if (no_overlap)
5605 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5606 range_successor (high0),
5607 1, low1, 0)))
5608 in_p = 0, low = low0, high = high1;
5609 else
5611 /* Canonicalize - [min, x] into - [-, x]. */
5612 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5613 switch (TREE_CODE (TREE_TYPE (low0)))
5615 case ENUMERAL_TYPE:
5616 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5617 GET_MODE_BITSIZE
5618 (TYPE_MODE (TREE_TYPE (low0)))))
5619 break;
5620 /* FALLTHROUGH */
5621 case INTEGER_TYPE:
5622 if (tree_int_cst_equal (low0,
5623 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5624 low0 = 0;
5625 break;
5626 case POINTER_TYPE:
5627 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5628 && integer_zerop (low0))
5629 low0 = 0;
5630 break;
5631 default:
5632 break;
5635 /* Canonicalize - [x, max] into - [x, -]. */
5636 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5637 switch (TREE_CODE (TREE_TYPE (high1)))
5639 case ENUMERAL_TYPE:
5640 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5641 GET_MODE_BITSIZE
5642 (TYPE_MODE (TREE_TYPE (high1)))))
5643 break;
5644 /* FALLTHROUGH */
5645 case INTEGER_TYPE:
5646 if (tree_int_cst_equal (high1,
5647 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5648 high1 = 0;
5649 break;
5650 case POINTER_TYPE:
5651 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5652 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5653 high1, 1,
5654 build_int_cst (TREE_TYPE (high1), 1),
5655 1)))
5656 high1 = 0;
5657 break;
5658 default:
5659 break;
5662 /* The ranges might also be adjacent between the maximum and
5663 minimum values of the given type. For
5664 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5665 return + [x + 1, y - 1]. */
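/* Illustrative example (not from the original comments): for an
   unsigned char operand, merging the exclusions - [-, 5] and
   - [10, -] yields the single inclusive range + [6, 9].  */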
5666 if (low0 == 0 && high1 == 0)
5668 low = range_successor (high0);
5669 high = range_predecessor (low1);
5670 if (low == 0 || high == 0)
5671 return 0;
5673 in_p = 1;
5675 else
5676 return 0;
5679 else if (subset)
5680 in_p = 0, low = low0, high = high0;
5681 else
5682 in_p = 0, low = low0, high = high1;
5685 *pin_p = in_p, *plow = low, *phigh = high;
5686 return 1;
5690 /* Subroutine of fold, looking inside expressions of the form
5691 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5692 of the COND_EXPR. This function is also used to optimize
5693 A op B ? C : A by reversing the comparison first.
5695 Return a folded expression whose code is not a COND_EXPR
5696 anymore, or NULL_TREE if no folding opportunity is found. */
5698 static tree
5699 fold_cond_expr_with_comparison (location_t loc, tree type,
5700 tree arg0, tree arg1, tree arg2)
5702 enum tree_code comp_code = TREE_CODE (arg0);
5703 tree arg00 = TREE_OPERAND (arg0, 0);
5704 tree arg01 = TREE_OPERAND (arg0, 1);
5705 tree arg1_type = TREE_TYPE (arg1);
5706 tree tem;
5708 STRIP_NOPS (arg1);
5709 STRIP_NOPS (arg2);
5711 /* If we have A op 0 ? A : -A, consider applying the following
5712 transformations:
5714 A == 0? A : -A same as -A
5715 A != 0? A : -A same as A
5716 A >= 0? A : -A same as abs (A)
5717 A > 0? A : -A same as abs (A)
5718 A <= 0? A : -A same as -abs (A)
5719 A < 0? A : -A same as -abs (A)
5721 None of these transformations work for modes with signed
5722 zeros. If A is +/-0, the first two transformations will
5723 change the sign of the result (from +0 to -0, or vice
5724 versa). The last four will fix the sign of the result,
5725 even though the original expressions could be positive or
5726 negative, depending on the sign of A.
5728 Note that all these transformations are correct if A is
5729 NaN, since the two alternatives (A and -A) are also NaNs. */
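/* For instance (illustrative): when signed zeros are not honored
   (e.g. under -fno-signed-zeros), "x > 0.0 ? x : -x" folds to
   ABS_EXPR <x> and "x < 0.0 ? x : -x" folds to -ABS_EXPR <x>.  */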
5730 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5731 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5732 ? real_zerop (arg01)
5733 : integer_zerop (arg01))
5734 && ((TREE_CODE (arg2) == NEGATE_EXPR
5735 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5736 /* In the case that A is of the form X-Y, '-A' (arg2) may
5737 have already been folded to Y-X; check for that. */
5738 || (TREE_CODE (arg1) == MINUS_EXPR
5739 && TREE_CODE (arg2) == MINUS_EXPR
5740 && operand_equal_p (TREE_OPERAND (arg1, 0),
5741 TREE_OPERAND (arg2, 1), 0)
5742 && operand_equal_p (TREE_OPERAND (arg1, 1),
5743 TREE_OPERAND (arg2, 0), 0))))
5744 switch (comp_code)
5746 case EQ_EXPR:
5747 case UNEQ_EXPR:
5748 tem = fold_convert_loc (loc, arg1_type, arg1);
5749 return fold_convert_loc (loc, type, negate_expr (tem));
5750 case NE_EXPR:
5751 case LTGT_EXPR:
5752 return fold_convert_loc (loc, type, arg1);
5753 case UNGE_EXPR:
5754 case UNGT_EXPR:
5755 if (flag_trapping_math)
5756 break;
5757 /* Fall through. */
5758 case GE_EXPR:
5759 case GT_EXPR:
5760 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5761 break;
5762 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5763 return fold_convert_loc (loc, type, tem);
5764 case UNLE_EXPR:
5765 case UNLT_EXPR:
5766 if (flag_trapping_math)
5767 break;
5768 /* FALLTHRU */
5769 case LE_EXPR:
5770 case LT_EXPR:
5771 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5772 break;
5773 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5774 return negate_expr (fold_convert_loc (loc, type, tem));
5775 default:
5776 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5777 break;
5780 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5781 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5782 both transformations are correct when A is NaN: A != 0
5783 is then true, and A == 0 is false. */
5785 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5786 && integer_zerop (arg01) && integer_zerop (arg2))
5788 if (comp_code == NE_EXPR)
5789 return fold_convert_loc (loc, type, arg1);
5790 else if (comp_code == EQ_EXPR)
5791 return build_zero_cst (type);
5794 /* Try some transformations of A op B ? A : B.
5796 A == B? A : B same as B
5797 A != B? A : B same as A
5798 A >= B? A : B same as max (A, B)
5799 A > B? A : B same as max (B, A)
5800 A <= B? A : B same as min (A, B)
5801 A < B? A : B same as min (B, A)
5803 As above, these transformations don't work in the presence
5804 of signed zeros. For example, if A and B are zeros of
5805 opposite sign, the first two transformations will change
5806 the sign of the result. In the last four, the original
5807 expressions give different results for (A=+0, B=-0) and
5808 (A=-0, B=+0), but the transformed expressions do not.
5810 The first two transformations are correct if either A or B
5811 is a NaN. In the first transformation, the condition will
5812 be false, and B will indeed be chosen. In the case of the
5813 second transformation, the condition A != B will be true,
5814 and A will be chosen.
5816 The conversions to max() and min() are not correct if B is
5817 a number and A is not. The conditions in the original
5818 expressions will be false, so all four give B. The min()
5819 and max() versions would give a NaN instead. */
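/* Illustrative examples: for integer operands, "a <= b ? a : b"
   becomes MIN_EXPR <a, b> while "a < b ? a : b" becomes
   MIN_EXPR <b, a>; in each case the operand chosen when the two
   compare equal is placed first, as the code below explains.  */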
5820 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5821 && operand_equal_for_comparison_p (arg01, arg2)
5822 /* Avoid these transformations if the COND_EXPR may be used
5823 as an lvalue in the C++ front-end. PR c++/19199. */
5824 && (in_gimple_form
5825 || VECTOR_TYPE_P (type)
5826 || (! lang_GNU_CXX ()
5827 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5828 || ! maybe_lvalue_p (arg1)
5829 || ! maybe_lvalue_p (arg2)))
5831 tree comp_op0 = arg00;
5832 tree comp_op1 = arg01;
5833 tree comp_type = TREE_TYPE (comp_op0);
5835 switch (comp_code)
5837 case EQ_EXPR:
5838 return fold_convert_loc (loc, type, arg2);
5839 case NE_EXPR:
5840 return fold_convert_loc (loc, type, arg1);
5841 case LE_EXPR:
5842 case LT_EXPR:
5843 case UNLE_EXPR:
5844 case UNLT_EXPR:
5845 /* In C++ a ?: expression can be an lvalue, so put the
5846 operand which will be used if they are equal first
5847 so that we can convert this back to the
5848 corresponding COND_EXPR. */
5849 if (!HONOR_NANS (arg1))
5851 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5852 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5853 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5854 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5855 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5856 comp_op1, comp_op0);
5857 return fold_convert_loc (loc, type, tem);
5859 break;
5860 case GE_EXPR:
5861 case GT_EXPR:
5862 case UNGE_EXPR:
5863 case UNGT_EXPR:
5864 if (!HONOR_NANS (arg1))
5866 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5867 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5868 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5869 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5870 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5871 comp_op1, comp_op0);
5872 return fold_convert_loc (loc, type, tem);
5874 break;
5875 case UNEQ_EXPR:
5876 if (!HONOR_NANS (arg1))
5877 return fold_convert_loc (loc, type, arg2);
5878 break;
5879 case LTGT_EXPR:
5880 if (!HONOR_NANS (arg1))
5881 return fold_convert_loc (loc, type, arg1);
5882 break;
5883 default:
5884 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5885 break;
5889 return NULL_TREE;
5894 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5895 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5896 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5897 false) >= 2)
5898 #endif
5900 /* EXP is some logical combination of boolean tests. See if we can
5901 merge it into some range test. Return the new tree if so. */
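/* For example (illustrative): each comparison in
   "ch >= '0' && ch <= '9'" describes a range over CH; the merged
   range [48, 57] can then be checked with, conceptually, the single
   unsigned comparison (unsigned char) (ch - 48) <= 9.  */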
5903 static tree
5904 fold_range_test (location_t loc, enum tree_code code, tree type,
5905 tree op0, tree op1)
5907 int or_op = (code == TRUTH_ORIF_EXPR
5908 || code == TRUTH_OR_EXPR);
5909 int in0_p, in1_p, in_p;
5910 tree low0, low1, low, high0, high1, high;
5911 bool strict_overflow_p = false;
5912 tree tem, lhs, rhs;
5913 const char * const warnmsg = G_("assuming signed overflow does not occur "
5914 "when simplifying range test");
5916 if (!INTEGRAL_TYPE_P (type))
5917 return 0;
5919 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5920 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5922 /* If this is an OR operation, invert both sides; we will invert
5923 again at the end. */
5924 if (or_op)
5925 in0_p = ! in0_p, in1_p = ! in1_p;
5927 /* If both expressions are the same, if we can merge the ranges, and we
5928 can build the range test, return it or its inversion. If one of the
5929 ranges is always true or always false, consider it to be the same
5930 expression as the other. */
5931 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5932 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5933 in1_p, low1, high1)
5934 && (tem = (build_range_check (loc, type,
5935 lhs != 0 ? lhs
5936 : rhs != 0 ? rhs : integer_zero_node,
5937 in_p, low, high))) != 0)
5939 if (strict_overflow_p)
5940 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5941 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5944 /* On machines where branches are expensive, if this is a
5945 short-circuited branch and the underlying object on both sides
5946 is the same, make a non-short-circuit operation. */
5947 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
5948 if (param_logical_op_non_short_circuit != -1)
5949 logical_op_non_short_circuit
5950 = param_logical_op_non_short_circuit;
5951 if (logical_op_non_short_circuit
5952 && !flag_sanitize_coverage
5953 && lhs != 0 && rhs != 0
5954 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5955 && operand_equal_p (lhs, rhs, 0))
5957 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5958 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5959 which cases we can't do this. */
5960 if (simple_operand_p (lhs))
5961 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5962 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5963 type, op0, op1);
5965 else if (!lang_hooks.decls.global_bindings_p ()
5966 && !CONTAINS_PLACEHOLDER_P (lhs))
5968 tree common = save_expr (lhs);
5970 if ((lhs = build_range_check (loc, type, common,
5971 or_op ? ! in0_p : in0_p,
5972 low0, high0)) != 0
5973 && (rhs = build_range_check (loc, type, common,
5974 or_op ? ! in1_p : in1_p,
5975 low1, high1)) != 0)
5977 if (strict_overflow_p)
5978 fold_overflow_warning (warnmsg,
5979 WARN_STRICT_OVERFLOW_COMPARISON);
5980 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5981 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5982 type, lhs, rhs);
5987 return 0;
5990 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5991 bit value. Arrange things so the extra bits will be set to zero if and
5992 only if C is sign-extended to its full width. If MASK is nonzero,
5993 it is an INTEGER_CST that should be AND'ed with the extra bits. */
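/* Worked example (illustrative, assuming a 32-bit mode, P == 8,
   UNSIGNEDP == 0 and no MASK): unextend (0xffffffff, ...) yields
   0x000000ff, whose extra bits are zero because the low 8 bits
   sign-extend to the full value, while unextend (0x000000ff, ...)
   yields 0xffffffff, flagging that 0xff was not sign-extended.  */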
5995 static tree
5996 unextend (tree c, int p, int unsignedp, tree mask)
5998 tree type = TREE_TYPE (c);
5999 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6000 tree temp;
6002 if (p == modesize || unsignedp)
6003 return c;
6005 /* We work by getting just the sign bit into the low-order bit, then
6006 into the high-order bit, then sign-extend. We then XOR that value
6007 with C. */
6008 temp = build_int_cst (TREE_TYPE (c),
6009 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6011 /* We must use a signed type in order to get an arithmetic right shift.
6012 However, we must also avoid introducing accidental overflows, so that
6013 a subsequent call to integer_zerop will work. Hence we must
6014 do the type conversion here. At this point, the constant is either
6015 zero or one, and the conversion to a signed type can never overflow.
6016 We could get an overflow if this conversion is done anywhere else. */
6017 if (TYPE_UNSIGNED (type))
6018 temp = fold_convert (signed_type_for (type), temp);
6020 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6021 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6022 if (mask != 0)
6023 temp = const_binop (BIT_AND_EXPR, temp,
6024 fold_convert (TREE_TYPE (c), mask));
6025 /* If necessary, convert the type back to match the type of C. */
6026 if (TYPE_UNSIGNED (type))
6027 temp = fold_convert (type, temp);
6029 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
6032 /* For an expression that has the form
6033 (A && B) || ~B
6035 (A || B) && ~B,
6036 we can drop one of the inner expressions and simplify to
6037 A || ~B
6039 A && ~B
6040 LOC is the location of the resulting expression. OP is the inner
6041 logical operation (the left-hand side in the examples above), while CMPOP
6042 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6043 removing a condition that guards another, as in
6044 (A != NULL && A->...) || A == NULL
6045 which we must not transform. If RHS_ONLY is true, only eliminate the
6046 right-most operand of the inner logical operation. */
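/* For example (illustrative): with OP == "a < 0 && b < 0" and
   CMPOP == "a >= 0", the lhs "a < 0" is the inversion of CMPOP, so
   "(a < 0 && b < 0) || a >= 0" simplifies to "b < 0 || a >= 0".  */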
6048 static tree
6049 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6050 bool rhs_only)
6052 tree type = TREE_TYPE (cmpop);
6053 enum tree_code code = TREE_CODE (cmpop);
6054 enum tree_code truthop_code = TREE_CODE (op);
6055 tree lhs = TREE_OPERAND (op, 0);
6056 tree rhs = TREE_OPERAND (op, 1);
6057 tree orig_lhs = lhs, orig_rhs = rhs;
6058 enum tree_code rhs_code = TREE_CODE (rhs);
6059 enum tree_code lhs_code = TREE_CODE (lhs);
6060 enum tree_code inv_code;
6062 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6063 return NULL_TREE;
6065 if (TREE_CODE_CLASS (code) != tcc_comparison)
6066 return NULL_TREE;
6068 if (rhs_code == truthop_code)
6070 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6071 if (newrhs != NULL_TREE)
6073 rhs = newrhs;
6074 rhs_code = TREE_CODE (rhs);
6077 if (lhs_code == truthop_code && !rhs_only)
6079 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6080 if (newlhs != NULL_TREE)
6082 lhs = newlhs;
6083 lhs_code = TREE_CODE (lhs);
6087 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6088 if (inv_code == rhs_code
6089 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6090 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6091 return lhs;
6092 if (!rhs_only && inv_code == lhs_code
6093 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6094 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6095 return rhs;
6096 if (rhs != orig_rhs || lhs != orig_lhs)
6097 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6098 lhs, rhs);
6099 return NULL_TREE;
6102 /* Find ways of folding logical expressions of LHS and RHS:
6103 Try to merge two comparisons to the same innermost item.
6104 Look for range tests like "ch >= '0' && ch <= '9'".
6105 Look for combinations of simple terms on machines with expensive branches
6106 and evaluate the RHS unconditionally.
6108 For example, if we have p->a == 2 && p->b == 4 and we can make an
6109 object large enough to span both A and B, we can do this with a comparison
6110 against the object ANDed with a mask.
6112 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6113 operations to do this with one comparison.
6115 We check for both normal comparisons and the BIT_AND_EXPRs made by
6116 this function and the one above.
6118 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6119 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6121 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6122 two operands.
6124 We return the simplified tree or 0 if no optimization is possible. */
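/* Illustrative sketch (not from the original comments): given
   struct S { unsigned char a, b; } *p, the test
   "p->a == 2 && p->b == 4" can conceptually become one wider load
   and compare, (*(unsigned short *) p) == 0x0402 on a little-endian
   target, by merging the two adjacent fields.  */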
6126 static tree
6127 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6128 tree lhs, tree rhs)
6130 /* If this is the "or" of two comparisons, we can do something if
6131 the comparisons are NE_EXPR. If this is the "and", we can do something
6132 if the comparisons are EQ_EXPR. I.e.,
6133 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6135 WANTED_CODE is this operation code. For single bit fields, we can
6136 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6137 comparison for one-bit fields. */
6139 enum tree_code wanted_code;
6140 enum tree_code lcode, rcode;
6141 tree ll_arg, lr_arg, rl_arg, rr_arg;
6142 tree ll_inner, lr_inner, rl_inner, rr_inner;
6143 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6144 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6145 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6146 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6147 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6148 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6149 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6150 scalar_int_mode lnmode, rnmode;
6151 tree ll_mask, lr_mask, rl_mask, rr_mask;
6152 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6153 tree l_const, r_const;
6154 tree lntype, rntype, result;
6155 HOST_WIDE_INT first_bit, end_bit;
6156 int volatilep;
6158 /* Start by getting the comparison codes. Fail if anything is volatile.
6159 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6160 it were surrounded with a NE_EXPR. */
6162 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6163 return 0;
6165 lcode = TREE_CODE (lhs);
6166 rcode = TREE_CODE (rhs);
6168 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6170 lhs = build2 (NE_EXPR, truth_type, lhs,
6171 build_int_cst (TREE_TYPE (lhs), 0));
6172 lcode = NE_EXPR;
6175 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6177 rhs = build2 (NE_EXPR, truth_type, rhs,
6178 build_int_cst (TREE_TYPE (rhs), 0));
6179 rcode = NE_EXPR;
6182 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6183 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6184 return 0;
6186 ll_arg = TREE_OPERAND (lhs, 0);
6187 lr_arg = TREE_OPERAND (lhs, 1);
6188 rl_arg = TREE_OPERAND (rhs, 0);
6189 rr_arg = TREE_OPERAND (rhs, 1);
6191 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6192 if (simple_operand_p (ll_arg)
6193 && simple_operand_p (lr_arg))
6195 if (operand_equal_p (ll_arg, rl_arg, 0)
6196 && operand_equal_p (lr_arg, rr_arg, 0))
6198 result = combine_comparisons (loc, code, lcode, rcode,
6199 truth_type, ll_arg, lr_arg);
6200 if (result)
6201 return result;
6203 else if (operand_equal_p (ll_arg, rr_arg, 0)
6204 && operand_equal_p (lr_arg, rl_arg, 0))
6206 result = combine_comparisons (loc, code, lcode,
6207 swap_tree_comparison (rcode),
6208 truth_type, ll_arg, lr_arg);
6209 if (result)
6210 return result;
6214 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6215 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6217 /* If the RHS can be evaluated unconditionally and its operands are
6218 simple, it wins to evaluate the RHS unconditionally on machines
6219 with expensive branches. In this case, this isn't a comparison
6220 that can be merged. */
6222 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6223 false) >= 2
6224 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6225 && simple_operand_p (rl_arg)
6226 && simple_operand_p (rr_arg))
6228 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6229 if (code == TRUTH_OR_EXPR
6230 && lcode == NE_EXPR && integer_zerop (lr_arg)
6231 && rcode == NE_EXPR && integer_zerop (rr_arg)
6232 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6233 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6234 return build2_loc (loc, NE_EXPR, truth_type,
6235 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6236 ll_arg, rl_arg),
6237 build_int_cst (TREE_TYPE (ll_arg), 0));
6239 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6240 if (code == TRUTH_AND_EXPR
6241 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6242 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6243 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6244 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6245 return build2_loc (loc, EQ_EXPR, truth_type,
6246 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6247 ll_arg, rl_arg),
6248 build_int_cst (TREE_TYPE (ll_arg), 0));
6251 /* See if the comparisons can be merged. Then get all the parameters for
6252 each side. */
6254 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6255 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6256 return 0;
6258 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6259 volatilep = 0;
6260 ll_inner = decode_field_reference (loc, &ll_arg,
6261 &ll_bitsize, &ll_bitpos, &ll_mode,
6262 &ll_unsignedp, &ll_reversep, &volatilep,
6263 &ll_mask, &ll_and_mask);
6264 lr_inner = decode_field_reference (loc, &lr_arg,
6265 &lr_bitsize, &lr_bitpos, &lr_mode,
6266 &lr_unsignedp, &lr_reversep, &volatilep,
6267 &lr_mask, &lr_and_mask);
6268 rl_inner = decode_field_reference (loc, &rl_arg,
6269 &rl_bitsize, &rl_bitpos, &rl_mode,
6270 &rl_unsignedp, &rl_reversep, &volatilep,
6271 &rl_mask, &rl_and_mask);
6272 rr_inner = decode_field_reference (loc, &rr_arg,
6273 &rr_bitsize, &rr_bitpos, &rr_mode,
6274 &rr_unsignedp, &rr_reversep, &volatilep,
6275 &rr_mask, &rr_and_mask);
6277 /* The inner operation on the lhs of each comparison must be the same
6278 if we are to be able to do anything.
6279 Then see if we have constants. If not, the same must be true for
6280 the rhs's. */
6281 if (volatilep
6282 || ll_reversep != rl_reversep
6283 || ll_inner == 0 || rl_inner == 0
6284 || ! operand_equal_p (ll_inner, rl_inner, 0))
6285 return 0;
6287 if (TREE_CODE (lr_arg) == INTEGER_CST
6288 && TREE_CODE (rr_arg) == INTEGER_CST)
6290 l_const = lr_arg, r_const = rr_arg;
6291 lr_reversep = ll_reversep;
6293 else if (lr_reversep != rr_reversep
6294 || lr_inner == 0 || rr_inner == 0
6295 || ! operand_equal_p (lr_inner, rr_inner, 0))
6296 return 0;
6297 else
6298 l_const = r_const = 0;
6300 /* If either comparison code is not correct for our logical operation,
6301 fail. However, we can convert a one-bit comparison against zero into
6302 the opposite comparison against that bit being set in the field. */
6304 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6305 if (lcode != wanted_code)
6307 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6309 /* Make the left operand unsigned, since we are only interested
6310 in the value of one bit. Otherwise we are doing the wrong
6311 thing below. */
6312 ll_unsignedp = 1;
6313 l_const = ll_mask;
6315 else
6316 return 0;
6319 /* This is analogous to the code for l_const above. */
6320 if (rcode != wanted_code)
6322 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6324 rl_unsignedp = 1;
6325 r_const = rl_mask;
6327 else
6328 return 0;
6331 /* See if we can find a mode that contains both fields being compared on
6332 the left. If we can't, fail. Otherwise, update all constants and masks
6333 to be relative to a field of that size. */
6334 first_bit = MIN (ll_bitpos, rl_bitpos);
6335 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6336 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6337 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6338 volatilep, &lnmode))
6339 return 0;
6341 lnbitsize = GET_MODE_BITSIZE (lnmode);
6342 lnbitpos = first_bit & ~ (lnbitsize - 1);
6343 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6344 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6346 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6348 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6349 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6352 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6353 size_int (xll_bitpos));
6354 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6355 size_int (xrl_bitpos));
6357 if (l_const)
6359 l_const = fold_convert_loc (loc, lntype, l_const);
6360 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6361 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6362 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6363 fold_build1_loc (loc, BIT_NOT_EXPR,
6364 lntype, ll_mask))))
6366 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6368 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6371 if (r_const)
6373 r_const = fold_convert_loc (loc, lntype, r_const);
6374 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6375 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6376 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6377 fold_build1_loc (loc, BIT_NOT_EXPR,
6378 lntype, rl_mask))))
6380 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6382 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6386 /* If the right sides are not constant, do the same for them. Also,
6387 disallow this optimization if a size, signedness or storage order
6388 mismatch occurs between the left and right sides. */
6389 if (l_const == 0)
6391 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6392 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6393 || ll_reversep != lr_reversep
6394 /* Make sure the two fields on the right
6395 correspond to the left without being swapped. */
6396 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6397 return 0;
6399 first_bit = MIN (lr_bitpos, rr_bitpos);
6400 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6401 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6402 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6403 volatilep, &rnmode))
6404 return 0;
6406 rnbitsize = GET_MODE_BITSIZE (rnmode);
6407 rnbitpos = first_bit & ~ (rnbitsize - 1);
6408 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6409 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6411 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6413 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6414 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6417 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6418 rntype, lr_mask),
6419 size_int (xlr_bitpos));
6420 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6421 rntype, rr_mask),
6422 size_int (xrr_bitpos));
6424 /* Make a mask that corresponds to both fields being compared.
6425 Do this for both items being compared. If the operands are the
6426 same size and the bits being compared are in the same position
6427 then we can do this by masking both and comparing the masked
6428 results. */
6429 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6430 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6431 if (lnbitsize == rnbitsize
6432 && xll_bitpos == xlr_bitpos
6433 && lnbitpos >= 0
6434 && rnbitpos >= 0)
6436 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6437 lntype, lnbitsize, lnbitpos,
6438 ll_unsignedp || rl_unsignedp, ll_reversep);
6439 if (! all_ones_mask_p (ll_mask, lnbitsize))
6440 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6442 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6443 rntype, rnbitsize, rnbitpos,
6444 lr_unsignedp || rr_unsignedp, lr_reversep);
6445 if (! all_ones_mask_p (lr_mask, rnbitsize))
6446 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6448 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6451 /* There is still another way we can do something: If both pairs of
6452 fields being compared are adjacent, we may be able to make a wider
6453 field containing them both.
6455 Note that we still must mask the lhs/rhs expressions. Furthermore,
6456 the mask must be shifted to account for the shift done by
6457 make_bit_field_ref. */
6458 if (((ll_bitsize + ll_bitpos == rl_bitpos
6459 && lr_bitsize + lr_bitpos == rr_bitpos)
6460 || (ll_bitpos == rl_bitpos + rl_bitsize
6461 && lr_bitpos == rr_bitpos + rr_bitsize))
6462 && ll_bitpos >= 0
6463 && rl_bitpos >= 0
6464 && lr_bitpos >= 0
6465 && rr_bitpos >= 0)
6467 tree type;
6469 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6470 ll_bitsize + rl_bitsize,
6471 MIN (ll_bitpos, rl_bitpos),
6472 ll_unsignedp, ll_reversep);
6473 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6474 lr_bitsize + rr_bitsize,
6475 MIN (lr_bitpos, rr_bitpos),
6476 lr_unsignedp, lr_reversep);
6478 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6479 size_int (MIN (xll_bitpos, xrl_bitpos)));
6480 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6481 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6483 /* Convert to the smaller type before masking out unwanted bits. */
6484 type = lntype;
6485 if (lntype != rntype)
6487 if (lnbitsize > rnbitsize)
6489 lhs = fold_convert_loc (loc, rntype, lhs);
6490 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6491 type = rntype;
6493 else if (lnbitsize < rnbitsize)
6495 rhs = fold_convert_loc (loc, lntype, rhs);
6496 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6497 type = lntype;
6501 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6502 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6504 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6505 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6507 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6510 return 0;
6513 /* Handle the case of comparisons with constants. If there is something in
6514 common between the masks, those bits of the constants must be the same.
6515 If not, the condition is always false. Test for this to avoid generating
6516 incorrect code below. */
6517 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6518 if (! integer_zerop (result)
6519 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6520 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6522 if (wanted_code == NE_EXPR)
6524 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6525 return constant_boolean_node (true, truth_type);
6527 else
6529 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6530 return constant_boolean_node (false, truth_type);
6534 if (lnbitpos < 0)
6535 return 0;
6537 /* Construct the expression we will return. First get the component
6538 reference we will make. Unless the mask is all ones the width of
6539 that field, perform the mask operation. Then compare with the
6540 merged constant. */
6541 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6542 lntype, lnbitsize, lnbitpos,
6543 ll_unsignedp || rl_unsignedp, ll_reversep);
6545 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6546 if (! all_ones_mask_p (ll_mask, lnbitsize))
6547 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6549 return build2_loc (loc, wanted_code, truth_type, result,
6550 const_binop (BIT_IOR_EXPR, l_const, r_const));
6553 /* T is an integer expression that is being multiplied or divided by, or
6554 taken modulo, a constant C (CODE says which operation and what kind of
6555 divide or modulus). See if we can eliminate that operation by folding it with
6556 other operations already in T. WIDE_TYPE, if non-null, is a type that
6557 should be used for the computation if wider than our type.
6559 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6560 (X * 2) + (Y * 4). We must, however, be assured that either the original
6561 expression would not overflow or that overflow is undefined for the type
6562 in the language in question.
6564 If we return a non-null expression, it is an equivalent form of the
6565 original computation, but need not be in the original type.
6567 We set *STRICT_OVERFLOW_P to true if the return value depends on
6568 signed overflow being undefined. Otherwise we do not change
6569 *STRICT_OVERFLOW_P. */
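/* Another worked example (illustrative): for signed X, where signed
   overflow is undefined, (X * 28) / 7 simplifies here to X * 4, and
   *STRICT_OVERFLOW_P is set because the result relies on the inner
   multiplication not overflowing.  */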
6571 static tree
6572 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6573 bool *strict_overflow_p)
6575 /* To avoid exponential search depth, refuse to allow recursion past
6576 three levels. Beyond that (1) it's highly unlikely that we'll find
6577 something interesting and (2) we've probably processed it before
6578 when we built the inner expression. */
6580 static int depth;
6581 tree ret;
6583 if (depth > 3)
6584 return NULL;
6586 depth++;
6587 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6588 depth--;
6590 return ret;
6593 static tree
6594 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6595 bool *strict_overflow_p)
6597 tree type = TREE_TYPE (t);
6598 enum tree_code tcode = TREE_CODE (t);
6599 tree ctype = (wide_type != 0
6600 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6601 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6602 ? wide_type : type);
6603 tree t1, t2;
6604 int same_p = tcode == code;
6605 tree op0 = NULL_TREE, op1 = NULL_TREE;
6606 bool sub_strict_overflow_p;
6608 /* Don't deal with constants of zero here; they confuse the code below. */
6609 if (integer_zerop (c))
6610 return NULL_TREE;
6612 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6613 op0 = TREE_OPERAND (t, 0);
6615 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6616 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6618 /* Note that we need not handle conditional operations here since fold
6619 already handles those cases. So just do arithmetic here. */
6620 switch (tcode)
6622 case INTEGER_CST:
6623 /* For a constant, we can always simplify if we are a multiply
6624 or (for divide and modulus) if it is a multiple of our constant. */
6625 if (code == MULT_EXPR
6626 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6627 TYPE_SIGN (type)))
6629 tree tem = const_binop (code, fold_convert (ctype, t),
6630 fold_convert (ctype, c));
6631 /* If the multiplication overflowed, we lost information on it.
6632 See PR68142 and PR69845. */
6633 if (TREE_OVERFLOW (tem))
6634 return NULL_TREE;
6635 return tem;
6637 break;
6639 CASE_CONVERT: case NON_LVALUE_EXPR:
6640 /* If op0 is an expression ... */
6641 if ((COMPARISON_CLASS_P (op0)
6642 || UNARY_CLASS_P (op0)
6643 || BINARY_CLASS_P (op0)
6644 || VL_EXP_CLASS_P (op0)
6645 || EXPRESSION_CLASS_P (op0))
6646 /* ... and has wrapping overflow, and its type is smaller
6647 than ctype, then we cannot pass through as widening. */
6648 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6649 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6650 && (TYPE_PRECISION (ctype)
6651 > TYPE_PRECISION (TREE_TYPE (op0))))
6652 /* ... or this is a truncation (t is narrower than op0),
6653 then we cannot pass through this narrowing. */
6654 || (TYPE_PRECISION (type)
6655 < TYPE_PRECISION (TREE_TYPE (op0)))
6656 /* ... or signedness changes for division or modulus,
6657 then we cannot pass through this conversion. */
6658 || (code != MULT_EXPR
6659 && (TYPE_UNSIGNED (ctype)
6660 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6661 /* ... or has undefined overflow while the converted to
6662 type has not, we cannot do the operation in the inner type
6663 as that would introduce undefined overflow. */
6664 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6665 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6666 && !TYPE_OVERFLOW_UNDEFINED (type))))
6667 break;
6669 /* Pass the constant down and see if we can make a simplification. If
6670 we can, replace this expression with the inner simplification for
6671 possible later conversion to our or some other type. */
6672 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6673 && TREE_CODE (t2) == INTEGER_CST
6674 && !TREE_OVERFLOW (t2)
6675 && (t1 = extract_muldiv (op0, t2, code,
6676 code == MULT_EXPR ? ctype : NULL_TREE,
6677 strict_overflow_p)) != 0)
6678 return t1;
6679 break;
6681 case ABS_EXPR:
6682 /* If widening the type changes it from signed to unsigned, then we
6683 must avoid building ABS_EXPR itself as unsigned. */
6684 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6686 tree cstype = (*signed_type_for) (ctype);
6687 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6688 != 0)
6690 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6691 return fold_convert (ctype, t1);
6693 break;
6695 /* If the constant is negative, we cannot simplify this. */
6696 if (tree_int_cst_sgn (c) == -1)
6697 break;
6698 /* FALLTHROUGH */
6699 case NEGATE_EXPR:
6700 /* For division and modulus, type can't be unsigned, as e.g.
6701 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6702 For signed types, even with wrapping overflow, this is fine. */
6703 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6704 break;
6705 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6706 != 0)
6707 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6708 break;
6710 case MIN_EXPR: case MAX_EXPR:
6711 /* If widening the type changes the signedness, then we can't perform
6712 this optimization as that changes the result. */
6713 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6714 break;
6716 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6717 sub_strict_overflow_p = false;
6718 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6719 &sub_strict_overflow_p)) != 0
6720 && (t2 = extract_muldiv (op1, c, code, wide_type,
6721 &sub_strict_overflow_p)) != 0)
6723 if (tree_int_cst_sgn (c) < 0)
6724 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6725 if (sub_strict_overflow_p)
6726 *strict_overflow_p = true;
6727 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6728 fold_convert (ctype, t2));
6730 break;
6732 case LSHIFT_EXPR: case RSHIFT_EXPR:
6733 /* If the second operand is constant, this is a multiplication
6734 or floor division, by a power of two, so we can treat it that
6735 way unless the multiplier or divisor overflows. Signed
6736 left-shift overflow is implementation-defined rather than
6737 undefined in C90, so do not convert signed left shift into
6738 multiplication. */
6739 if (TREE_CODE (op1) == INTEGER_CST
6740 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6741 /* const_binop may not detect overflow correctly,
6742 so check for it explicitly here. */
6743 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6744 wi::to_wide (op1))
6745 && (t1 = fold_convert (ctype,
6746 const_binop (LSHIFT_EXPR, size_one_node,
6747 op1))) != 0
6748 && !TREE_OVERFLOW (t1))
6749 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6750 ? MULT_EXPR : FLOOR_DIV_EXPR,
6751 ctype,
6752 fold_convert (ctype, op0),
6753 t1),
6754 c, code, wide_type, strict_overflow_p);
6755 break;
6757 case PLUS_EXPR: case MINUS_EXPR:
6758 /* See if we can eliminate the operation on both sides. If we can, we
6759 can return a new PLUS or MINUS. If we can't, the only remaining
6760 cases where we can do anything are if the second operand is a
6761 constant. */
6762 sub_strict_overflow_p = false;
6763 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6764 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6765 if (t1 != 0 && t2 != 0
6766 && TYPE_OVERFLOW_WRAPS (ctype)
6767 && (code == MULT_EXPR
6768 /* If not multiplication, we can only do this if both operands
6769 are divisible by c. */
6770 || (multiple_of_p (ctype, op0, c)
6771 && multiple_of_p (ctype, op1, c))))
6773 if (sub_strict_overflow_p)
6774 *strict_overflow_p = true;
6775 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6776 fold_convert (ctype, t2));
6779 /* If this was a subtraction, negate OP1 and set it to be an addition.
6780 This simplifies the logic below. */
6781 if (tcode == MINUS_EXPR)
6783 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6784 /* If OP1 was not easily negatable, the constant may be OP0. */
6785 if (TREE_CODE (op0) == INTEGER_CST)
6787 std::swap (op0, op1);
6788 std::swap (t1, t2);
6792 if (TREE_CODE (op1) != INTEGER_CST)
6793 break;
6795 /* If either OP1 or C is negative, this optimization is not safe for
6796 some of the division and remainder types while for others we need
6797 to change the code. */
6798 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6800 if (code == CEIL_DIV_EXPR)
6801 code = FLOOR_DIV_EXPR;
6802 else if (code == FLOOR_DIV_EXPR)
6803 code = CEIL_DIV_EXPR;
6804 else if (code != MULT_EXPR
6805 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6806 break;
6809 /* If it's a multiply or a division/modulus operation of a multiple
6810 of our constant, do the operation and verify it doesn't overflow. */
6811 if (code == MULT_EXPR
6812 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6813 TYPE_SIGN (type)))
6815 op1 = const_binop (code, fold_convert (ctype, op1),
6816 fold_convert (ctype, c));
6817 /* We allow the constant to overflow with wrapping semantics. */
6818 if (op1 == 0
6819 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6820 break;
6822 else
6823 break;
6825 /* If we have an unsigned type, we cannot widen the operation since it
6826 will change the result if the original computation overflowed. */
6827 if (TYPE_UNSIGNED (ctype) && ctype != type)
6828 break;
6830 /* The last case is if this is a multiply. In that case, we can
6831 apply the distributive law to commute the multiply and addition
6832 if the multiplication of the constants doesn't overflow
6833 and overflow is defined. With undefined overflow
6834 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6835 But fold_plusminus_mult_expr would factor back any power-of-two
6836 value so do not distribute in the first place in this case. */
6837 if (code == MULT_EXPR
6838 && TYPE_OVERFLOW_WRAPS (ctype)
6839 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6840 return fold_build2 (tcode, ctype,
6841 fold_build2 (code, ctype,
6842 fold_convert (ctype, op0),
6843 fold_convert (ctype, c)),
6844 op1);
6846 break;
6848 case MULT_EXPR:
6849 /* We have a special case here if we are doing something like
6850 (C * 8) % 4 since we know that's zero. */
6851 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6852 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6853 /* If the multiplication can overflow we cannot optimize this. */
6854 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6855 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6856 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6857 TYPE_SIGN (type)))
6859 *strict_overflow_p = true;
6860 return omit_one_operand (type, integer_zero_node, op0);
6863 /* ... fall through ... */
6865 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6866 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6867 /* If we can extract our operation from the LHS, do so and return a
6868 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6869 do something only if the second operand is a constant. */
6870 if (same_p
6871 && TYPE_OVERFLOW_WRAPS (ctype)
6872 && (t1 = extract_muldiv (op0, c, code, wide_type,
6873 strict_overflow_p)) != 0)
6874 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6875 fold_convert (ctype, op1));
6876 else if (tcode == MULT_EXPR && code == MULT_EXPR
6877 && TYPE_OVERFLOW_WRAPS (ctype)
6878 && (t1 = extract_muldiv (op1, c, code, wide_type,
6879 strict_overflow_p)) != 0)
6880 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6881 fold_convert (ctype, t1));
6882 else if (TREE_CODE (op1) != INTEGER_CST)
6883 return 0;
6885 /* If these are the same operation types, we can associate them
6886 assuming no overflow. */
6887 if (tcode == code)
6889 bool overflow_p = false;
6890 wi::overflow_type overflow_mul;
6891 signop sign = TYPE_SIGN (ctype);
6892 unsigned prec = TYPE_PRECISION (ctype);
6893 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6894 wi::to_wide (c, prec),
6895 sign, &overflow_mul);
6896 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6897 if (overflow_mul
6898 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6899 overflow_p = true;
6900 if (!overflow_p)
6901 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6902 wide_int_to_tree (ctype, mul));
6905 /* If these operations "cancel" each other, we have the main
6906 optimizations of this pass, which occur when either constant is a
6907 multiple of the other, in which case we replace this with an
6908 operation of either CODE or TCODE.
6910 If we have an unsigned type, we cannot do this since it will change
6911 the result if the original computation overflowed. */
6912 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6913 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6914 || (tcode == MULT_EXPR
6915 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6916 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6917 && code != MULT_EXPR)))
6919 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6920 TYPE_SIGN (type)))
6922 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6923 *strict_overflow_p = true;
6924 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6925 fold_convert (ctype,
6926 const_binop (TRUNC_DIV_EXPR,
6927 op1, c)));
6929 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6930 TYPE_SIGN (type)))
6932 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6933 *strict_overflow_p = true;
6934 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6935 fold_convert (ctype,
6936 const_binop (TRUNC_DIV_EXPR,
6937 c, op1)));
6940 break;
6942 default:
6943 break;
6946 return 0;
6949 /* Return a node which has the indicated constant VALUE (either 0 or
6950 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6951 and is of the indicated TYPE. */
6953 tree
6954 constant_boolean_node (bool value, tree type)
6956 if (type == integer_type_node)
6957 return value ? integer_one_node : integer_zero_node;
6958 else if (type == boolean_type_node)
6959 return value ? boolean_true_node : boolean_false_node;
6960 else if (TREE_CODE (type) == VECTOR_TYPE)
6961 return build_vector_from_val (type,
6962 build_int_cst (TREE_TYPE (type),
6963 value ? -1 : 0));
6964 else
6965 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6969 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6970 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6971 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6972 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6973 COND is the first argument to CODE; otherwise (as in the example
6974 given here), it is the second argument. TYPE is the type of the
6975 original expression. Return NULL_TREE if no simplification is
6976 possible. */
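/* Concrete instance (illustrative): folding "1 + (x < y)" with
   ARG == 1 produces (x < y) ? 2 : 1, since both branches simplify
   to constants once the addition is distributed.  */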
6978 static tree
6979 fold_binary_op_with_conditional_arg (location_t loc,
6980 enum tree_code code,
6981 tree type, tree op0, tree op1,
6982 tree cond, tree arg, int cond_first_p)
6984 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6985 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6986 tree test, true_value, false_value;
6987 tree lhs = NULL_TREE;
6988 tree rhs = NULL_TREE;
6989 enum tree_code cond_code = COND_EXPR;
6991 /* Do not move possibly trapping operations into the conditional as this
6992 pessimizes code and causes gimplification issues when applied late. */
6993 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
6994 ANY_INTEGRAL_TYPE_P (type)
6995 && TYPE_OVERFLOW_TRAPS (type), op1))
6996 return NULL_TREE;
6998 if (TREE_CODE (cond) == COND_EXPR
6999 || TREE_CODE (cond) == VEC_COND_EXPR)
7001 test = TREE_OPERAND (cond, 0);
7002 true_value = TREE_OPERAND (cond, 1);
7003 false_value = TREE_OPERAND (cond, 2);
7004 /* If this operand is an expression that throws, it does not make
7005 sense to try to perform a logical or arithmetic operation
7006 involving it. */
7007 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7008 lhs = true_value;
7009 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7010 rhs = false_value;
7012 else if (!(TREE_CODE (type) != VECTOR_TYPE
7013 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
7015 tree testtype = TREE_TYPE (cond);
7016 test = cond;
7017 true_value = constant_boolean_node (true, testtype);
7018 false_value = constant_boolean_node (false, testtype);
7020 else
7021 /* Detect the case of mixing vector and scalar types - bail out. */
7022 return NULL_TREE;
7024 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7025 cond_code = VEC_COND_EXPR;
7027 /* This transformation is only worthwhile if we don't have to wrap ARG
7028 in a SAVE_EXPR and the operation can be simplified without recursing
7029 on at least one of the branches once it's pushed inside the COND_EXPR. */
7030 if (!TREE_CONSTANT (arg)
7031 && (TREE_SIDE_EFFECTS (arg)
7032 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7033 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7034 return NULL_TREE;
7036 arg = fold_convert_loc (loc, arg_type, arg);
7037 if (lhs == 0)
7039 true_value = fold_convert_loc (loc, cond_type, true_value);
7040 if (cond_first_p)
7041 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7042 else
7043 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7045 if (rhs == 0)
7047 false_value = fold_convert_loc (loc, cond_type, false_value);
7048 if (cond_first_p)
7049 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7050 else
7051 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7054 /* Check that we have simplified at least one of the branches. */
7055 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7056 return NULL_TREE;
7058 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7062 /* Subroutine of fold() that checks for the addition of +/- 0.0.
7064 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
7065 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
7066 ADDEND is the same as X.
7068 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7069 and finite. The problematic cases are when X is zero, and its mode
7070 has signed zeros. In the case of rounding towards -infinity,
7071 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7072 modes, X + 0 is not the same as X because -0 + 0 is 0. */
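/* Consequently (illustrative): with signed zeros honored and default
   rounding, "x - 0.0" may be folded to "x" (the NEGATE case) but
   "x + 0.0" may not, because (-0.0) + 0.0 is +0.0, not -0.0.  */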
7074 bool
7075 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
7077 if (!real_zerop (addend))
7078 return false;
7080 /* Don't allow the fold with -fsignaling-nans. */
7081 if (HONOR_SNANS (type))
7082 return false;
7084 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7085 if (!HONOR_SIGNED_ZEROS (type))
7086 return true;
7088 /* There is no case that is safe for all rounding modes. */
7089 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7090 return false;
7092 /* In a vector or complex, we would need to check the sign of all zeros. */
7093 if (TREE_CODE (addend) == VECTOR_CST)
7094 addend = uniform_vector_p (addend);
7095 if (!addend || TREE_CODE (addend) != REAL_CST)
7096 return false;
7098 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7099 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
7100 negate = !negate;
7102 /* The mode has signed zeros, and we have to honor their sign.
7103 In this situation, there is only one case we can return true for.
7104 X - 0 is the same as X with default rounding. */
7105 return negate;
7108 /* Subroutine of match.pd that optimizes comparisons of a division by
7109 a nonzero integer constant against an integer constant, i.e.
7110 X/C1 op C2.
7112 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7113 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
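/* Worked example (illustrative): for signed X and "X / 3 == 2",
   C1 == 3 and C2 == 2 give PROD == 6 and TMP == 2, so *LO == 6 and
   *HI == 8; the caller can rewrite the test as 6 <= X && X <= 8.  */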
7115 enum tree_code
7116 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7117 tree *hi, bool *neg_overflow)
7119 tree prod, tmp, type = TREE_TYPE (c1);
7120 signop sign = TYPE_SIGN (type);
7121 wi::overflow_type overflow;
7123 /* We have to do this the hard way to detect unsigned overflow.
7124 prod = int_const_binop (MULT_EXPR, c1, c2); */
7125 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7126 prod = force_fit_type (type, val, -1, overflow);
7127 *neg_overflow = false;
7129 if (sign == UNSIGNED)
7131 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7132 *lo = prod;
7134 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7135 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7136 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7138 else if (tree_int_cst_sgn (c1) >= 0)
7140 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7141 switch (tree_int_cst_sgn (c2))
7143 case -1:
7144 *neg_overflow = true;
7145 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7146 *hi = prod;
7147 break;
7149 case 0:
7150 *lo = fold_negate_const (tmp, type);
7151 *hi = tmp;
7152 break;
7154 case 1:
7155 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7156 *lo = prod;
7157 break;
7159 default:
7160 gcc_unreachable ();
7163 else
7165 /* A negative divisor reverses the relational operators. */
7166 code = swap_tree_comparison (code);
7168 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7169 switch (tree_int_cst_sgn (c2))
7171 case -1:
7172 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7173 *lo = prod;
7174 break;
7176 case 0:
7177 *hi = fold_negate_const (tmp, type);
7178 *lo = tmp;
7179 break;
7181 case 1:
7182 *neg_overflow = true;
7183 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7184 *hi = prod;
7185 break;
7187 default:
7188 gcc_unreachable ();
7192 if (code != EQ_EXPR && code != NE_EXPR)
7193 return code;
7195 if (TREE_OVERFLOW (*lo)
7196 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7197 *lo = NULL_TREE;
7198 if (TREE_OVERFLOW (*hi)
7199 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7200 *hi = NULL_TREE;
7202 return code;
7206 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7207 equality/inequality test, then return a simplified form of the test
7208 using a sign test. Otherwise return NULL. TYPE is the desired
7209 result type. */
7211 static tree
7212 fold_single_bit_test_into_sign_test (location_t loc,
7213 enum tree_code code, tree arg0, tree arg1,
7214 tree result_type)
7216 /* If this is testing a single bit, we can optimize the test. */
7217 if ((code == NE_EXPR || code == EQ_EXPR)
7218 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7219 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7221 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7222 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7223 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7225 if (arg00 != NULL_TREE
7226 /* This is only a win if casting to a signed type is cheap,
7227 i.e. when arg00's type is not a partial mode. */
7228 && type_has_mode_precision_p (TREE_TYPE (arg00)))
7230 tree stype = signed_type_for (TREE_TYPE (arg00));
7231 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7232 result_type,
7233 fold_convert_loc (loc, stype, arg00),
7234 build_int_cst (stype, 0));
7238 return NULL_TREE;
7241 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7242 equality/inequality test, then return a simplified form of
7243 the test using shifts and logical operations. Otherwise return
7244 NULL. TYPE is the desired result type. */
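/* For instance (illustrative): "(a & 8) != 0" becomes
   ((a >> 3) & 1) and "(a & 8) == 0" becomes (((a >> 3) ^ 1) & 1),
   unless the sign-bit shortcut below applies first.  */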
7246 tree
7247 fold_single_bit_test (location_t loc, enum tree_code code,
7248 tree arg0, tree arg1, tree result_type)
7250 /* If this is testing a single bit, we can optimize the test. */
7251 if ((code == NE_EXPR || code == EQ_EXPR)
7252 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7253 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7255 tree inner = TREE_OPERAND (arg0, 0);
7256 tree type = TREE_TYPE (arg0);
7257 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7258 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7259 int ops_unsigned;
7260 tree signed_type, unsigned_type, intermediate_type;
7261 tree tem, one;
7263 /* First, see if we can fold the single bit test into a sign-bit
7264 test. */
7265 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7266 result_type);
7267 if (tem)
7268 return tem;
7270 /* Otherwise we have (A & C) != 0 where C is a single bit,
7271 convert that into ((A >> C2) & 1), where C2 = log2(C).
7272 Similarly for (A & C) == 0. */
7274 /* If INNER is a right shift by a constant and the shift count plus
7275 BITNUM stays within the precision of TYPE, adjust BITNUM and INNER. */
7276 if (TREE_CODE (inner) == RSHIFT_EXPR
7277 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7278 && bitnum < TYPE_PRECISION (type)
7279 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7280 TYPE_PRECISION (type) - bitnum))
7282 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7283 inner = TREE_OPERAND (inner, 0);
7286 /* If we are going to be able to omit the AND below, we must do our
7287 operations as unsigned. If we must use the AND, we have a choice.
7288 Normally unsigned is faster, but for some machines signed is. */
7289 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7290 && !flag_syntax_only) ? 0 : 1;
7292 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7293 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7294 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7295 inner = fold_convert_loc (loc, intermediate_type, inner);
7297 if (bitnum != 0)
7298 inner = build2 (RSHIFT_EXPR, intermediate_type,
7299 inner, size_int (bitnum));
7301 one = build_int_cst (intermediate_type, 1);
7303 if (code == EQ_EXPR)
7304 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7306 /* Put the AND last so it can combine with more things. */
7307 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7309 /* Make sure to return the proper type. */
7310 inner = fold_convert_loc (loc, result_type, inner);
7312 return inner;
7314 return NULL_TREE;
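/* Illustrative sketch, not used by the compiler: the shift form built
   above, written out in C for C = 1 << BITNUM (the name is ours):  */

static inline unsigned int
single_bit_test_model (unsigned int x, unsigned int bitnum, int test_eq)
{
  unsigned int inner = x >> bitnum;	/* (A >> C2) */
  if (test_eq)
    inner ^= 1;				/* (A & C) == 0 is the complement.  */
  return inner & 1;			/* The AND goes last, as above.  */
}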
7317 /* Test whether it is preferable to swap two operands, ARG0 and
7318 ARG1, for example because ARG0 is an integer constant and ARG1
7319 isn't. */
7321 bool
7322 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7324 if (CONSTANT_CLASS_P (arg1))
7325 return 0;
7326 if (CONSTANT_CLASS_P (arg0))
7327 return 1;
7329 STRIP_NOPS (arg0);
7330 STRIP_NOPS (arg1);
7332 if (TREE_CONSTANT (arg1))
7333 return 0;
7334 if (TREE_CONSTANT (arg0))
7335 return 1;
7337 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7338 for commutative and comparison operators. Ensuring a canonical
7339 form allows the optimizers to find additional redundancies without
7340 having to explicitly check for both orderings. */
7341 if (TREE_CODE (arg0) == SSA_NAME
7342 && TREE_CODE (arg1) == SSA_NAME
7343 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7344 return 1;
7346 /* Put SSA_NAMEs last. */
7347 if (TREE_CODE (arg1) == SSA_NAME)
7348 return 0;
7349 if (TREE_CODE (arg0) == SSA_NAME)
7350 return 1;
7352 /* Put variables last. */
7353 if (DECL_P (arg1))
7354 return 0;
7355 if (DECL_P (arg0))
7356 return 1;
7358 return 0;
7362 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7363 means A >= Y && A != MAX, but in this case we know that
7364 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7366 static tree
7367 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7369 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7371 if (TREE_CODE (bound) == LT_EXPR)
7372 a = TREE_OPERAND (bound, 0);
7373 else if (TREE_CODE (bound) == GT_EXPR)
7374 a = TREE_OPERAND (bound, 1);
7375 else
7376 return NULL_TREE;
7378 typea = TREE_TYPE (a);
7379 if (!INTEGRAL_TYPE_P (typea)
7380 && !POINTER_TYPE_P (typea))
7381 return NULL_TREE;
7383 if (TREE_CODE (ineq) == LT_EXPR)
7385 a1 = TREE_OPERAND (ineq, 1);
7386 y = TREE_OPERAND (ineq, 0);
7388 else if (TREE_CODE (ineq) == GT_EXPR)
7390 a1 = TREE_OPERAND (ineq, 0);
7391 y = TREE_OPERAND (ineq, 1);
7393 else
7394 return NULL_TREE;
7396 if (TREE_TYPE (a1) != typea)
7397 return NULL_TREE;
7399 if (POINTER_TYPE_P (typea))
7401 /* Convert the pointer types into integers before taking the difference. */
7402 tree ta = fold_convert_loc (loc, ssizetype, a);
7403 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7404 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7406 else
7407 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7409 if (!diff || !integer_onep (diff))
7410 return NULL_TREE;
7412 return fold_build2_loc (loc, GE_EXPR, type, a, y);
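/* Illustrative sketch, not used by the compiler: once BOUND guarantees
   A < X, A + 1 cannot wrap, and for integers A + 1 > Y is then the same
   as A >= Y.  A host-side check of the identity (the name is ours):  */

static inline int
nonsharp_ineq_model (int a, int x, int y)
{
  if (!(a < x))		/* BOUND must hold, so A + 1 does not overflow.  */
    return 1;
  return (a + 1 > y) == (a >= y);	/* Always 1 when BOUND holds.  */
}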
7415 /* Fold a sum or difference of at least one multiplication.
7416 Returns the folded tree or NULL if no simplification could be made. */
7418 static tree
7419 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7420 tree arg0, tree arg1)
7422 tree arg00, arg01, arg10, arg11;
7423 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7425 /* (A * C) +- (B * C) -> (A+-B) * C.
7426 (A * C) +- A -> A * (C+-1).
7427 We are most concerned about the case where C is a constant,
7428 but other combinations show up during loop reduction. Since
7429 it is not difficult, try all four possibilities. */
7431 if (TREE_CODE (arg0) == MULT_EXPR)
7433 arg00 = TREE_OPERAND (arg0, 0);
7434 arg01 = TREE_OPERAND (arg0, 1);
7436 else if (TREE_CODE (arg0) == INTEGER_CST)
7438 arg00 = build_one_cst (type);
7439 arg01 = arg0;
7441 else
7443 /* We cannot generate constant 1 for fract. */
7444 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7445 return NULL_TREE;
7446 arg00 = arg0;
7447 arg01 = build_one_cst (type);
7449 if (TREE_CODE (arg1) == MULT_EXPR)
7451 arg10 = TREE_OPERAND (arg1, 0);
7452 arg11 = TREE_OPERAND (arg1, 1);
7454 else if (TREE_CODE (arg1) == INTEGER_CST)
7456 arg10 = build_one_cst (type);
7457 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7458 the purpose of this canonicalization. */
7459 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7460 && negate_expr_p (arg1)
7461 && code == PLUS_EXPR)
7463 arg11 = negate_expr (arg1);
7464 code = MINUS_EXPR;
7466 else
7467 arg11 = arg1;
7469 else
7471 /* We cannot generate constant 1 for fract. */
7472 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7473 return NULL_TREE;
7474 arg10 = arg1;
7475 arg11 = build_one_cst (type);
7477 same = NULL_TREE;
7479 /* Prefer factoring a common non-constant. */
7480 if (operand_equal_p (arg00, arg10, 0))
7481 same = arg00, alt0 = arg01, alt1 = arg11;
7482 else if (operand_equal_p (arg01, arg11, 0))
7483 same = arg01, alt0 = arg00, alt1 = arg10;
7484 else if (operand_equal_p (arg00, arg11, 0))
7485 same = arg00, alt0 = arg01, alt1 = arg10;
7486 else if (operand_equal_p (arg01, arg10, 0))
7487 same = arg01, alt0 = arg00, alt1 = arg11;
7489 /* No identical multiplicands; see if we can find a common
7490 power-of-two factor in non-power-of-two multiplies. This
7491 can help in multi-dimensional array access. */
7492 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7494 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7495 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7496 HOST_WIDE_INT tmp;
7497 bool swap = false;
7498 tree maybe_same;
7500 /* Move min of absolute values to int11. */
7501 if (absu_hwi (int01) < absu_hwi (int11))
7503 tmp = int01, int01 = int11, int11 = tmp;
7504 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7505 maybe_same = arg01;
7506 swap = true;
7508 else
7509 maybe_same = arg11;
7511 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7512 if (factor > 1
7513 && pow2p_hwi (factor)
7514 && (int01 & (factor - 1)) == 0
7515 /* The remainder should not be a constant, otherwise we
7516 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7517 increase the number of multiplications needed. */
7518 && TREE_CODE (arg10) != INTEGER_CST)
7520 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7521 build_int_cst (TREE_TYPE (arg00),
7522 int01 / int11));
7523 alt1 = arg10;
7524 same = maybe_same;
7525 if (swap)
7526 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7530 if (!same)
7531 return NULL_TREE;
7533 if (! ANY_INTEGRAL_TYPE_P (type)
7534 || TYPE_OVERFLOW_WRAPS (type)
7535 /* We are neither factoring zero nor minus one. */
7536 || TREE_CODE (same) == INTEGER_CST)
7537 return fold_build2_loc (loc, MULT_EXPR, type,
7538 fold_build2_loc (loc, code, type,
7539 fold_convert_loc (loc, type, alt0),
7540 fold_convert_loc (loc, type, alt1)),
7541 fold_convert_loc (loc, type, same));
7543 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7544 same may be minus one and thus the multiplication may overflow. Perform
7545 the sum operation in an unsigned type. */
7546 tree utype = unsigned_type_for (type);
7547 tree tem = fold_build2_loc (loc, code, utype,
7548 fold_convert_loc (loc, utype, alt0),
7549 fold_convert_loc (loc, utype, alt1));
7550 /* If the sum evaluated to a constant that is not -INF, the multiplication
7551 cannot overflow. */
7552 if (TREE_CODE (tem) == INTEGER_CST
7553 && (wi::to_wide (tem)
7554 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7555 return fold_build2_loc (loc, MULT_EXPR, type,
7556 fold_convert (type, tem), same);
7558 /* Do not resort to unsigned multiplication because
7559 we lose the no-overflow property of the expression. */
7560 return NULL_TREE;
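/* Illustrative sketch, not used by the compiler: the two folds above at
   the C level, assuming no signed overflow (the name is ours).  With a
   common operand, a*c + b*c becomes (a + b) * c; with a common
   power-of-two factor, a*8 + b*2 becomes (a*4 + b) * 2.  */

static inline int
plusminus_mult_model (int a, int b, int c)
{
  int factored_ok = (a * c + b * c) == ((a + b) * c);
  int pow2_ok = (a * 8 + b * 2) == ((a * 4 + b) * 2);
  return factored_ok && pow2_ok;	/* Always 1 absent overflow.  */
}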
7563 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7564 specified by EXPR into the buffer PTR of length LEN bytes.
7565 Return the number of bytes placed in the buffer, or zero
7566 upon failure. */
7568 static int
7569 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7571 tree type = TREE_TYPE (expr);
7572 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7573 int byte, offset, word, words;
7574 unsigned char value;
7576 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7577 return 0;
7578 if (off == -1)
7579 off = 0;
7581 if (ptr == NULL)
7582 /* Dry run. */
7583 return MIN (len, total_bytes - off);
7585 words = total_bytes / UNITS_PER_WORD;
7587 for (byte = 0; byte < total_bytes; byte++)
7589 int bitpos = byte * BITS_PER_UNIT;
7590 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7591 number of bytes. */
7592 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7594 if (total_bytes > UNITS_PER_WORD)
7596 word = byte / UNITS_PER_WORD;
7597 if (WORDS_BIG_ENDIAN)
7598 word = (words - 1) - word;
7599 offset = word * UNITS_PER_WORD;
7600 if (BYTES_BIG_ENDIAN)
7601 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7602 else
7603 offset += byte % UNITS_PER_WORD;
7605 else
7606 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7607 if (offset >= off && offset - off < len)
7608 ptr[offset - off] = value;
7610 return MIN (len, total_bytes - off);
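/* Illustrative sketch, not used by the compiler: on a little-endian
   target whose words are at least 4 bytes, the loop above reduces to
   storing byte I of the value at PTR[I].  A host-side model for a
   32-bit value (the name is ours):  */

static inline void
encode_u32_le_model (unsigned int val, unsigned char *ptr)
{
  for (int byte = 0; byte < 4; byte++)
    ptr[byte] = (unsigned char) (val >> (byte * 8));
}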
7614 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7615 specified by EXPR into the buffer PTR of length LEN bytes.
7616 Return the number of bytes placed in the buffer, or zero
7617 upon failure. */
7619 static int
7620 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7622 tree type = TREE_TYPE (expr);
7623 scalar_mode mode = SCALAR_TYPE_MODE (type);
7624 int total_bytes = GET_MODE_SIZE (mode);
7625 FIXED_VALUE_TYPE value;
7626 tree i_value, i_type;
7628 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7629 return 0;
7631 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7633 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7634 return 0;
7636 value = TREE_FIXED_CST (expr);
7637 i_value = double_int_to_tree (i_type, value.data);
7639 return native_encode_int (i_value, ptr, len, off);
7643 /* Subroutine of native_encode_expr. Encode the REAL_CST
7644 specified by EXPR into the buffer PTR of length LEN bytes.
7645 Return the number of bytes placed in the buffer, or zero
7646 upon failure. */
7648 static int
7649 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7651 tree type = TREE_TYPE (expr);
7652 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7653 int byte, offset, word, words, bitpos;
7654 unsigned char value;
7656 /* There are always 32 bits in each long, no matter the size of
7657 the host's long. We handle floating point representations with
7658 up to 192 bits. */
7659 long tmp[6];
7661 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7662 return 0;
7663 if (off == -1)
7664 off = 0;
7666 if (ptr == NULL)
7667 /* Dry run. */
7668 return MIN (len, total_bytes - off);
7670 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7672 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7674 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7675 bitpos += BITS_PER_UNIT)
7677 byte = (bitpos / BITS_PER_UNIT) & 3;
7678 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7680 if (UNITS_PER_WORD < 4)
7682 word = byte / UNITS_PER_WORD;
7683 if (WORDS_BIG_ENDIAN)
7684 word = (words - 1) - word;
7685 offset = word * UNITS_PER_WORD;
7686 if (BYTES_BIG_ENDIAN)
7687 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7688 else
7689 offset += byte % UNITS_PER_WORD;
7691 else
7693 offset = byte;
7694 if (BYTES_BIG_ENDIAN)
7696 /* Reverse bytes within each long, or within the entire float
7697 if it's smaller than a long (for HFmode). */
7698 offset = MIN (3, total_bytes - 1) - offset;
7699 gcc_assert (offset >= 0);
7702 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7703 if (offset >= off
7704 && offset - off < len)
7705 ptr[offset - off] = value;
7707 return MIN (len, total_bytes - off);
7710 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7711 specified by EXPR into the buffer PTR of length LEN bytes.
7712 Return the number of bytes placed in the buffer, or zero
7713 upon failure. */
7715 static int
7716 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7718 int rsize, isize;
7719 tree part;
7721 part = TREE_REALPART (expr);
7722 rsize = native_encode_expr (part, ptr, len, off);
7723 if (off == -1 && rsize == 0)
7724 return 0;
7725 part = TREE_IMAGPART (expr);
7726 if (off != -1)
7727 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7728 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7729 len - rsize, off);
7730 if (off == -1 && isize != rsize)
7731 return 0;
7732 return rsize + isize;
7735 /* Like native_encode_vector, but only encode the first COUNT elements.
7736 The other arguments are as for native_encode_vector. */
7738 static int
7739 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7740 int off, unsigned HOST_WIDE_INT count)
7742 tree itype = TREE_TYPE (TREE_TYPE (expr));
7743 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7744 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7746 /* This is the only case in which elements can be smaller than a byte.
7747 Element 0 is always in the lsb of the containing byte. */
7748 unsigned int elt_bits = TYPE_PRECISION (itype);
7749 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7750 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7751 return 0;
7753 if (off == -1)
7754 off = 0;
7756 /* Zero the buffer and then set bits later where necessary. */
7757 int extract_bytes = MIN (len, total_bytes - off);
7758 if (ptr)
7759 memset (ptr, 0, extract_bytes);
7761 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7762 unsigned int first_elt = off * elts_per_byte;
7763 unsigned int extract_elts = extract_bytes * elts_per_byte;
7764 for (unsigned int i = 0; i < extract_elts; ++i)
7766 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7767 if (TREE_CODE (elt) != INTEGER_CST)
7768 return 0;
7770 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7772 unsigned int bit = i * elt_bits;
7773 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7776 return extract_bytes;
7779 int offset = 0;
7780 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7781 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7783 if (off >= size)
7785 off -= size;
7786 continue;
7788 tree elem = VECTOR_CST_ELT (expr, i);
7789 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7790 len - offset, off);
7791 if ((off == -1 && res != size) || res == 0)
7792 return 0;
7793 offset += res;
7794 if (offset >= len)
7795 return (off == -1 && i < count - 1) ? 0 : offset;
7796 if (off != -1)
7797 off = 0;
7799 return offset;
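/* Illustrative sketch, not used by the compiler: for 1-bit boolean
   elements the branch above packs element 0 into the lsb of byte 0,
   element 8 into the lsb of byte 1, and so on.  A host-side model with
   8-bit bytes, ELTS holding 0/1 values (the name is ours):  */

static inline void
pack_bool_elts_model (const unsigned char *elts, unsigned int n,
		      unsigned char *ptr)
{
  memset (ptr, 0, (n + 7) / 8);
  for (unsigned int i = 0; i < n; i++)
    if (elts[i] & 1)
      ptr[i / 8] |= (unsigned char) (1 << (i % 8));
}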
7802 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7803 specified by EXPR into the buffer PTR of length LEN bytes.
7804 Return the number of bytes placed in the buffer, or zero
7805 upon failure. */
7807 static int
7808 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7810 unsigned HOST_WIDE_INT count;
7811 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7812 return 0;
7813 return native_encode_vector_part (expr, ptr, len, off, count);
7817 /* Subroutine of native_encode_expr. Encode the STRING_CST
7818 specified by EXPR into the buffer PTR of length LEN bytes.
7819 Return the number of bytes placed in the buffer, or zero
7820 upon failure. */
7822 static int
7823 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7825 tree type = TREE_TYPE (expr);
7827 /* Wide-char strings are encoded in target byte order, so natively
7828 encoding them is trivial. */
7829 if (BITS_PER_UNIT != CHAR_BIT
7830 || TREE_CODE (type) != ARRAY_TYPE
7831 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7832 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7833 return 0;
7835 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7836 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7837 return 0;
7838 if (off == -1)
7839 off = 0;
7840 len = MIN (total_bytes - off, len);
7841 if (ptr == NULL)
7842 /* Dry run. */;
7843 else
7845 int written = 0;
7846 if (off < TREE_STRING_LENGTH (expr))
7848 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7849 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7851 memset (ptr + written, 0, len - written);
7853 return len;
7857 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7858 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7859 buffer PTR of length LEN bytes. If PTR is NULL, don't actually store
7860 anything, just do a dry run. If OFF is not -1 then start
7861 the encoding at byte offset OFF and encode at most LEN bytes.
7862 Return the number of bytes placed in the buffer, or zero upon failure. */
7865 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7867 /* We don't support starting at a negative offset, and -1 is special. */
7868 if (off < -1)
7869 return 0;
7871 switch (TREE_CODE (expr))
7873 case INTEGER_CST:
7874 return native_encode_int (expr, ptr, len, off);
7876 case REAL_CST:
7877 return native_encode_real (expr, ptr, len, off);
7879 case FIXED_CST:
7880 return native_encode_fixed (expr, ptr, len, off);
7882 case COMPLEX_CST:
7883 return native_encode_complex (expr, ptr, len, off);
7885 case VECTOR_CST:
7886 return native_encode_vector (expr, ptr, len, off);
7888 case STRING_CST:
7889 return native_encode_string (expr, ptr, len, off);
7891 default:
7892 return 0;
7896 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
7897 NON_LVALUE_EXPRs and nops. */
7900 native_encode_initializer (tree init, unsigned char *ptr, int len,
7901 int off)
7903 /* We don't support starting at a negative offset, and -1 is special. */
7904 if (off < -1 || init == NULL_TREE)
7905 return 0;
7907 STRIP_NOPS (init);
7908 switch (TREE_CODE (init))
7910 case VIEW_CONVERT_EXPR:
7911 case NON_LVALUE_EXPR:
7912 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off);
7913 default:
7914 return native_encode_expr (init, ptr, len, off);
7915 case CONSTRUCTOR:
7916 tree type = TREE_TYPE (init);
7917 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
7918 if (total_bytes < 0)
7919 return 0;
7920 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7921 return 0;
7922 int o = off == -1 ? 0 : off;
7923 if (TREE_CODE (type) == ARRAY_TYPE)
7925 HOST_WIDE_INT min_index;
7926 unsigned HOST_WIDE_INT cnt;
7927 HOST_WIDE_INT curpos = 0, fieldsize;
7928 constructor_elt *ce;
7930 if (TYPE_DOMAIN (type) == NULL_TREE
7931 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
7932 return 0;
7934 fieldsize = int_size_in_bytes (TREE_TYPE (type));
7935 if (fieldsize <= 0)
7936 return 0;
7938 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
7939 if (ptr != NULL)
7940 memset (ptr, '\0', MIN (total_bytes - off, len));
7942 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
7944 tree val = ce->value;
7945 tree index = ce->index;
7946 HOST_WIDE_INT pos = curpos, count = 0;
7947 bool full = false;
7948 if (index && TREE_CODE (index) == RANGE_EXPR)
7950 if (!tree_fits_shwi_p (TREE_OPERAND (index, 0))
7951 || !tree_fits_shwi_p (TREE_OPERAND (index, 1)))
7952 return 0;
7953 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
7954 * fieldsize;
7955 count = (tree_to_shwi (TREE_OPERAND (index, 1))
7956 - tree_to_shwi (TREE_OPERAND (index, 0)));
7958 else if (index)
7960 if (!tree_fits_shwi_p (index))
7961 return 0;
7962 pos = (tree_to_shwi (index) - min_index) * fieldsize;
7965 curpos = pos;
7966 if (val)
7969 if (off == -1
7970 || (curpos >= off
7971 && (curpos + fieldsize
7972 <= (HOST_WIDE_INT) off + len)))
7974 if (full)
7976 if (ptr)
7977 memcpy (ptr + (curpos - o), ptr + (pos - o),
7978 fieldsize);
7980 else if (!native_encode_initializer (val,
7982 ? ptr + curpos - o
7983 : NULL,
7984 fieldsize,
7985 off == -1 ? -1
7986 : 0))
7987 return 0;
7988 else
7990 full = true;
7991 pos = curpos;
7994 else if (curpos + fieldsize > off
7995 && curpos < (HOST_WIDE_INT) off + len)
7997 /* Partial overlap. */
7998 unsigned char *p = NULL;
7999 int no = 0;
8000 int l;
8001 if (curpos >= off)
8003 if (ptr)
8004 p = ptr + curpos - off;
8005 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8006 fieldsize);
8008 else
8010 p = ptr;
8011 no = off - curpos;
8012 l = len;
8014 if (!native_encode_initializer (val, p, l, no))
8015 return 0;
8017 curpos += fieldsize;
8019 while (count-- != 0);
8021 return MIN (total_bytes - off, len);
8023 else if (TREE_CODE (type) == RECORD_TYPE
8024 || TREE_CODE (type) == UNION_TYPE)
8026 unsigned HOST_WIDE_INT cnt;
8027 constructor_elt *ce;
8029 if (ptr != NULL)
8030 memset (ptr, '\0', MIN (total_bytes - off, len));
8031 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
8033 tree field = ce->index;
8034 tree val = ce->value;
8035 HOST_WIDE_INT pos, fieldsize;
8037 if (field == NULL_TREE)
8038 return 0;
8040 pos = int_byte_position (field);
8041 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8042 continue;
8044 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8045 && TYPE_DOMAIN (TREE_TYPE (field))
8046 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8047 return 0;
8048 if (DECL_SIZE_UNIT (field) == NULL_TREE
8049 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8050 return 0;
8051 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8052 if (fieldsize == 0)
8053 continue;
8055 if (off != -1 && pos + fieldsize <= off)
8056 continue;
8058 if (DECL_BIT_FIELD (field))
8059 return 0;
8061 if (val == NULL_TREE)
8062 continue;
8064 if (off == -1
8065 || (pos >= off
8066 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8068 if (!native_encode_initializer (val, ptr ? ptr + pos - o
8069 : NULL,
8070 fieldsize,
8071 off == -1 ? -1 : 0))
8072 return 0;
8074 else
8076 /* Partial overlap. */
8077 unsigned char *p = NULL;
8078 int no = 0;
8079 int l;
8080 if (pos >= off)
8082 if (ptr)
8083 p = ptr + pos - off;
8084 l = MIN ((HOST_WIDE_INT) off + len - pos,
8085 fieldsize);
8087 else
8089 p = ptr;
8090 no = off - pos;
8091 l = len;
8093 if (!native_encode_initializer (val, p, l, no))
8094 return 0;
8097 return MIN (total_bytes - off, len);
8099 return 0;
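/* Illustrative sketch, not used by the compiler: both partial-overlap
   cases above pick a destination offset, an inner offset NO and a
   length L so that the recursive call writes exactly the bytes of the
   field [POS, POS+FIELDSIZE) that intersect the window [OFF, OFF+LEN).
   A host-side model of that bookkeeping (the name is ours):  */

static inline void
partial_overlap_model (long pos, long fieldsize, long off, long len,
		       long *dst_delta, long *no, long *l)
{
  if (pos >= off)
    {
      /* The field starts inside the window.  */
      *dst_delta = pos - off;
      *no = 0;
      *l = MIN (off + len - pos, fieldsize);
    }
  else
    {
      /* The window starts inside the field.  */
      *dst_delta = 0;
      *no = off - pos;
      *l = len;
    }
}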
8104 /* Subroutine of native_interpret_expr. Interpret the contents of
8105 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8106 If the buffer cannot be interpreted, return NULL_TREE. */
8108 static tree
8109 native_interpret_int (tree type, const unsigned char *ptr, int len)
8111 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8113 if (total_bytes > len
8114 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8115 return NULL_TREE;
8117 wide_int result = wi::from_buffer (ptr, total_bytes);
8119 return wide_int_to_tree (type, result);
8123 /* Subroutine of native_interpret_expr. Interpret the contents of
8124 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8125 If the buffer cannot be interpreted, return NULL_TREE. */
8127 static tree
8128 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8130 scalar_mode mode = SCALAR_TYPE_MODE (type);
8131 int total_bytes = GET_MODE_SIZE (mode);
8132 double_int result;
8133 FIXED_VALUE_TYPE fixed_value;
8135 if (total_bytes > len
8136 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8137 return NULL_TREE;
8139 result = double_int::from_buffer (ptr, total_bytes);
8140 fixed_value = fixed_from_double_int (result, mode);
8142 return build_fixed (type, fixed_value);
8146 /* Subroutine of native_interpret_expr. Interpret the contents of
8147 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8148 If the buffer cannot be interpreted, return NULL_TREE. */
8150 static tree
8151 native_interpret_real (tree type, const unsigned char *ptr, int len)
8153 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8154 int total_bytes = GET_MODE_SIZE (mode);
8155 unsigned char value;
8156 /* There are always 32 bits in each long, no matter the size of
8157 the host's long. We handle floating point representations with
8158 up to 192 bits. */
8159 REAL_VALUE_TYPE r;
8160 long tmp[6];
8162 if (total_bytes > len || total_bytes > 24)
8163 return NULL_TREE;
8164 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8166 memset (tmp, 0, sizeof (tmp));
8167 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8168 bitpos += BITS_PER_UNIT)
8170 /* Both OFFSET and BYTE index within a long;
8171 bitpos indexes the whole float. */
8172 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8173 if (UNITS_PER_WORD < 4)
8175 int word = byte / UNITS_PER_WORD;
8176 if (WORDS_BIG_ENDIAN)
8177 word = (words - 1) - word;
8178 offset = word * UNITS_PER_WORD;
8179 if (BYTES_BIG_ENDIAN)
8180 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8181 else
8182 offset += byte % UNITS_PER_WORD;
8184 else
8186 offset = byte;
8187 if (BYTES_BIG_ENDIAN)
8189 /* Reverse bytes within each long, or within the entire float
8190 if it's smaller than a long (for HFmode). */
8191 offset = MIN (3, total_bytes - 1) - offset;
8192 gcc_assert (offset >= 0);
8195 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8197 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8200 real_from_target (&r, tmp, mode);
8201 return build_real (type, r);
8205 /* Subroutine of native_interpret_expr. Interpret the contents of
8206 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8207 If the buffer cannot be interpreted, return NULL_TREE. */
8209 static tree
8210 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8212 tree etype, rpart, ipart;
8213 int size;
8215 etype = TREE_TYPE (type);
8216 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8217 if (size * 2 > len)
8218 return NULL_TREE;
8219 rpart = native_interpret_expr (etype, ptr, size);
8220 if (!rpart)
8221 return NULL_TREE;
8222 ipart = native_interpret_expr (etype, ptr+size, size);
8223 if (!ipart)
8224 return NULL_TREE;
8225 return build_complex (type, rpart, ipart);
8228 /* Read a vector of type TYPE from the target memory image given by BYTES,
8229 which contains LEN bytes. The vector is known to be encodable using
8230 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8232 Return the vector on success, otherwise return null. */
8234 static tree
8235 native_interpret_vector_part (tree type, const unsigned char *bytes,
8236 unsigned int len, unsigned int npatterns,
8237 unsigned int nelts_per_pattern)
8239 tree elt_type = TREE_TYPE (type);
8240 if (VECTOR_BOOLEAN_TYPE_P (type)
8241 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8243 /* This is the only case in which elements can be smaller than a byte.
8244 Element 0 is always in the lsb of the containing byte. */
8245 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8246 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8247 return NULL_TREE;
8249 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8250 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8252 unsigned int bit_index = i * elt_bits;
8253 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8254 unsigned int lsb = bit_index % BITS_PER_UNIT;
8255 builder.quick_push (bytes[byte_index] & (1 << lsb)
8256 ? build_all_ones_cst (elt_type)
8257 : build_zero_cst (elt_type));
8259 return builder.build ();
8262 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8263 if (elt_bytes * npatterns * nelts_per_pattern > len)
8264 return NULL_TREE;
8266 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8267 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8269 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8270 if (!elt)
8271 return NULL_TREE;
8272 builder.quick_push (elt);
8273 bytes += elt_bytes;
8275 return builder.build ();
8278 /* Subroutine of native_interpret_expr. Interpret the contents of
8279 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8280 If the buffer cannot be interpreted, return NULL_TREE. */
8282 static tree
8283 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8285 tree etype;
8286 unsigned int size;
8287 unsigned HOST_WIDE_INT count;
8289 etype = TREE_TYPE (type);
8290 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8291 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8292 || size * count > len)
8293 return NULL_TREE;
8295 return native_interpret_vector_part (type, ptr, len, count, 1);
8299 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8300 the buffer PTR of length LEN as a constant of type TYPE. For
8301 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8302 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8303 return NULL_TREE. */
8305 tree
8306 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8308 switch (TREE_CODE (type))
8310 case INTEGER_TYPE:
8311 case ENUMERAL_TYPE:
8312 case BOOLEAN_TYPE:
8313 case POINTER_TYPE:
8314 case REFERENCE_TYPE:
8315 return native_interpret_int (type, ptr, len);
8317 case REAL_TYPE:
8318 return native_interpret_real (type, ptr, len);
8320 case FIXED_POINT_TYPE:
8321 return native_interpret_fixed (type, ptr, len);
8323 case COMPLEX_TYPE:
8324 return native_interpret_complex (type, ptr, len);
8326 case VECTOR_TYPE:
8327 return native_interpret_vector (type, ptr, len);
8329 default:
8330 return NULL_TREE;
8334 /* Returns true if we can interpret the contents of a native encoding
8335 as TYPE. */
8337 bool
8338 can_native_interpret_type_p (tree type)
8340 switch (TREE_CODE (type))
8342 case INTEGER_TYPE:
8343 case ENUMERAL_TYPE:
8344 case BOOLEAN_TYPE:
8345 case POINTER_TYPE:
8346 case REFERENCE_TYPE:
8347 case FIXED_POINT_TYPE:
8348 case REAL_TYPE:
8349 case COMPLEX_TYPE:
8350 case VECTOR_TYPE:
8351 return true;
8352 default:
8353 return false;
8357 /* Routines for manipulating native_encode_expr encoded data when the encoded
8358 or extracted constant positions and/or sizes aren't byte aligned. */
8360 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
8361 bits between adjacent elements. AMNT should be within
8362 [0, BITS_PER_UNIT).
8363 Example, AMNT = 2:
8364 00011111|11100000 << 2 = 01111111|10000000
8365 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
8367 void
8368 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
8369 unsigned int amnt)
8371 if (amnt == 0)
8372 return;
8374 unsigned char carry_over = 0U;
8375 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
8376 unsigned char clear_mask = (~0U) << amnt;
8378 for (unsigned int i = 0; i < sz; i++)
8380 unsigned prev_carry_over = carry_over;
8381 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
8383 ptr[i] <<= amnt;
8384 if (i != 0)
8386 ptr[i] &= clear_mask;
8387 ptr[i] |= prev_carry_over;
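/* Illustrative sketch, not used by the compiler: applying the routine
   above to the example in its comment.  With SZ = 2 and AMNT = 2,
   the buffer { 0xe0, 0x1f } (PTR[1]|PTR[0] = 00011111|11100000)
   becomes { 0x80, 0x7f } (01111111|10000000).  The name is ours:  */

static inline int
shift_bytes_left_example (void)
{
  unsigned char buf[2] = { 0xe0, 0x1f };
  shift_bytes_in_array_left (buf, 2, 2);
  return buf[0] == 0x80 && buf[1] == 0x7f;	/* Returns 1.  */
}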
8392 /* Like shift_bytes_in_array_left but for big-endian.
8393 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
8394 bits between adjacent elements. AMNT should be within
8395 [0, BITS_PER_UNIT).
8396 Example, AMNT = 2:
8397 00011111|11100000 >> 2 = 00000111|11111000
8398 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
8400 void
8401 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
8402 unsigned int amnt)
8404 if (amnt == 0)
8405 return;
8407 unsigned char carry_over = 0U;
8408 unsigned char carry_mask = ~(~0U << amnt);
8410 for (unsigned int i = 0; i < sz; i++)
8412 unsigned prev_carry_over = carry_over;
8413 carry_over = ptr[i] & carry_mask;
8415 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
8416 ptr[i] >>= amnt;
8417 ptr[i] |= prev_carry_over;
8421 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
8422 directly on the VECTOR_CST encoding, in a way that works for variable-
8423 length vectors. Return the resulting VECTOR_CST on success or null
8424 on failure. */
8426 static tree
8427 fold_view_convert_vector_encoding (tree type, tree expr)
8429 tree expr_type = TREE_TYPE (expr);
8430 poly_uint64 type_bits, expr_bits;
8431 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
8432 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
8433 return NULL_TREE;
8435 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
8436 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
8437 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
8438 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
8440 /* We can only preserve the semantics of a stepped pattern if the new
8441 vector element is an integer of the same size. */
8442 if (VECTOR_CST_STEPPED_P (expr)
8443 && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
8444 return NULL_TREE;
8446 /* The number of bits needed to encode one element from every pattern
8447 of the original vector. */
8448 unsigned int expr_sequence_bits
8449 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
8451 /* The number of bits needed to encode one element from every pattern
8452 of the result. */
8453 unsigned int type_sequence_bits
8454 = least_common_multiple (expr_sequence_bits, type_elt_bits);
8456 /* Don't try to read more bytes than are available, which can happen
8457 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
8458 The general VIEW_CONVERT handling can cope with that case, so there's
8459 no point complicating things here. */
8460 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
8461 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
8462 BITS_PER_UNIT);
8463 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
8464 if (known_gt (buffer_bits, expr_bits))
8465 return NULL_TREE;
8467 /* Get enough bytes of EXPR to form the new encoding. */
8468 auto_vec<unsigned char, 128> buffer (buffer_bytes);
8469 buffer.quick_grow (buffer_bytes);
8470 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
8471 buffer_bits / expr_elt_bits)
8472 != (int) buffer_bytes)
8473 return NULL_TREE;
8475 /* Reencode the bytes as TYPE. */
8476 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
8477 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
8478 type_npatterns, nelts_per_pattern);
8481 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
8482 TYPE at compile-time. If we're unable to perform the conversion
8483 return NULL_TREE. */
8485 static tree
8486 fold_view_convert_expr (tree type, tree expr)
8488 /* We support up to 512-bit values (for V8DFmode). */
8489 unsigned char buffer[64];
8490 int len;
8492 /* Check that the host and target are sane. */
8493 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
8494 return NULL_TREE;
8496 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
8497 if (tree res = fold_view_convert_vector_encoding (type, expr))
8498 return res;
8500 len = native_encode_expr (expr, buffer, sizeof (buffer));
8501 if (len == 0)
8502 return NULL_TREE;
8504 return native_interpret_expr (type, buffer, len);
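/* Illustrative sketch, not used by the compiler: the encode/interpret
   round trip above has the same semantics as memcpy-based type punning
   at the C level, e.g. reading the bit pattern of a float as a 32-bit
   integer (the name is ours; assumes 4-byte float and int):  */

static inline unsigned int
view_convert_f32_model (float f)
{
  unsigned int u;
  memcpy (&u, &f, sizeof (u));
  return u;
}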
8507 /* Build an expression for the address of T. Folds away INDIRECT_REF
8508 to avoid confusing the gimplify process. */
8510 tree
8511 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
8513 /* The size of the object is not relevant when talking about its address. */
8514 if (TREE_CODE (t) == WITH_SIZE_EXPR)
8515 t = TREE_OPERAND (t, 0);
8517 if (TREE_CODE (t) == INDIRECT_REF)
8519 t = TREE_OPERAND (t, 0);
8521 if (TREE_TYPE (t) != ptrtype)
8522 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
8524 else if (TREE_CODE (t) == MEM_REF
8525 && integer_zerop (TREE_OPERAND (t, 1)))
8526 return TREE_OPERAND (t, 0);
8527 else if (TREE_CODE (t) == MEM_REF
8528 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
8529 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
8530 TREE_OPERAND (t, 0),
8531 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
8532 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8534 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
8536 if (TREE_TYPE (t) != ptrtype)
8537 t = fold_convert_loc (loc, ptrtype, t);
8539 else
8540 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
8542 return t;
8545 /* Build an expression for the address of T. */
8547 tree
8548 build_fold_addr_expr_loc (location_t loc, tree t)
8550 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8552 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
8555 /* Fold a unary expression of code CODE and type TYPE with operand
8556 OP0. Return the folded expression if folding is successful.
8557 Otherwise, return NULL_TREE. */
8559 tree
8560 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8562 tree tem;
8563 tree arg0;
8564 enum tree_code_class kind = TREE_CODE_CLASS (code);
8566 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8567 && TREE_CODE_LENGTH (code) == 1);
8569 arg0 = op0;
8570 if (arg0)
8572 if (CONVERT_EXPR_CODE_P (code)
8573 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
8575 /* Don't use STRIP_NOPS, because signedness of argument type
8576 matters. */
8577 STRIP_SIGN_NOPS (arg0);
8579 else
8581 /* Strip any conversions that don't change the mode. This
8582 is safe for every expression, except for a comparison
8583 expression because its signedness is derived from its
8584 operands.
8586 Note that this is done as an internal manipulation within
8587 the constant folder, in order to find the simplest
8588 representation of the arguments so that their form can be
8589 studied. In any case, the appropriate type conversions
8590 should be put back in the tree that will get out of the
8591 constant folder. */
8592 STRIP_NOPS (arg0);
8595 if (CONSTANT_CLASS_P (arg0))
8597 tree tem = const_unop (code, type, arg0);
8598 if (tem)
8600 if (TREE_TYPE (tem) != type)
8601 tem = fold_convert_loc (loc, type, tem);
8602 return tem;
8607 tem = generic_simplify (loc, code, type, op0);
8608 if (tem)
8609 return tem;
8611 if (TREE_CODE_CLASS (code) == tcc_unary)
8613 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8614 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8615 fold_build1_loc (loc, code, type,
8616 fold_convert_loc (loc, TREE_TYPE (op0),
8617 TREE_OPERAND (arg0, 1))));
8618 else if (TREE_CODE (arg0) == COND_EXPR)
8620 tree arg01 = TREE_OPERAND (arg0, 1);
8621 tree arg02 = TREE_OPERAND (arg0, 2);
8622 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8623 arg01 = fold_build1_loc (loc, code, type,
8624 fold_convert_loc (loc,
8625 TREE_TYPE (op0), arg01));
8626 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8627 arg02 = fold_build1_loc (loc, code, type,
8628 fold_convert_loc (loc,
8629 TREE_TYPE (op0), arg02));
8630 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
8631 arg01, arg02);
8633 /* If this was a conversion, and all we did was to move it
8634 inside the COND_EXPR, bring it back out. But leave it if
8635 it is a conversion from integer to integer and the
8636 result precision is no wider than a word since such a
8637 conversion is cheap and may be optimized away by combine,
8638 while it couldn't if it were outside the COND_EXPR. Then return
8639 so we don't get into an infinite recursion loop taking the
8640 conversion out and then back in. */
8642 if ((CONVERT_EXPR_CODE_P (code)
8643 || code == NON_LVALUE_EXPR)
8644 && TREE_CODE (tem) == COND_EXPR
8645 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8646 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8647 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8648 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8649 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8650 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8651 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8652 && (INTEGRAL_TYPE_P
8653 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8654 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8655 || flag_syntax_only))
8656 tem = build1_loc (loc, code, type,
8657 build3 (COND_EXPR,
8658 TREE_TYPE (TREE_OPERAND
8659 (TREE_OPERAND (tem, 1), 0)),
8660 TREE_OPERAND (tem, 0),
8661 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8662 TREE_OPERAND (TREE_OPERAND (tem, 2),
8663 0)));
8664 return tem;
8668 switch (code)
8670 case NON_LVALUE_EXPR:
8671 if (!maybe_lvalue_p (op0))
8672 return fold_convert_loc (loc, type, op0);
8673 return NULL_TREE;
8675 CASE_CONVERT:
8676 case FLOAT_EXPR:
8677 case FIX_TRUNC_EXPR:
8678 if (COMPARISON_CLASS_P (op0))
8680 /* If we have (type) (a CMP b) and type is an integral type, return
8681 new expression involving the new type. Canonicalize
8682 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
8683 non-integral type.
8684 Do not fold the result, as that would not simplify further;
8685 folding again results in recursion. */
8686 if (TREE_CODE (type) == BOOLEAN_TYPE)
8687 return build2_loc (loc, TREE_CODE (op0), type,
8688 TREE_OPERAND (op0, 0),
8689 TREE_OPERAND (op0, 1));
8690 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
8691 && TREE_CODE (type) != VECTOR_TYPE)
8692 return build3_loc (loc, COND_EXPR, type, op0,
8693 constant_boolean_node (true, type),
8694 constant_boolean_node (false, type));
8697 /* Handle (T *)&A.B.C for A being of type T and B and C
8698 living at offset zero. This occurs frequently in
8699 C++ upcasting and then accessing the base. */
8700 if (TREE_CODE (op0) == ADDR_EXPR
8701 && POINTER_TYPE_P (type)
8702 && handled_component_p (TREE_OPERAND (op0, 0)))
8704 poly_int64 bitsize, bitpos;
8705 tree offset;
8706 machine_mode mode;
8707 int unsignedp, reversep, volatilep;
8708 tree base
8709 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
8710 &offset, &mode, &unsignedp, &reversep,
8711 &volatilep);
8712 /* If the reference was to a (constant) zero offset, we can use
8713 the address of the base if it has the same base type
8714 as the result type and the pointer type is unqualified. */
8715 if (!offset
8716 && known_eq (bitpos, 0)
8717 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8718 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8719 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8720 return fold_convert_loc (loc, type,
8721 build_fold_addr_expr_loc (loc, base));
8724 if (TREE_CODE (op0) == MODIFY_EXPR
8725 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8726 /* Detect assigning a bitfield. */
8727 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8728 && DECL_BIT_FIELD
8729 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8731 /* Don't leave an assignment inside a conversion
8732 unless assigning a bitfield. */
8733 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8734 /* First do the assignment, then return converted constant. */
8735 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8736 TREE_NO_WARNING (tem) = 1;
8737 TREE_USED (tem) = 1;
8738 return tem;
8741 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8742 constant (if x has signed type, the sign bit cannot be set
8743 in c). This folds extension into the BIT_AND_EXPR.
8744 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8745 very likely don't have maximal range for their precision and this
8746 transformation effectively doesn't preserve non-maximal ranges. */
8747 if (TREE_CODE (type) == INTEGER_TYPE
8748 && TREE_CODE (op0) == BIT_AND_EXPR
8749 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8751 tree and_expr = op0;
8752 tree and0 = TREE_OPERAND (and_expr, 0);
8753 tree and1 = TREE_OPERAND (and_expr, 1);
8754 int change = 0;
8756 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8757 || (TYPE_PRECISION (type)
8758 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8759 change = 1;
8760 else if (TYPE_PRECISION (TREE_TYPE (and1))
8761 <= HOST_BITS_PER_WIDE_INT
8762 && tree_fits_uhwi_p (and1))
8764 unsigned HOST_WIDE_INT cst;
8766 cst = tree_to_uhwi (and1);
8767 cst &= HOST_WIDE_INT_M1U
8768 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8769 change = (cst == 0);
8770 if (change
8771 && !flag_syntax_only
8772 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
8773 == ZERO_EXTEND))
8775 tree uns = unsigned_type_for (TREE_TYPE (and0));
8776 and0 = fold_convert_loc (loc, uns, and0);
8777 and1 = fold_convert_loc (loc, uns, and1);
8780 if (change)
8782 tem = force_fit_type (type, wi::to_widest (and1), 0,
8783 TREE_OVERFLOW (and1));
8784 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8785 fold_convert_loc (loc, type, and0), tem);
8789 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
8790 cast (T1)X will fold away. We assume that this happens when X itself
8791 is a cast. */
8792 if (POINTER_TYPE_P (type)
8793 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8794 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
8796 tree arg00 = TREE_OPERAND (arg0, 0);
8797 tree arg01 = TREE_OPERAND (arg0, 1);
8799 return fold_build_pointer_plus_loc
8800 (loc, fold_convert_loc (loc, type, arg00), arg01);
8803 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8804 of the same precision, and X is of an integer type not narrower than
8805 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8806 if (INTEGRAL_TYPE_P (type)
8807 && TREE_CODE (op0) == BIT_NOT_EXPR
8808 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8809 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8810 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8812 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8813 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8814 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8815 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8816 fold_convert_loc (loc, type, tem));
8819 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8820 type of X and Y (integer types only). */
8821 if (INTEGRAL_TYPE_P (type)
8822 && TREE_CODE (op0) == MULT_EXPR
8823 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8824 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8826 /* Be careful not to introduce new overflows. */
8827 tree mult_type;
8828 if (TYPE_OVERFLOW_WRAPS (type))
8829 mult_type = type;
8830 else
8831 mult_type = unsigned_type_for (type);
8833 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8835 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8836 fold_convert_loc (loc, mult_type,
8837 TREE_OPERAND (op0, 0)),
8838 fold_convert_loc (loc, mult_type,
8839 TREE_OPERAND (op0, 1)));
8840 return fold_convert_loc (loc, type, tem);
8844 return NULL_TREE;
8846 case VIEW_CONVERT_EXPR:
8847 if (TREE_CODE (op0) == MEM_REF)
8849 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
8850 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
8851 tem = fold_build2_loc (loc, MEM_REF, type,
8852 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8853 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
8854 return tem;
8857 return NULL_TREE;
8859 case NEGATE_EXPR:
8860 tem = fold_negate_expr (loc, arg0);
8861 if (tem)
8862 return fold_convert_loc (loc, type, tem);
8863 return NULL_TREE;
8865 case ABS_EXPR:
8866 /* Convert fabs((double)float) into (double)fabsf(float). */
8867 if (TREE_CODE (arg0) == NOP_EXPR
8868 && TREE_CODE (type) == REAL_TYPE)
8870 tree targ0 = strip_float_extensions (arg0);
8871 if (targ0 != arg0)
8872 return fold_convert_loc (loc, type,
8873 fold_build1_loc (loc, ABS_EXPR,
8874 TREE_TYPE (targ0),
8875 targ0));
8877 return NULL_TREE;
8879 case BIT_NOT_EXPR:
8880 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8881 if (TREE_CODE (arg0) == BIT_XOR_EXPR
8882 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8883 fold_convert_loc (loc, type,
8884 TREE_OPERAND (arg0, 0)))))
8885 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8886 fold_convert_loc (loc, type,
8887 TREE_OPERAND (arg0, 1)));
8888 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8889 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8890 fold_convert_loc (loc, type,
8891 TREE_OPERAND (arg0, 1)))))
8892 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8893 fold_convert_loc (loc, type,
8894 TREE_OPERAND (arg0, 0)), tem);
8896 return NULL_TREE;
8898 case TRUTH_NOT_EXPR:
8899 /* Note that the operand of this must be an int
8900 and its values must be 0 or 1.
8901 ("true" is a fixed value perhaps depending on the language,
8902 but we don't handle values other than 1 correctly yet.) */
8903 tem = fold_truth_not_expr (loc, arg0);
8904 if (!tem)
8905 return NULL_TREE;
8906 return fold_convert_loc (loc, type, tem);
8908 case INDIRECT_REF:
8909 /* Fold *&X to X if X is an lvalue. */
8910 if (TREE_CODE (op0) == ADDR_EXPR)
8912 tree op00 = TREE_OPERAND (op0, 0);
8913 if ((VAR_P (op00)
8914 || TREE_CODE (op00) == PARM_DECL
8915 || TREE_CODE (op00) == RESULT_DECL)
8916 && !TREE_READONLY (op00))
8917 return op00;
8919 return NULL_TREE;
8921 default:
8922 return NULL_TREE;
8923 } /* switch (code) */
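/* Illustrative sketch, not used by the compiler: the BIT_AND_EXPR
   narrowing case above is, at the C level, the identity
   (T) (x & c) == ((T) x & (T) c) when the conversion cannot create new
   high bits, e.g. (the name is ours):  */

static inline int
convert_bit_and_model (unsigned int x)
{
  unsigned short a = (unsigned short) (x & 0x7f);	/* (T)(x & c) */
  unsigned short b = (unsigned short) x & 0x7f;		/* (T)x & (T)c */
  return a == b;					/* Always 1.  */
}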
8927 /* If the operation was a conversion do _not_ mark a resulting constant
8928 with TREE_OVERFLOW if the original constant was not. These conversions
8929 have implementation defined behavior and retaining the TREE_OVERFLOW
8930 flag here would confuse later passes such as VRP. */
8931 tree
8932 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8933 tree type, tree op0)
8935 tree res = fold_unary_loc (loc, code, type, op0);
8936 if (res
8937 && TREE_CODE (res) == INTEGER_CST
8938 && TREE_CODE (op0) == INTEGER_CST
8939 && CONVERT_EXPR_CODE_P (code))
8940 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8942 return res;
8945 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8946 operands OP0 and OP1. LOC is the location of the resulting expression.
8947 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8948 Return the folded expression if folding is successful. Otherwise,
8949 return NULL_TREE. */
8950 static tree
8951 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8952 tree arg0, tree arg1, tree op0, tree op1)
8954 tree tem;
8956 /* We only do these simplifications if we are optimizing. */
8957 if (!optimize)
8958 return NULL_TREE;
8960 /* Check for things like (A || B) && (A || C). We can convert this
8961 to A || (B && C). Note that either operator can be any of the four
8962 truth and/or operations and the transformation will still be
8963 valid. Also note that we only care about order for the
8964 ANDIF and ORIF operators. If B contains side effects, this
8965 might change the truth-value of A. */
8966 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8967 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8968 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8969 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8970 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8971 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8973 tree a00 = TREE_OPERAND (arg0, 0);
8974 tree a01 = TREE_OPERAND (arg0, 1);
8975 tree a10 = TREE_OPERAND (arg1, 0);
8976 tree a11 = TREE_OPERAND (arg1, 1);
8977 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8978 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8979 && (code == TRUTH_AND_EXPR
8980 || code == TRUTH_OR_EXPR));
8982 if (operand_equal_p (a00, a10, 0))
8983 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8984 fold_build2_loc (loc, code, type, a01, a11));
8985 else if (commutative && operand_equal_p (a00, a11, 0))
8986 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8987 fold_build2_loc (loc, code, type, a01, a10));
8988 else if (commutative && operand_equal_p (a01, a10, 0))
8989 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8990 fold_build2_loc (loc, code, type, a00, a11));
8992 /* This case is tricky because we must either have commutative
8993 operators or else A10 must not have side-effects. */
8995 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8996 && operand_equal_p (a01, a11, 0))
8997 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8998 fold_build2_loc (loc, code, type, a00, a10),
8999 a01);
9002 /* See if we can build a range comparison. */
9003 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9004 return tem;
9006 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9007 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9009 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9010 if (tem)
9011 return fold_build2_loc (loc, code, type, tem, arg1);
9014 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9015 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9017 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9018 if (tem)
9019 return fold_build2_loc (loc, code, type, arg0, tem);
9022 /* Check for the possibility of merging component references. If our
9023 lhs is another similar operation, try to merge its rhs with our
9024 rhs. Then try to merge our lhs and rhs. */
9025 if (TREE_CODE (arg0) == code
9026 && (tem = fold_truth_andor_1 (loc, code, type,
9027 TREE_OPERAND (arg0, 1), arg1)) != 0)
9028 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9030 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9031 return tem;
9033 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9034 if (param_logical_op_non_short_circuit != -1)
9035 logical_op_non_short_circuit
9036 = param_logical_op_non_short_circuit;
9037 if (logical_op_non_short_circuit
9038 && !flag_sanitize_coverage
9039 && (code == TRUTH_AND_EXPR
9040 || code == TRUTH_ANDIF_EXPR
9041 || code == TRUTH_OR_EXPR
9042 || code == TRUTH_ORIF_EXPR))
9044 enum tree_code ncode, icode;
9046 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9047 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9048 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9050 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9051 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9052 We don't want to pack more than two leaves into a non-IF AND/OR
9053 expression.
9054 If the tree code of the left-hand operand isn't an AND/OR-IF code and not
9055 equal to IF-CODE, then we don't want to add the right-hand operand.
9056 If the inner right-hand side of the left-hand operand has
9057 side-effects, or isn't simple, then we can't add to it,
9058 as otherwise we might destroy the if-sequence. */
9059 if (TREE_CODE (arg0) == icode
9060 && simple_operand_p_2 (arg1)
9061 /* Needed for sequence points to handle trappings, and
9062 side-effects. */
9063 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
9065 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9066 arg1);
9067 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9068 tem);
9070 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9071 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
9072 else if (TREE_CODE (arg1) == icode
9073 && simple_operand_p_2 (arg0)
9074 /* Needed for sequence points to handle trappings, and
9075 side-effects. */
9076 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
9078 tem = fold_build2_loc (loc, ncode, type,
9079 arg0, TREE_OPERAND (arg1, 0));
9080 return fold_build2_loc (loc, icode, type, tem,
9081 TREE_OPERAND (arg1, 1));
9083 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9084 into (A OR B).
9085 For sequence point consistency, we need to check for trapping
9086 and side-effects. */
9087 else if (code == icode && simple_operand_p_2 (arg0)
9088 && simple_operand_p_2 (arg1))
9089 return fold_build2_loc (loc, ncode, type, arg0, arg1);
9092 return NULL_TREE;
9095 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9096 by changing CODE to reduce the magnitude of constants involved in
9097 ARG0 of the comparison.
9098 Returns a canonicalized comparison tree if a simplification was
9099 possible, otherwise returns NULL_TREE.
9100 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9101 valid if signed overflow is undefined. */
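/* For illustration (hypothetical values): for signed int X with
   undefined overflow, X - 5 < Y is canonicalized to X - 4 <= Y,
   shrinking the constant's magnitude by one without changing the
   comparison's meaning.  */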
9103 static tree
9104 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9105 tree arg0, tree arg1,
9106 bool *strict_overflow_p)
9108 enum tree_code code0 = TREE_CODE (arg0);
9109 tree t, cst0 = NULL_TREE;
9110 int sgn0;
9112 /* Match A +- CST code arg1. We can change this only if overflow
9113 is undefined. */
9114 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9115 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9116 /* In principle pointers also have undefined overflow behavior,
9117 but that causes problems elsewhere. */
9118 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9119 && (code0 == MINUS_EXPR
9120 || code0 == PLUS_EXPR)
9121 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9122 return NULL_TREE;
9124 /* Identify the constant in arg0 and its sign. */
9125 cst0 = TREE_OPERAND (arg0, 1);
9126 sgn0 = tree_int_cst_sgn (cst0);
9128 /* Overflowed constants and zero will cause problems. */
9129 if (integer_zerop (cst0)
9130 || TREE_OVERFLOW (cst0))
9131 return NULL_TREE;
9133 /* See if we can reduce the magnitude of the constant in
9134 arg0 by changing the comparison code. */
9135 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9136 if (code == LT_EXPR
9137 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9138 code = LE_EXPR;
9139 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9140 else if (code == GT_EXPR
9141 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9142 code = GE_EXPR;
9143 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9144 else if (code == LE_EXPR
9145 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9146 code = LT_EXPR;
9147 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9148 else if (code == GE_EXPR
9149 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9150 code = GT_EXPR;
9151 else
9152 return NULL_TREE;
9153 *strict_overflow_p = true;
9155 /* Now build the constant reduced in magnitude. But not if that
9156 would produce one outside of its type's range. */
9157 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9158 && ((sgn0 == 1
9159 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9160 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9161 || (sgn0 == -1
9162 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9163 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9164 return NULL_TREE;
9166 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9167 cst0, build_int_cst (TREE_TYPE (cst0), 1));
9168 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9169 t = fold_convert (TREE_TYPE (arg1), t);
9171 return fold_build2_loc (loc, code, type, t, arg1);
9174 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9175 overflow further. Try to decrease the magnitude of constants involved
9176 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9177 and put sole constants at the second argument position.
9178 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9180 static tree
9181 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9182 tree arg0, tree arg1)
9184 tree t;
9185 bool strict_overflow_p;
9186 const char * const warnmsg = G_("assuming signed overflow does not occur "
9187 "when reducing constant in comparison");
9189 /* Try canonicalization by simplifying arg0. */
9190 strict_overflow_p = false;
9191 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9192 &strict_overflow_p);
9193 if (t)
9195 if (strict_overflow_p)
9196 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9197 return t;
9200 /* Try canonicalization by simplifying arg1 using the swapped
9201 comparison. */
9202 code = swap_tree_comparison (code);
9203 strict_overflow_p = false;
9204 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9205 &strict_overflow_p);
9206 if (t && strict_overflow_p)
9207 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9208 return t;
9211 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9212 space. This is used to avoid issuing overflow warnings for
9213 expressions like &p->x which cannot wrap. */
9215 static bool
9216 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9218 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9219 return true;
9221 if (maybe_lt (bitpos, 0))
9222 return true;
9224 poly_wide_int wi_offset;
9225 int precision = TYPE_PRECISION (TREE_TYPE (base));
9226 if (offset == NULL_TREE)
9227 wi_offset = wi::zero (precision);
9228 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9229 return true;
9230 else
9231 wi_offset = wi::to_poly_wide (offset);
9233 wi::overflow_type overflow;
9234 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9235 precision);
9236 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9237 if (overflow)
9238 return true;
9240 poly_uint64 total_hwi, size;
9241 if (!total.to_uhwi (&total_hwi)
9242 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9243 &size)
9244 || known_eq (size, 0U))
9245 return true;
9247 if (known_le (total_hwi, size))
9248 return false;
9250 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9251 array. */
9252 if (TREE_CODE (base) == ADDR_EXPR
9253 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9254 &size)
9255 && maybe_ne (size, 0U)
9256 && known_le (total_hwi, size))
9257 return false;
9259 return true;
9262 /* Return a positive integer when the symbol DECL is known to have
9263 a nonzero address, zero when it's known not to (e.g., it's a weak
9264 symbol), and a negative integer when the symbol is not yet in the
9265 symbol table and so whether or not its address is zero is unknown.
9266 For function-local objects, always return a positive integer. */
9267 static int
9268 maybe_nonzero_address (tree decl)
9270 if (DECL_P (decl) && decl_in_symtab_p (decl))
9271 if (struct symtab_node *symbol = symtab_node::get_create (decl))
9272 return symbol->nonzero_address ();
9274 /* Function local objects are never NULL. */
9275 if (DECL_P (decl)
9276 && (DECL_CONTEXT (decl)
9277 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
9278 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
9279 return 1;
9281 return -1;
9284 /* Subroutine of fold_binary. This routine performs all of the
9285 transformations that are common to the equality/inequality
9286 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9287 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9288 fold_binary should call fold_binary rather than this routine. Fold a comparison with
9289 tree code CODE and type TYPE with operands OP0 and OP1. Return
9290 the folded comparison or NULL_TREE. */
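/* For illustration (hypothetical input): given int a[10], the
   comparison &a[2] < &a[5] decomposes into the common base "a" with
   bit positions for elements 2 and 5, so the known_lt check below
   folds it to the constant true.  */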
9292 static tree
9293 fold_comparison (location_t loc, enum tree_code code, tree type,
9294 tree op0, tree op1)
9296 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
9297 tree arg0, arg1, tem;
9299 arg0 = op0;
9300 arg1 = op1;
9302 STRIP_SIGN_NOPS (arg0);
9303 STRIP_SIGN_NOPS (arg1);
9305 /* For comparisons of pointers we can decompose it to a compile time
9306 comparison of the base objects and the offsets into the object.
9307 This requires at least one operand being an ADDR_EXPR or a
9308 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9309 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9310 && (TREE_CODE (arg0) == ADDR_EXPR
9311 || TREE_CODE (arg1) == ADDR_EXPR
9312 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9313 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9315 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9316 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
9317 machine_mode mode;
9318 int volatilep, reversep, unsignedp;
9319 bool indirect_base0 = false, indirect_base1 = false;
9321 /* Get base and offset for the access. Strip ADDR_EXPR for
9322 get_inner_reference, but put it back by stripping INDIRECT_REF
9323 off the base object if possible. indirect_baseN will be true
9324 if baseN is not an address but refers to the object itself. */
9325 base0 = arg0;
9326 if (TREE_CODE (arg0) == ADDR_EXPR)
9328 base0
9329 = get_inner_reference (TREE_OPERAND (arg0, 0),
9330 &bitsize, &bitpos0, &offset0, &mode,
9331 &unsignedp, &reversep, &volatilep);
9332 if (TREE_CODE (base0) == INDIRECT_REF)
9333 base0 = TREE_OPERAND (base0, 0);
9334 else
9335 indirect_base0 = true;
9337 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9339 base0 = TREE_OPERAND (arg0, 0);
9340 STRIP_SIGN_NOPS (base0);
9341 if (TREE_CODE (base0) == ADDR_EXPR)
9343 base0
9344 = get_inner_reference (TREE_OPERAND (base0, 0),
9345 &bitsize, &bitpos0, &offset0, &mode,
9346 &unsignedp, &reversep, &volatilep);
9347 if (TREE_CODE (base0) == INDIRECT_REF)
9348 base0 = TREE_OPERAND (base0, 0);
9349 else
9350 indirect_base0 = true;
9352 if (offset0 == NULL_TREE || integer_zerop (offset0))
9353 offset0 = TREE_OPERAND (arg0, 1);
9354 else
9355 offset0 = size_binop (PLUS_EXPR, offset0,
9356 TREE_OPERAND (arg0, 1));
9357 if (poly_int_tree_p (offset0))
9359 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
9360 TYPE_PRECISION (sizetype));
9361 tem <<= LOG2_BITS_PER_UNIT;
9362 tem += bitpos0;
9363 if (tem.to_shwi (&bitpos0))
9364 offset0 = NULL_TREE;
9368 base1 = arg1;
9369 if (TREE_CODE (arg1) == ADDR_EXPR)
9371 base1
9372 = get_inner_reference (TREE_OPERAND (arg1, 0),
9373 &bitsize, &bitpos1, &offset1, &mode,
9374 &unsignedp, &reversep, &volatilep);
9375 if (TREE_CODE (base1) == INDIRECT_REF)
9376 base1 = TREE_OPERAND (base1, 0);
9377 else
9378 indirect_base1 = true;
9380 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9382 base1 = TREE_OPERAND (arg1, 0);
9383 STRIP_SIGN_NOPS (base1);
9384 if (TREE_CODE (base1) == ADDR_EXPR)
9386 base1
9387 = get_inner_reference (TREE_OPERAND (base1, 0),
9388 &bitsize, &bitpos1, &offset1, &mode,
9389 &unsignedp, &reversep, &volatilep);
9390 if (TREE_CODE (base1) == INDIRECT_REF)
9391 base1 = TREE_OPERAND (base1, 0);
9392 else
9393 indirect_base1 = true;
9395 if (offset1 == NULL_TREE || integer_zerop (offset1))
9396 offset1 = TREE_OPERAND (arg1, 1);
9397 else
9398 offset1 = size_binop (PLUS_EXPR, offset1,
9399 TREE_OPERAND (arg1, 1));
9400 if (poly_int_tree_p (offset1))
9402 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
9403 TYPE_PRECISION (sizetype));
9404 tem <<= LOG2_BITS_PER_UNIT;
9405 tem += bitpos1;
9406 if (tem.to_shwi (&bitpos1))
9407 offset1 = NULL_TREE;
9411 /* If we have equivalent bases we might be able to simplify. */
9412 if (indirect_base0 == indirect_base1
9413 && operand_equal_p (base0, base1,
9414 indirect_base0 ? OEP_ADDRESS_OF : 0))
9416 /* We can fold this expression to a constant if the non-constant
9417 offset parts are equal. */
9418 if ((offset0 == offset1
9419 || (offset0 && offset1
9420 && operand_equal_p (offset0, offset1, 0)))
9421 && (equality_code
9422 || (indirect_base0
9423 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9424 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9426 if (!equality_code
9427 && maybe_ne (bitpos0, bitpos1)
9428 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9429 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9430 fold_overflow_warning (("assuming pointer wraparound does not "
9431 "occur when comparing P +- C1 with "
9432 "P +- C2"),
9433 WARN_STRICT_OVERFLOW_CONDITIONAL);
9435 switch (code)
9437 case EQ_EXPR:
9438 if (known_eq (bitpos0, bitpos1))
9439 return constant_boolean_node (true, type);
9440 if (known_ne (bitpos0, bitpos1))
9441 return constant_boolean_node (false, type);
9442 break;
9443 case NE_EXPR:
9444 if (known_ne (bitpos0, bitpos1))
9445 return constant_boolean_node (true, type);
9446 if (known_eq (bitpos0, bitpos1))
9447 return constant_boolean_node (false, type);
9448 break;
9449 case LT_EXPR:
9450 if (known_lt (bitpos0, bitpos1))
9451 return constant_boolean_node (true, type);
9452 if (known_ge (bitpos0, bitpos1))
9453 return constant_boolean_node (false, type);
9454 break;
9455 case LE_EXPR:
9456 if (known_le (bitpos0, bitpos1))
9457 return constant_boolean_node (true, type);
9458 if (known_gt (bitpos0, bitpos1))
9459 return constant_boolean_node (false, type);
9460 break;
9461 case GE_EXPR:
9462 if (known_ge (bitpos0, bitpos1))
9463 return constant_boolean_node (true, type);
9464 if (known_lt (bitpos0, bitpos1))
9465 return constant_boolean_node (false, type);
9466 break;
9467 case GT_EXPR:
9468 if (known_gt (bitpos0, bitpos1))
9469 return constant_boolean_node (true, type);
9470 if (known_le (bitpos0, bitpos1))
9471 return constant_boolean_node (false, type);
9472 break;
9473 default:;
9476 /* We can simplify the comparison to a comparison of the variable
9477 offset parts if the constant offset parts are equal.
9478 Be careful to use signed sizetype here because otherwise we
9479 mess with array offsets in the wrong way. This is possible
9480 because pointer arithmetic is restricted to remain within an
9481 object and overflow on pointer differences is undefined as of
9482 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9483 else if (known_eq (bitpos0, bitpos1)
9484 && (equality_code
9485 || (indirect_base0
9486 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9487 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9489 /* By converting to signed sizetype we cover middle-end pointer
9490 arithmetic which operates on unsigned pointer types of size
9491 type size and ARRAY_REF offsets which are properly sign or
9492 zero extended from their type in case it is narrower than
9493 sizetype. */
9494 if (offset0 == NULL_TREE)
9495 offset0 = build_int_cst (ssizetype, 0);
9496 else
9497 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9498 if (offset1 == NULL_TREE)
9499 offset1 = build_int_cst (ssizetype, 0);
9500 else
9501 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9503 if (!equality_code
9504 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9505 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9506 fold_overflow_warning (("assuming pointer wraparound does not "
9507 "occur when comparing P +- C1 with "
9508 "P +- C2"),
9509 WARN_STRICT_OVERFLOW_COMPARISON);
9511 return fold_build2_loc (loc, code, type, offset0, offset1);
9514 /* For equal offsets we can simplify to a comparison of the
9515 base addresses. */
9516 else if (known_eq (bitpos0, bitpos1)
9517 && (indirect_base0
9518 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9519 && (indirect_base1
9520 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9521 && ((offset0 == offset1)
9522 || (offset0 && offset1
9523 && operand_equal_p (offset0, offset1, 0))))
9525 if (indirect_base0)
9526 base0 = build_fold_addr_expr_loc (loc, base0);
9527 if (indirect_base1)
9528 base1 = build_fold_addr_expr_loc (loc, base1);
9529 return fold_build2_loc (loc, code, type, base0, base1);
9531 /* Comparison between an ordinary (non-weak) symbol and a null
9532 pointer can be eliminated since such symbols must have a non
9533 null address. In C, relational expressions between pointers
9534 to objects and null pointers are undefined. The results
9535 below follow the C++ rules with the additional property that
9536 every object pointer compares greater than a null pointer. */
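/* For illustration (hypothetical input): for struct S { int a, b; } s;
   the test &s.b == 0 folds to false here, since s is an ordinary
   symbol and b sits at a known nonzero bit position, while
   &ptr->firstmember at offset 0 is deliberately left alone per the
   PR c/44555 note above.  */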
9538 else if (((DECL_P (base0)
9539 && maybe_nonzero_address (base0) > 0
9540 /* Avoid folding references to struct members at offset 0 to
9541 prevent tests like '&ptr->firstmember == 0' from getting
9542 eliminated. When ptr is null, although the -> expression
9543 is strictly speaking invalid, GCC retains it as a matter
9544 of QoI. See PR c/44555. */
9545 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
9546 || CONSTANT_CLASS_P (base0))
9547 && indirect_base0
9548 /* The caller guarantees that when one of the arguments is
9549 constant (i.e., null in this case) it is second. */
9550 && integer_zerop (arg1))
9552 switch (code)
9554 case EQ_EXPR:
9555 case LE_EXPR:
9556 case LT_EXPR:
9557 return constant_boolean_node (false, type);
9558 case GE_EXPR:
9559 case GT_EXPR:
9560 case NE_EXPR:
9561 return constant_boolean_node (true, type);
9562 default:
9563 gcc_unreachable ();
9568 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9569 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9570 the resulting offset is smaller in absolute value than the
9571 original one and has the same sign. */
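/* For illustration (hypothetical values): X + 3 < Y + 5 moves the
   constants to the side where the result shrinks, giving X < Y + 2;
   this is accepted because |2| < |5| and the sign is unchanged.  */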
9572 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9573 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9574 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9575 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9576 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9577 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9578 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9579 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9581 tree const1 = TREE_OPERAND (arg0, 1);
9582 tree const2 = TREE_OPERAND (arg1, 1);
9583 tree variable1 = TREE_OPERAND (arg0, 0);
9584 tree variable2 = TREE_OPERAND (arg1, 0);
9585 tree cst;
9586 const char * const warnmsg = G_("assuming signed overflow does not "
9587 "occur when combining constants around "
9588 "a comparison");
9590 /* Put the constant on the side where it doesn't overflow and is
9591 of lower absolute value and of the same sign as before. */
9592 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9593 ? MINUS_EXPR : PLUS_EXPR,
9594 const2, const1);
9595 if (!TREE_OVERFLOW (cst)
9596 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9597 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9599 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9600 return fold_build2_loc (loc, code, type,
9601 variable1,
9602 fold_build2_loc (loc, TREE_CODE (arg1),
9603 TREE_TYPE (arg1),
9604 variable2, cst));
9607 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9608 ? MINUS_EXPR : PLUS_EXPR,
9609 const1, const2);
9610 if (!TREE_OVERFLOW (cst)
9611 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9612 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9614 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9615 return fold_build2_loc (loc, code, type,
9616 fold_build2_loc (loc, TREE_CODE (arg0),
9617 TREE_TYPE (arg0),
9618 variable1, cst),
9619 variable2);
9623 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9624 if (tem)
9625 return tem;
9627 /* If we are comparing an expression that just has comparisons
9628 of two integer values, arithmetic expressions of those comparisons,
9629 and constants, we can simplify it. There are only three cases
9630 to check: the two values can either be equal, the first can be
9631 greater, or the second can be greater. Fold the expression for
9632 those three values. Since each value must be 0 or 1, we have
9633 eight possibilities, each of which corresponds to the constant 0
9634 or 1 or one of the six possible comparisons.
9636 This handles common cases like (a > b) == 0 but also handles
9637 expressions like ((x > y) - (y > x)) > 0, which supposedly
9638 occur in macroized code. */
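/* For illustration (worked example): for (a > b) == 0 the three
   substitutions give high_result = (1 == 0) = 0, equal_result =
   (0 == 0) = 1 and low_result = (0 == 0) = 1, so the mask below is
   0*4 + 1*2 + 1 = 3, selecting LE_EXPR: the expression folds to
   a <= b.  */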
9640 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9642 tree cval1 = 0, cval2 = 0;
9644 if (twoval_comparison_p (arg0, &cval1, &cval2)
9645 /* Don't handle degenerate cases here; they should already
9646 have been handled anyway. */
9647 && cval1 != 0 && cval2 != 0
9648 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9649 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9650 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9651 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9652 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9653 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9654 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9656 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9657 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9659 /* We can't just pass T to eval_subst in case cval1 or cval2
9660 was the same as ARG1. */
9662 tree high_result
9663 = fold_build2_loc (loc, code, type,
9664 eval_subst (loc, arg0, cval1, maxval,
9665 cval2, minval),
9666 arg1);
9667 tree equal_result
9668 = fold_build2_loc (loc, code, type,
9669 eval_subst (loc, arg0, cval1, maxval,
9670 cval2, maxval),
9671 arg1);
9672 tree low_result
9673 = fold_build2_loc (loc, code, type,
9674 eval_subst (loc, arg0, cval1, minval,
9675 cval2, maxval),
9676 arg1);
9678 /* All three of these results should be 0 or 1. Confirm they are.
9679 Then use those values to select the proper code to use. */
9681 if (TREE_CODE (high_result) == INTEGER_CST
9682 && TREE_CODE (equal_result) == INTEGER_CST
9683 && TREE_CODE (low_result) == INTEGER_CST)
9685 /* Make a 3-bit mask with the high-order bit being the
9686 value for `>', the next for `=', and the low for `<'. */
9687 switch ((integer_onep (high_result) * 4)
9688 + (integer_onep (equal_result) * 2)
9689 + integer_onep (low_result))
9691 case 0:
9692 /* Always false. */
9693 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9694 case 1:
9695 code = LT_EXPR;
9696 break;
9697 case 2:
9698 code = EQ_EXPR;
9699 break;
9700 case 3:
9701 code = LE_EXPR;
9702 break;
9703 case 4:
9704 code = GT_EXPR;
9705 break;
9706 case 5:
9707 code = NE_EXPR;
9708 break;
9709 case 6:
9710 code = GE_EXPR;
9711 break;
9712 case 7:
9713 /* Always true. */
9714 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9717 return fold_build2_loc (loc, code, type, cval1, cval2);
9722 return NULL_TREE;
9726 /* Subroutine of fold_binary. Optimize complex multiplications of the
9727 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9728 argument EXPR represents the expression "z" of type TYPE. */
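/* For illustration (worked equation): with z = a + b*i,
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b, so the folded
   result is the complex value (a*a + b*b, 0) built below.  */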
9730 static tree
9731 fold_mult_zconjz (location_t loc, tree type, tree expr)
9733 tree itype = TREE_TYPE (type);
9734 tree rpart, ipart, tem;
9736 if (TREE_CODE (expr) == COMPLEX_EXPR)
9738 rpart = TREE_OPERAND (expr, 0);
9739 ipart = TREE_OPERAND (expr, 1);
9741 else if (TREE_CODE (expr) == COMPLEX_CST)
9743 rpart = TREE_REALPART (expr);
9744 ipart = TREE_IMAGPART (expr);
9746 else
9748 expr = save_expr (expr);
9749 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9750 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9753 rpart = save_expr (rpart);
9754 ipart = save_expr (ipart);
9755 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9756 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9757 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9758 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9759 build_zero_cst (itype));
9763 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9764 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
9765 true if successful. */
9767 static bool
9768 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
9770 unsigned HOST_WIDE_INT i, nunits;
9772 if (TREE_CODE (arg) == VECTOR_CST
9773 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
9775 for (i = 0; i < nunits; ++i)
9776 elts[i] = VECTOR_CST_ELT (arg, i);
9778 else if (TREE_CODE (arg) == CONSTRUCTOR)
9780 constructor_elt *elt;
9782 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9783 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9784 return false;
9785 else
9786 elts[i] = elt->value;
9788 else
9789 return false;
9790 for (; i < nelts; i++)
9791 elts[i]
9792 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9793 return true;
9796 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9797 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9798 NULL_TREE otherwise. */
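/* For illustration (hypothetical vectors): with nelts = 4,
   arg0 = {a0,a1,a2,a3} and arg1 = {b0,b1,b2,b3}, the selector
   indexes the 8-element concatenation of the inputs, so
   sel = {0,5,2,7} produces {a0,b1,a2,b3}.  */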
9800 tree
9801 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
9803 unsigned int i;
9804 unsigned HOST_WIDE_INT nelts;
9805 bool need_ctor = false;
9807 if (!sel.length ().is_constant (&nelts))
9808 return NULL_TREE;
9809 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
9810 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
9811 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
9812 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9813 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9814 return NULL_TREE;
9816 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
9817 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
9818 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
9819 return NULL_TREE;
9821 tree_vector_builder out_elts (type, nelts, 1);
9822 for (i = 0; i < nelts; i++)
9824 HOST_WIDE_INT index;
9825 if (!sel[i].is_constant (&index))
9826 return NULL_TREE;
9827 if (!CONSTANT_CLASS_P (in_elts[index]))
9828 need_ctor = true;
9829 out_elts.quick_push (unshare_expr (in_elts[index]));
9832 if (need_ctor)
9834 vec<constructor_elt, va_gc> *v;
9835 vec_alloc (v, nelts);
9836 for (i = 0; i < nelts; i++)
9837 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
9838 return build_constructor (type, v);
9840 else
9841 return out_elts.build ();
9844 /* Try to fold a pointer difference of type TYPE between two address expressions of
9845 array references AREF0 and AREF1 using location LOC. Return a
9846 simplified expression for the difference or NULL_TREE. */
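/* For illustration (hypothetical input): for int a[8][8], the
   difference &a[i][3] - &a[j][1] recurses on the bases a[i] and
   a[j], yielding (i - j) * sizeof (a[0]) as the base offset, and
   then adds (3 - 1) * sizeof (int) for the innermost indices.  */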
9848 static tree
9849 fold_addr_of_array_ref_difference (location_t loc, tree type,
9850 tree aref0, tree aref1,
9851 bool use_pointer_diff)
9853 tree base0 = TREE_OPERAND (aref0, 0);
9854 tree base1 = TREE_OPERAND (aref1, 0);
9855 tree base_offset = build_int_cst (type, 0);
9857 /* If the bases are array references as well, recurse. If the bases
9858 are pointer indirections, compute the difference of the pointers.
9859 If the bases are equal, we are set. */
9860 if ((TREE_CODE (base0) == ARRAY_REF
9861 && TREE_CODE (base1) == ARRAY_REF
9862 && (base_offset
9863 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
9864 use_pointer_diff)))
9865 || (INDIRECT_REF_P (base0)
9866 && INDIRECT_REF_P (base1)
9867 && (base_offset
9868 = use_pointer_diff
9869 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
9870 TREE_OPERAND (base0, 0),
9871 TREE_OPERAND (base1, 0))
9872 : fold_binary_loc (loc, MINUS_EXPR, type,
9873 fold_convert (type,
9874 TREE_OPERAND (base0, 0)),
9875 fold_convert (type,
9876 TREE_OPERAND (base1, 0)))))
9877 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
9879 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9880 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9881 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9882 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
9883 return fold_build2_loc (loc, PLUS_EXPR, type,
9884 base_offset,
9885 fold_build2_loc (loc, MULT_EXPR, type,
9886 diff, esz));
9888 return NULL_TREE;
9891 /* If the real or vector real constant CST of type TYPE has an exact
9892 inverse, return it, else return NULL. */
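/* For illustration (hypothetical values): in binary floating point
   only powers of two have exact reciprocals, so 0.25 yields 4.0
   while 3.0 yields NULL_TREE, since 1/3 is not exactly
   representable.  */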
9894 tree
9895 exact_inverse (tree type, tree cst)
9897 REAL_VALUE_TYPE r;
9898 tree unit_type;
9899 machine_mode mode;
9901 switch (TREE_CODE (cst))
9903 case REAL_CST:
9904 r = TREE_REAL_CST (cst);
9906 if (exact_real_inverse (TYPE_MODE (type), &r))
9907 return build_real (type, r);
9909 return NULL_TREE;
9911 case VECTOR_CST:
9913 unit_type = TREE_TYPE (type);
9914 mode = TYPE_MODE (unit_type);
9916 tree_vector_builder elts;
9917 if (!elts.new_unary_operation (type, cst, false))
9918 return NULL_TREE;
9919 unsigned int count = elts.encoded_nelts ();
9920 for (unsigned int i = 0; i < count; ++i)
9922 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9923 if (!exact_real_inverse (mode, &r))
9924 return NULL_TREE;
9925 elts.quick_push (build_real (unit_type, r));
9928 return elts.build ();
9931 default:
9932 return NULL_TREE;
9936 /* Mask out the tz least significant bits of X of type TYPE where
9937 tz is the number of trailing zeroes in Y. */
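/* For illustration (hypothetical values): if Y = 0b101000 then
   tz = 3, so X is ANDed with a mask whose three low bits are clear;
   e.g. X = 0b011111 becomes 0b011000.  */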
9938 static wide_int
9939 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9941 int tz = wi::ctz (y);
9942 if (tz > 0)
9943 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9944 return x;
9947 /* Return true when T is an address and is known to be nonzero.
9948 For floating point we further ensure that T is not denormal.
9949 Similar logic is present in nonzero_address in rtlanal.h.
9951 If the return value is based on the assumption that signed overflow
9952 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9953 change *STRICT_OVERFLOW_P. */
9955 static bool
9956 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9958 tree type = TREE_TYPE (t);
9959 enum tree_code code;
9961 /* Doing something useful for floating point would need more work. */
9962 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9963 return false;
9965 code = TREE_CODE (t);
9966 switch (TREE_CODE_CLASS (code))
9968 case tcc_unary:
9969 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9970 strict_overflow_p);
9971 case tcc_binary:
9972 case tcc_comparison:
9973 return tree_binary_nonzero_warnv_p (code, type,
9974 TREE_OPERAND (t, 0),
9975 TREE_OPERAND (t, 1),
9976 strict_overflow_p);
9977 case tcc_constant:
9978 case tcc_declaration:
9979 case tcc_reference:
9980 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9982 default:
9983 break;
9986 switch (code)
9988 case TRUTH_NOT_EXPR:
9989 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9990 strict_overflow_p);
9992 case TRUTH_AND_EXPR:
9993 case TRUTH_OR_EXPR:
9994 case TRUTH_XOR_EXPR:
9995 return tree_binary_nonzero_warnv_p (code, type,
9996 TREE_OPERAND (t, 0),
9997 TREE_OPERAND (t, 1),
9998 strict_overflow_p);
10000 case COND_EXPR:
10001 case CONSTRUCTOR:
10002 case OBJ_TYPE_REF:
10003 case ASSERT_EXPR:
10004 case ADDR_EXPR:
10005 case WITH_SIZE_EXPR:
10006 case SSA_NAME:
10007 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10009 case COMPOUND_EXPR:
10010 case MODIFY_EXPR:
10011 case BIND_EXPR:
10012 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10013 strict_overflow_p);
10015 case SAVE_EXPR:
10016 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10017 strict_overflow_p);
10019 case CALL_EXPR:
10021 tree fndecl = get_callee_fndecl (t);
10022 if (!fndecl) return false;
10023 if (flag_delete_null_pointer_checks && !flag_check_new
10024 && DECL_IS_OPERATOR_NEW_P (fndecl)
10025 && !TREE_NOTHROW (fndecl))
10026 return true;
10027 if (flag_delete_null_pointer_checks
10028 && lookup_attribute ("returns_nonnull",
10029 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10030 return true;
10031 return alloca_call_p (t);
10034 default:
10035 break;
10037 return false;
10040 /* Return true when T is an address and is known to be nonzero.
10041 Handle warnings about undefined signed overflow. */
10043 bool
10044 tree_expr_nonzero_p (tree t)
10046 bool ret, strict_overflow_p;
10048 strict_overflow_p = false;
10049 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10050 if (strict_overflow_p)
10051 fold_overflow_warning (("assuming signed overflow does not occur when "
10052 "determining that expression is always "
10053 "non-zero"),
10054 WARN_STRICT_OVERFLOW_MISC);
10055 return ret;
10058 /* Return true if T is known not to be equal to an integer W. */
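/* For illustration (hypothetical values): an SSA name with value
   range [10, 20] is known not to equal 5, and one whose low bit is
   known to be zero (an even value) is known not to equal any odd W
   such as 7.  */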
10060 bool
10061 expr_not_equal_to (tree t, const wide_int &w)
10063 wide_int min, max, nz;
10064 value_range_kind rtype;
10065 switch (TREE_CODE (t))
10067 case INTEGER_CST:
10068 return wi::to_wide (t) != w;
10070 case SSA_NAME:
10071 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10072 return false;
10073 rtype = get_range_info (t, &min, &max);
10074 if (rtype == VR_RANGE)
10076 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
10077 return true;
10078 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
10079 return true;
10081 else if (rtype == VR_ANTI_RANGE
10082 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
10083 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
10084 return true;
10085 /* If T has some known zero bits and W has any of those bits set,
10086 then T is known not to be equal to W. */
10087 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10088 TYPE_PRECISION (TREE_TYPE (t))), 0))
10089 return true;
10090 return false;
10092 default:
10093 return false;
10097 /* Fold a binary expression of code CODE and type TYPE with operands
10098 OP0 and OP1. LOC is the location of the resulting expression.
10099 Return the folded expression if folding is successful. Otherwise,
10100 return NULL_TREE. */
10102 tree
10103 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10104 tree op0, tree op1)
10106 enum tree_code_class kind = TREE_CODE_CLASS (code);
10107 tree arg0, arg1, tem;
10108 tree t1 = NULL_TREE;
10109 bool strict_overflow_p;
10110 unsigned int prec;
10112 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10113 && TREE_CODE_LENGTH (code) == 2
10114 && op0 != NULL_TREE
10115 && op1 != NULL_TREE);
10117 arg0 = op0;
10118 arg1 = op1;
10120 /* Strip any conversions that don't change the mode. This is
10121 safe for every expression, except for a comparison expression
10122 because its signedness is derived from its operands. So, in
10123 the latter case, only strip conversions that don't change the
10124 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10125 preserved.
10127 Note that this is done as an internal manipulation within the
10128 constant folder, in order to find the simplest representation
10129 of the arguments so that their form can be studied. In any
10130 case, the appropriate type conversions should be put back in
10131 the tree that will get out of the constant folder. */
10133 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10135 STRIP_SIGN_NOPS (arg0);
10136 STRIP_SIGN_NOPS (arg1);
10138 else
10140 STRIP_NOPS (arg0);
10141 STRIP_NOPS (arg1);
10144 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10145 constant but we can't do arithmetic on them. */
10146 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10148 tem = const_binop (code, type, arg0, arg1);
10149 if (tem != NULL_TREE)
10151 if (TREE_TYPE (tem) != type)
10152 tem = fold_convert_loc (loc, type, tem);
10153 return tem;
10157 /* If this is a commutative operation, and ARG0 is a constant, move it
10158 to ARG1 to reduce the number of tests below. */
10159 if (commutative_tree_code (code)
10160 && tree_swap_operands_p (arg0, arg1))
10161 return fold_build2_loc (loc, code, type, op1, op0);
10163 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10164 to ARG1 to reduce the number of tests below. */
10165 if (kind == tcc_comparison
10166 && tree_swap_operands_p (arg0, arg1))
10167 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10169 tem = generic_simplify (loc, code, type, op0, op1);
10170 if (tem)
10171 return tem;
10173 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10175 First check for cases where an arithmetic operation is applied to a
10176 compound, conditional, or comparison operation. Push the arithmetic
10177 operation inside the compound or conditional to see if any folding
10178 can then be done. Convert comparison to conditional for this purpose.
10179 This also optimizes non-constant cases that used to be done in
10180 expand_expr.
10182 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
10183 where one of the operands is a comparison and the other is a comparison, a
10184 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10185 code below would make the expression more complex. Change it to a
10186 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10187 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
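/* For illustration (hypothetical input): with scalar operands,
   (a < b) & (c < d) is rewritten as a TRUTH_AND_EXPR of the two
   comparisons, and (a < b) == (c < d) as the inversion of the
   TRUTH_XOR_EXPR (a < b) ^ (c < d).  */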
10189 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10190 || code == EQ_EXPR || code == NE_EXPR)
10191 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10192 && ((truth_value_p (TREE_CODE (arg0))
10193 && (truth_value_p (TREE_CODE (arg1))
10194 || (TREE_CODE (arg1) == BIT_AND_EXPR
10195 && integer_onep (TREE_OPERAND (arg1, 1)))))
10196 || (truth_value_p (TREE_CODE (arg1))
10197 && (truth_value_p (TREE_CODE (arg0))
10198 || (TREE_CODE (arg0) == BIT_AND_EXPR
10199 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10201 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10202 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10203 : TRUTH_XOR_EXPR,
10204 boolean_type_node,
10205 fold_convert_loc (loc, boolean_type_node, arg0),
10206 fold_convert_loc (loc, boolean_type_node, arg1));
10208 if (code == EQ_EXPR)
10209 tem = invert_truthvalue_loc (loc, tem);
10211 return fold_convert_loc (loc, type, tem);
10214 if (TREE_CODE_CLASS (code) == tcc_binary
10215 || TREE_CODE_CLASS (code) == tcc_comparison)
10217 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10219 tem = fold_build2_loc (loc, code, type,
10220 fold_convert_loc (loc, TREE_TYPE (op0),
10221 TREE_OPERAND (arg0, 1)), op1);
10222 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10223 tem);
10225 if (TREE_CODE (arg1) == COMPOUND_EXPR)
10227 tem = fold_build2_loc (loc, code, type, op0,
10228 fold_convert_loc (loc, TREE_TYPE (op1),
10229 TREE_OPERAND (arg1, 1)));
10230 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10231 tem);
10234 if (TREE_CODE (arg0) == COND_EXPR
10235 || TREE_CODE (arg0) == VEC_COND_EXPR
10236 || COMPARISON_CLASS_P (arg0))
10238 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10239 arg0, arg1,
10240 /*cond_first_p=*/1);
10241 if (tem != NULL_TREE)
10242 return tem;
10245 if (TREE_CODE (arg1) == COND_EXPR
10246 || TREE_CODE (arg1) == VEC_COND_EXPR
10247 || COMPARISON_CLASS_P (arg1))
10249 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10250 arg1, arg0,
10251 /*cond_first_p=*/0);
10252 if (tem != NULL_TREE)
10253 return tem;
10257 switch (code)
10259 case MEM_REF:
10260 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10261 if (TREE_CODE (arg0) == ADDR_EXPR
10262 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10264 tree iref = TREE_OPERAND (arg0, 0);
10265 return fold_build2 (MEM_REF, type,
10266 TREE_OPERAND (iref, 0),
10267 int_const_binop (PLUS_EXPR, arg1,
10268 TREE_OPERAND (iref, 1)));
10271 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10272 if (TREE_CODE (arg0) == ADDR_EXPR
10273 && handled_component_p (TREE_OPERAND (arg0, 0)))
10275 tree base;
10276 poly_int64 coffset;
10277 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10278 &coffset);
10279 if (!base)
10280 return NULL_TREE;
10281 return fold_build2 (MEM_REF, type,
10282 build_fold_addr_expr (base),
10283 int_const_binop (PLUS_EXPR, arg1,
10284 size_int (coffset)));
10287 return NULL_TREE;
10289 case POINTER_PLUS_EXPR:
10290 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10291 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10292 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10293 return fold_convert_loc (loc, type,
10294 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10295 fold_convert_loc (loc, sizetype,
10296 arg1),
10297 fold_convert_loc (loc, sizetype,
10298 arg0)));
10300 return NULL_TREE;
10302 case PLUS_EXPR:
10303 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10305 /* X + (X / CST) * -CST is X % CST. */
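/* For illustration (worked arithmetic): with X = 17 and CST = 5,
   17 + (17 / 5) * -5 = 17 - 15 = 2, which equals 17 % 5.  */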
10306 if (TREE_CODE (arg1) == MULT_EXPR
10307 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10308 && operand_equal_p (arg0,
10309 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10311 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10312 tree cst1 = TREE_OPERAND (arg1, 1);
10313 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10314 cst1, cst0);
10315 if (sum && integer_zerop (sum))
10316 return fold_convert_loc (loc, type,
10317 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10318 TREE_TYPE (arg0), arg0,
10319 cst0));
10323 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10324 one. Make sure the type is not saturating and has the signedness of
10325 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10326 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10327 if ((TREE_CODE (arg0) == MULT_EXPR
10328 || TREE_CODE (arg1) == MULT_EXPR)
10329 && !TYPE_SATURATING (type)
10330 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10331 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10332 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10334 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10335 if (tem)
10336 return tem;
10339 if (! FLOAT_TYPE_P (type))
10341 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10342 (plus (plus (mult) (mult)) (foo)) so that we can
10343 take advantage of the factoring cases below. */
10344 if (ANY_INTEGRAL_TYPE_P (type)
10345 && TYPE_OVERFLOW_WRAPS (type)
10346 && (((TREE_CODE (arg0) == PLUS_EXPR
10347 || TREE_CODE (arg0) == MINUS_EXPR)
10348 && TREE_CODE (arg1) == MULT_EXPR)
10349 || ((TREE_CODE (arg1) == PLUS_EXPR
10350 || TREE_CODE (arg1) == MINUS_EXPR)
10351 && TREE_CODE (arg0) == MULT_EXPR)))
10353 tree parg0, parg1, parg, marg;
10354 enum tree_code pcode;
10356 if (TREE_CODE (arg1) == MULT_EXPR)
10357 parg = arg0, marg = arg1;
10358 else
10359 parg = arg1, marg = arg0;
10360 pcode = TREE_CODE (parg);
10361 parg0 = TREE_OPERAND (parg, 0);
10362 parg1 = TREE_OPERAND (parg, 1);
10363 STRIP_NOPS (parg0);
10364 STRIP_NOPS (parg1);
10366 if (TREE_CODE (parg0) == MULT_EXPR
10367 && TREE_CODE (parg1) != MULT_EXPR)
10368 return fold_build2_loc (loc, pcode, type,
10369 fold_build2_loc (loc, PLUS_EXPR, type,
10370 fold_convert_loc (loc, type,
10371 parg0),
10372 fold_convert_loc (loc, type,
10373 marg)),
10374 fold_convert_loc (loc, type, parg1));
10375 if (TREE_CODE (parg0) != MULT_EXPR
10376 && TREE_CODE (parg1) == MULT_EXPR)
10377 return
10378 fold_build2_loc (loc, PLUS_EXPR, type,
10379 fold_convert_loc (loc, type, parg0),
10380 fold_build2_loc (loc, pcode, type,
10381 fold_convert_loc (loc, type, marg),
10382 fold_convert_loc (loc, type,
10383 parg1)));
10386 else
10388 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10389 to __complex__ ( x, y ). This is not the same for SNaNs or
10390 if signed zeros are involved. */
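/* For illustration (hypothetical values): with signed zeros honored,
   __complex__ (x, +0.0) + __complex__ (0.0, -0.0) has imaginary part
   +0.0 + -0.0 = +0.0, not the -0.0 the naive fold to
   __complex__ (x, -0.0) would produce.  */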
10391 if (!HONOR_SNANS (element_mode (arg0))
10392 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10393 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10395 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10396 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10397 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10398 bool arg0rz = false, arg0iz = false;
10399 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10400 || (arg0i && (arg0iz = real_zerop (arg0i))))
10402 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10403 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10404 if (arg0rz && arg1i && real_zerop (arg1i))
10406 tree rp = arg1r ? arg1r
10407 : build1 (REALPART_EXPR, rtype, arg1);
10408 tree ip = arg0i ? arg0i
10409 : build1 (IMAGPART_EXPR, rtype, arg0);
10410 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10412 else if (arg0iz && arg1r && real_zerop (arg1r))
10414 tree rp = arg0r ? arg0r
10415 : build1 (REALPART_EXPR, rtype, arg0);
10416 tree ip = arg1i ? arg1i
10417 : build1 (IMAGPART_EXPR, rtype, arg1);
10418 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10423 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10424 We associate floats only if the user has specified
10425 -fassociative-math. */
10426 if (flag_associative_math
10427 && TREE_CODE (arg1) == PLUS_EXPR
10428 && TREE_CODE (arg0) != MULT_EXPR)
10430 tree tree10 = TREE_OPERAND (arg1, 0);
10431 tree tree11 = TREE_OPERAND (arg1, 1);
10432 if (TREE_CODE (tree11) == MULT_EXPR
10433 && TREE_CODE (tree10) == MULT_EXPR)
10435 tree tree0;
10436 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10437 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10440 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10441 We associate floats only if the user has specified
10442 -fassociative-math. */
10443 if (flag_associative_math
10444 && TREE_CODE (arg0) == PLUS_EXPR
10445 && TREE_CODE (arg1) != MULT_EXPR)
10447 tree tree00 = TREE_OPERAND (arg0, 0);
10448 tree tree01 = TREE_OPERAND (arg0, 1);
10449 if (TREE_CODE (tree01) == MULT_EXPR
10450 && TREE_CODE (tree00) == MULT_EXPR)
10452 tree tree0;
10453 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10454 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10459 bit_rotate:
10460 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10461 is a rotate of A by C1 bits. */
10462 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10463 is a rotate of A by B bits.
10464 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
10465 though in this case CODE must be | and not + or ^, otherwise
10466 it doesn't return A when B is 0. */
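/* For illustration (hypothetical values): for unsigned 32-bit A,
   (A << 5) + (A >> 27) is A rotated left by 5, and
   (A << B) | (A >> (-B & 31)) is A rotated left by B; the BIT_AND
   form needs | so that the result is still A when B is 0.  */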
10468 enum tree_code code0, code1;
10469 tree rtype;
10470 code0 = TREE_CODE (arg0);
10471 code1 = TREE_CODE (arg1);
10472 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10473 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10474 && operand_equal_p (TREE_OPERAND (arg0, 0),
10475 TREE_OPERAND (arg1, 0), 0)
10476 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10477 TYPE_UNSIGNED (rtype))
10478 /* Only create rotates in complete modes. Other cases are not
10479 expanded properly. */
10480 && (element_precision (rtype)
10481 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
10483 tree tree01, tree11;
10484 tree orig_tree01, orig_tree11;
10485 enum tree_code code01, code11;
10487 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
10488 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
10489 STRIP_NOPS (tree01);
10490 STRIP_NOPS (tree11);
10491 code01 = TREE_CODE (tree01);
10492 code11 = TREE_CODE (tree11);
10493 if (code11 != MINUS_EXPR
10494 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
10496 std::swap (code0, code1);
10497 std::swap (code01, code11);
10498 std::swap (tree01, tree11);
10499 std::swap (orig_tree01, orig_tree11);
10501 if (code01 == INTEGER_CST
10502 && code11 == INTEGER_CST
10503 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10504 == element_precision (rtype)))
10506 tem = build2_loc (loc, LROTATE_EXPR,
10507 rtype, TREE_OPERAND (arg0, 0),
10508 code0 == LSHIFT_EXPR
10509 ? orig_tree01 : orig_tree11);
10510 return fold_convert_loc (loc, type, tem);
10512 else if (code11 == MINUS_EXPR)
10514 tree tree110, tree111;
10515 tree110 = TREE_OPERAND (tree11, 0);
10516 tree111 = TREE_OPERAND (tree11, 1);
10517 STRIP_NOPS (tree110);
10518 STRIP_NOPS (tree111);
10519 if (TREE_CODE (tree110) == INTEGER_CST
10520 && compare_tree_int (tree110,
10521 element_precision (rtype)) == 0
10522 && operand_equal_p (tree01, tree111, 0))
10524 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10525 ? LROTATE_EXPR : RROTATE_EXPR),
10526 rtype, TREE_OPERAND (arg0, 0),
10527 orig_tree01);
10528 return fold_convert_loc (loc, type, tem);
10531 else if (code == BIT_IOR_EXPR
10532 && code11 == BIT_AND_EXPR
10533 && pow2p_hwi (element_precision (rtype)))
10535 tree tree110, tree111;
10536 tree110 = TREE_OPERAND (tree11, 0);
10537 tree111 = TREE_OPERAND (tree11, 1);
10538 STRIP_NOPS (tree110);
10539 STRIP_NOPS (tree111);
10540 if (TREE_CODE (tree110) == NEGATE_EXPR
10541 && TREE_CODE (tree111) == INTEGER_CST
10542 && compare_tree_int (tree111,
10543 element_precision (rtype) - 1) == 0
10544 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
10546 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10547 ? LROTATE_EXPR : RROTATE_EXPR),
10548 rtype, TREE_OPERAND (arg0, 0),
10549 orig_tree01);
10550 return fold_convert_loc (loc, type, tem);
10556 associate:
10557 /* In most languages, we can't associate operations on floats through
10558 parentheses. Rather than remember where the parentheses were, we
10559 don't associate floats at all, unless the user has specified
10560 -fassociative-math.
10561 And, we need to make sure the type is not saturating. */
10563 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10564 && !TYPE_SATURATING (type))
10566 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
10567 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
10568 tree atype = type;
10569 bool ok = true;
10571 /* Split both trees into variables, constants, and literals. Then
10572 associate each group together, the constants with literals,
10573 then the result with variables. This increases the chances of
10574 literals being recombined later and of generating relocatable
10575 expressions for the sum of a constant and literal. */
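/* For illustration (hypothetical input): folding (a + 2) + (b + 3)
   splits the operands into variables {a, b} and literals {2, 3},
   which the code below reassociates into (a + b) + 5.  */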
10576 var0 = split_tree (arg0, type, code,
10577 &minus_var0, &con0, &minus_con0,
10578 &lit0, &minus_lit0, 0);
10579 var1 = split_tree (arg1, type, code,
10580 &minus_var1, &con1, &minus_con1,
10581 &lit1, &minus_lit1, code == MINUS_EXPR);
10583 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10584 if (code == MINUS_EXPR)
10585 code = PLUS_EXPR;
10587 /* With undefined overflow prefer doing association in a type
10588 which wraps on overflow, if that is one of the operand types. */
10589 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
10590 && !TYPE_OVERFLOW_WRAPS (type))
10592 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10593 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10594 atype = TREE_TYPE (arg0);
10595 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10596 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10597 atype = TREE_TYPE (arg1);
10598 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10601 /* With undefined overflow we can only associate constants with one
10602 variable, and constants whose association doesn't overflow. */
10603 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
10604 && !TYPE_OVERFLOW_WRAPS (atype))
10606 if ((var0 && var1) || (minus_var0 && minus_var1))
10608 /* ??? If split_tree would handle NEGATE_EXPR we could
10609 simply reject these cases and the allowed cases would
10610 be the var0/minus_var1 ones. */
10611 tree tmp0 = var0 ? var0 : minus_var0;
10612 tree tmp1 = var1 ? var1 : minus_var1;
10613 bool one_neg = false;
10615 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10617 tmp0 = TREE_OPERAND (tmp0, 0);
10618 one_neg = !one_neg;
10620 if (CONVERT_EXPR_P (tmp0)
10621 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10622 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10623 <= TYPE_PRECISION (atype)))
10624 tmp0 = TREE_OPERAND (tmp0, 0);
10625 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10627 tmp1 = TREE_OPERAND (tmp1, 0);
10628 one_neg = !one_neg;
10630 if (CONVERT_EXPR_P (tmp1)
10631 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10632 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10633 <= TYPE_PRECISION (atype)))
10634 tmp1 = TREE_OPERAND (tmp1, 0);
10635 /* The only case we can still associate with two variables
10636 is if they cancel out. */
10637 if (!one_neg
10638 || !operand_equal_p (tmp0, tmp1, 0))
10639 ok = false;
10641 else if ((var0 && minus_var1
10642 && ! operand_equal_p (var0, minus_var1, 0))
10643 || (minus_var0 && var1
10644 && ! operand_equal_p (minus_var0, var1, 0)))
10645 ok = false;
10648 /* Only do something if we found more than two objects. Otherwise,
10649 nothing has changed and we risk infinite recursion. */
10650 if (ok
10651 && ((var0 != 0) + (var1 != 0)
10652 + (minus_var0 != 0) + (minus_var1 != 0)
10653 + (con0 != 0) + (con1 != 0)
10654 + (minus_con0 != 0) + (minus_con1 != 0)
10655 + (lit0 != 0) + (lit1 != 0)
10656 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
10658 var0 = associate_trees (loc, var0, var1, code, atype);
10659 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
10660 code, atype);
10661 con0 = associate_trees (loc, con0, con1, code, atype);
10662 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
10663 code, atype);
10664 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10665 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10666 code, atype);
10668 if (minus_var0 && var0)
10670 var0 = associate_trees (loc, var0, minus_var0,
10671 MINUS_EXPR, atype);
10672 minus_var0 = 0;
10674 if (minus_con0 && con0)
10676 con0 = associate_trees (loc, con0, minus_con0,
10677 MINUS_EXPR, atype);
10678 minus_con0 = 0;
10681 /* Preserve the MINUS_EXPR if the negative part of the literal is
10682 greater than the positive part. Otherwise, the multiplicative
10683 folding code (i.e. extract_muldiv) may be fooled when
10684 unsigned constants are subtracted, as in the following
10685 example: ((X*2 + 4) - 8U)/2. */
10686 if (minus_lit0 && lit0)
10688 if (TREE_CODE (lit0) == INTEGER_CST
10689 && TREE_CODE (minus_lit0) == INTEGER_CST
10690 && tree_int_cst_lt (lit0, minus_lit0)
10691 /* But avoid ending up with only negated parts. */
10692 && (var0 || con0))
10694 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10695 MINUS_EXPR, atype);
10696 lit0 = 0;
10698 else
10700 lit0 = associate_trees (loc, lit0, minus_lit0,
10701 MINUS_EXPR, atype);
10702 minus_lit0 = 0;
10706 /* Don't introduce overflows through reassociation. */
10707 if ((lit0 && TREE_OVERFLOW_P (lit0))
10708 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
10709 return NULL_TREE;
10711 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
10712 con0 = associate_trees (loc, con0, lit0, code, atype);
10713 lit0 = 0;
10714 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
10715 code, atype);
10716 minus_lit0 = 0;
10718 /* Eliminate minus_con0. */
10719 if (minus_con0)
10721 if (con0)
10722 con0 = associate_trees (loc, con0, minus_con0,
10723 MINUS_EXPR, atype);
10724 else if (var0)
10725 var0 = associate_trees (loc, var0, minus_con0,
10726 MINUS_EXPR, atype);
10727 else
10728 gcc_unreachable ();
10729 minus_con0 = 0;
10732 /* Eliminate minus_var0. */
10733 if (minus_var0)
10735 if (con0)
10736 con0 = associate_trees (loc, con0, minus_var0,
10737 MINUS_EXPR, atype);
10738 else
10739 gcc_unreachable ();
10740 minus_var0 = 0;
10743 return
10744 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10745 code, atype));
10749 return NULL_TREE;
10751 case POINTER_DIFF_EXPR:
10752 case MINUS_EXPR:
10753 /* Fold &a[i] - &a[j] to i-j. */
10754 if (TREE_CODE (arg0) == ADDR_EXPR
10755 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10756 && TREE_CODE (arg1) == ADDR_EXPR
10757 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10759 tree tem = fold_addr_of_array_ref_difference (loc, type,
10760 TREE_OPERAND (arg0, 0),
10761 TREE_OPERAND (arg1, 0),
10762 code
10763 == POINTER_DIFF_EXPR);
10764 if (tem)
10765 return tem;
10768 /* Further transformations are not for pointers. */
10769 if (code == POINTER_DIFF_EXPR)
10770 return NULL_TREE;
10772 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10773 if (TREE_CODE (arg0) == NEGATE_EXPR
10774 && negate_expr_p (op1)
10775 /* If arg0 is e.g. unsigned int and type is int, then this could
10776 introduce UB, because if A is INT_MIN at runtime, the original
10777 expression can be well defined while the latter is not.
10778 See PR83269. */
10779 && !(ANY_INTEGRAL_TYPE_P (type)
10780 && TYPE_OVERFLOW_UNDEFINED (type)
10781 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10782 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10783 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
10784 fold_convert_loc (loc, type,
10785 TREE_OPERAND (arg0, 0)));
10787 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10788 __complex__ ( x, -y ). This is not the same for SNaNs or if
10789 signed zeros are involved. */
10790 if (!HONOR_SNANS (element_mode (arg0))
10791 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10792 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10794 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10795 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10796 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10797 bool arg0rz = false, arg0iz = false;
10798 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10799 || (arg0i && (arg0iz = real_zerop (arg0i))))
10801 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10802 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10803 if (arg0rz && arg1i && real_zerop (arg1i))
10805 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10806 arg1r ? arg1r
10807 : build1 (REALPART_EXPR, rtype, arg1));
10808 tree ip = arg0i ? arg0i
10809 : build1 (IMAGPART_EXPR, rtype, arg0);
10810 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10812 else if (arg0iz && arg1r && real_zerop (arg1r))
10814 tree rp = arg0r ? arg0r
10815 : build1 (REALPART_EXPR, rtype, arg0);
10816 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10817 arg1i ? arg1i
10818 : build1 (IMAGPART_EXPR, rtype, arg1));
10819 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10824 /* A - B -> A + (-B) if B is easily negatable. */
10825 if (negate_expr_p (op1)
10826 && ! TYPE_OVERFLOW_SANITIZED (type)
10827 && ((FLOAT_TYPE_P (type)
10828 /* Avoid this transformation if B is a positive REAL_CST. */
10829 && (TREE_CODE (op1) != REAL_CST
10830 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
10831 || INTEGRAL_TYPE_P (type)))
10832 return fold_build2_loc (loc, PLUS_EXPR, type,
10833 fold_convert_loc (loc, type, arg0),
10834 negate_expr (op1));
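      /* Editor's illustration (not in the original source): for integral
	 types this turns e.g. x - 5 into x + -5; for floats only a
	 negative constant qualifies, so x - -2.0 becomes x + 2.0 while
	 x - 2.0 is left alone rather than introducing -2.0.  */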
10836 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10837 one. Make sure the type is not saturating and has the signedness of
10838 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10839 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10840 if ((TREE_CODE (arg0) == MULT_EXPR
10841 || TREE_CODE (arg1) == MULT_EXPR)
10842 && !TYPE_SATURATING (type)
10843 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10844 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10845 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10847 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10848 if (tem)
10849 return tem;
10852 goto associate;
10854 case MULT_EXPR:
10855 if (! FLOAT_TYPE_P (type))
10857 /* Transform x * -C into -x * C if x is easily negatable. */
10858 if (TREE_CODE (op1) == INTEGER_CST
10859 && tree_int_cst_sgn (op1) == -1
10860 && negate_expr_p (op0)
10861 && negate_expr_p (op1)
10862 && (tem = negate_expr (op1)) != op1
10863 && ! TREE_OVERFLOW (tem))
10864 return fold_build2_loc (loc, MULT_EXPR, type,
10865 fold_convert_loc (loc, type,
10866 negate_expr (op0)), tem);
10868 strict_overflow_p = false;
10869 if (TREE_CODE (arg1) == INTEGER_CST
10870 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10871 &strict_overflow_p)) != 0)
10873 if (strict_overflow_p)
10874 fold_overflow_warning (("assuming signed overflow does not "
10875 "occur when simplifying "
10876 "multiplication"),
10877 WARN_STRICT_OVERFLOW_MISC);
10878 return fold_convert_loc (loc, type, tem);
10881 /* Optimize z * conj(z) for integer complex numbers. */
10882 if (TREE_CODE (arg0) == CONJ_EXPR
10883 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10884 return fold_mult_zconjz (loc, type, arg1);
10885 if (TREE_CODE (arg1) == CONJ_EXPR
10886 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10887 return fold_mult_zconjz (loc, type, arg0);
10889 else
10891 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10892 This is not the same for NaNs or if signed zeros are
10893 involved. */
10894 if (!HONOR_NANS (arg0)
10895 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10896 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10897 && TREE_CODE (arg1) == COMPLEX_CST
10898 && real_zerop (TREE_REALPART (arg1)))
10900 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10901 if (real_onep (TREE_IMAGPART (arg1)))
10902 return
10903 fold_build2_loc (loc, COMPLEX_EXPR, type,
10904 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10905 rtype, arg0)),
10906 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10907 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10908 return
10909 fold_build2_loc (loc, COMPLEX_EXPR, type,
10910 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10911 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10912 rtype, arg0)));
10915 /* Optimize z * conj(z) for floating point complex numbers.
10916 Guarded by flag_unsafe_math_optimizations as non-finite
10917 imaginary components don't produce scalar results. */
10918 if (flag_unsafe_math_optimizations
10919 && TREE_CODE (arg0) == CONJ_EXPR
10920 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10921 return fold_mult_zconjz (loc, type, arg1);
10922 if (flag_unsafe_math_optimizations
10923 && TREE_CODE (arg1) == CONJ_EXPR
10924 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10925 return fold_mult_zconjz (loc, type, arg0);
10927 goto associate;
10929 case BIT_IOR_EXPR:
10930 /* Canonicalize (X & C1) | C2. */
10931 if (TREE_CODE (arg0) == BIT_AND_EXPR
10932 && TREE_CODE (arg1) == INTEGER_CST
10933 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10935 int width = TYPE_PRECISION (type), w;
10936 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
10937 wide_int c2 = wi::to_wide (arg1);
10939 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10940 if ((c1 & c2) == c1)
10941 return omit_one_operand_loc (loc, type, arg1,
10942 TREE_OPERAND (arg0, 0));
10944 wide_int msk = wi::mask (width, false,
10945 TYPE_PRECISION (TREE_TYPE (arg1)));
10947 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10948 if (wi::bit_and_not (msk, c1 | c2) == 0)
10950 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10951 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10954 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10955 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10956 mode which allows further optimizations. */
10957 c1 &= msk;
10958 c2 &= msk;
10959 wide_int c3 = wi::bit_and_not (c1, c2);
10960 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10962 wide_int mask = wi::mask (w, false,
10963 TYPE_PRECISION (type));
10964 if (((c1 | c2) & mask) == mask
10965 && wi::bit_and_not (c1, mask) == 0)
10967 c3 = mask;
10968 break;
10972 if (c3 != c1)
10974 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10975 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
10976 wide_int_to_tree (type, c3));
10977 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
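	  /* Editor's illustration (not in the original source), for 32-bit
	     unsigned X:
	       (X & 0x0f) | 0xff        -> 0xff       since (C1 & C2) == C1,
	       (X & 0xfffffff0) | 0x0f  -> X | 0x0f   since (C1 | C2) == ~0,
	       (X & 0x3f) | 0x30        -> (X & 0x0f) | 0x30, dropping the
	     C1 bits already supplied by C2.  */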
10981 /* See if this can be simplified into a rotate first. If that
10982 is unsuccessful continue in the association code. */
10983 goto bit_rotate;
10985 case BIT_XOR_EXPR:
10986 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10987 if (TREE_CODE (arg0) == BIT_AND_EXPR
10988 && INTEGRAL_TYPE_P (type)
10989 && integer_onep (TREE_OPERAND (arg0, 1))
10990 && integer_onep (arg1))
10991 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10992 build_zero_cst (TREE_TYPE (arg0)));
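      /* Editor's illustration (not in the original source): for X == 6,
	 (6 & 1) ^ 1 == 1 and (6 & 1) == 0 is true; for X == 7 both yield
	 0, i.e. the XOR merely inverts the tested low bit.  */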
10994 /* See if this can be simplified into a rotate first. If that
10995 is unsuccessful continue in the association code. */
10996 goto bit_rotate;
10998 case BIT_AND_EXPR:
10999 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11000 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11001 && INTEGRAL_TYPE_P (type)
11002 && integer_onep (TREE_OPERAND (arg0, 1))
11003 && integer_onep (arg1))
11005 tree tem2;
11006 tem = TREE_OPERAND (arg0, 0);
11007 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11008 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11009 tem, tem2);
11010 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11011 build_zero_cst (TREE_TYPE (tem)));
11013 /* Fold ~X & 1 as (X & 1) == 0. */
11014 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11015 && INTEGRAL_TYPE_P (type)
11016 && integer_onep (arg1))
11018 tree tem2;
11019 tem = TREE_OPERAND (arg0, 0);
11020 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11021 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11022 tem, tem2);
11023 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11024 build_zero_cst (TREE_TYPE (tem)));
11026 /* Fold !X & 1 as X == 0. */
11027 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11028 && integer_onep (arg1))
11030 tem = TREE_OPERAND (arg0, 0);
11031 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11032 build_zero_cst (TREE_TYPE (tem)));
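      /* Editor's illustration (not in the original source): ~X & 1 tests
	 the complement of the low bit, hence (X & 1) == 0, while !X & 1
	 is just the logical negation !X, hence X == 0.  */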
11035 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11036 multiple of 1 << CST. */
11037 if (TREE_CODE (arg1) == INTEGER_CST)
11039 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11040 wide_int ncst1 = -cst1;
11041 if ((cst1 & ncst1) == ncst1
11042 && multiple_of_p (type, arg0,
11043 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11044 return fold_convert_loc (loc, type, arg0);
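      /* Editor's illustration (not in the original source): -8 is
	 ...111000 in binary, so (x * 8) & -8 folds to x * 8, because a
	 multiple of 8 already has its low three bits clear.  */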
11047 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11048 bits from CST2. */
11049 if (TREE_CODE (arg1) == INTEGER_CST
11050 && TREE_CODE (arg0) == MULT_EXPR
11051 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11053 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11054 wide_int masked
11055 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11057 if (masked == 0)
11058 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11059 arg0, arg1);
11060 else if (masked != warg1)
11062 /* Avoid the transform if arg1 is a mask of some
11063 mode which allows further optimizations. */
11064 int pop = wi::popcount (warg1);
11065 if (!(pop >= BITS_PER_UNIT
11066 && pow2p_hwi (pop)
11067 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11068 return fold_build2_loc (loc, code, type, op0,
11069 wide_int_to_tree (type, masked));
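      /* Editor's illustration (not in the original source): x * 4 has its
	 two low bits clear, so (x * 4) & 7 folds to (x * 4) & 4, and
	 (x * 4) & 3 folds to 0 outright.  */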
11073 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11074 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11075 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11077 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11079 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11080 if (mask == -1)
11081 return
11082 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
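      /* Editor's illustration (not in the original source): 0377 is an
	 all-ones mask at unsigned char's 8-bit precision, so the AND
	 cannot clear any bit of (int) c and is dropped.  */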
11085 goto associate;
11087 case RDIV_EXPR:
11088 /* Don't touch a floating-point divide by zero unless the mode
11089 of the constant can represent infinity. */
11090 if (TREE_CODE (arg1) == REAL_CST
11091 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11092 && real_zerop (arg1))
11093 return NULL_TREE;
11095 /* (-A) / (-B) -> A / B */
11096 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11097 return fold_build2_loc (loc, RDIV_EXPR, type,
11098 TREE_OPERAND (arg0, 0),
11099 negate_expr (arg1));
11100 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11101 return fold_build2_loc (loc, RDIV_EXPR, type,
11102 negate_expr (arg0),
11103 TREE_OPERAND (arg1, 0));
11104 return NULL_TREE;
11106 case TRUNC_DIV_EXPR:
11107 /* Fall through */
11109 case FLOOR_DIV_EXPR:
11110 /* Simplify A / (B << N) where A and B are positive and B is
11111 a power of 2, to A >> (N + log2(B)). */
11112 strict_overflow_p = false;
11113 if (TREE_CODE (arg1) == LSHIFT_EXPR
11114 && (TYPE_UNSIGNED (type)
11115 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11117 tree sval = TREE_OPERAND (arg1, 0);
11118 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11120 tree sh_cnt = TREE_OPERAND (arg1, 1);
11121 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11122 wi::exact_log2 (wi::to_wide (sval)));
11124 if (strict_overflow_p)
11125 fold_overflow_warning (("assuming signed overflow does not "
11126 "occur when simplifying A / (B << N)"),
11127 WARN_STRICT_OVERFLOW_MISC);
11129 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11130 sh_cnt, pow2);
11131 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11132 fold_convert_loc (loc, type, arg0), sh_cnt);
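      /* Editor's illustration (not in the original source): for unsigned
	 a, a / (4u << n) folds to a >> (n + 2), since log2 (4) == 2.  */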
11136 /* Fall through */
11138 case ROUND_DIV_EXPR:
11139 case CEIL_DIV_EXPR:
11140 case EXACT_DIV_EXPR:
11141 if (integer_zerop (arg1))
11142 return NULL_TREE;
11144 /* Convert -A / -B to A / B when the type is signed and overflow is
11145 undefined. */
11146 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11147 && TREE_CODE (op0) == NEGATE_EXPR
11148 && negate_expr_p (op1))
11150 if (INTEGRAL_TYPE_P (type))
11151 fold_overflow_warning (("assuming signed overflow does not occur "
11152 "when distributing negation across "
11153 "division"),
11154 WARN_STRICT_OVERFLOW_MISC);
11155 return fold_build2_loc (loc, code, type,
11156 fold_convert_loc (loc, type,
11157 TREE_OPERAND (arg0, 0)),
11158 negate_expr (op1));
11160 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11161 && TREE_CODE (arg1) == NEGATE_EXPR
11162 && negate_expr_p (op0))
11164 if (INTEGRAL_TYPE_P (type))
11165 fold_overflow_warning (("assuming signed overflow does not occur "
11166 "when distributing negation across "
11167 "division"),
11168 WARN_STRICT_OVERFLOW_MISC);
11169 return fold_build2_loc (loc, code, type,
11170 negate_expr (op0),
11171 fold_convert_loc (loc, type,
11172 TREE_OPERAND (arg1, 0)));
11175 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11176 operation, EXACT_DIV_EXPR.
11178 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11180	 At one time others generated faster code, but it's not clear whether
11181	 they still do after the last round of changes to the DIV code in
11182	 expmed.c.  */
11181 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11182 && multiple_of_p (type, arg0, arg1))
11183 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11184 fold_convert (type, arg0),
11185 fold_convert (type, arg1));
11187 strict_overflow_p = false;
11188 if (TREE_CODE (arg1) == INTEGER_CST
11189 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11190 &strict_overflow_p)) != 0)
11192 if (strict_overflow_p)
11193 fold_overflow_warning (("assuming signed overflow does not occur "
11194 "when simplifying division"),
11195 WARN_STRICT_OVERFLOW_MISC);
11196 return fold_convert_loc (loc, type, tem);
11199 return NULL_TREE;
11201 case CEIL_MOD_EXPR:
11202 case FLOOR_MOD_EXPR:
11203 case ROUND_MOD_EXPR:
11204 case TRUNC_MOD_EXPR:
11205 strict_overflow_p = false;
11206 if (TREE_CODE (arg1) == INTEGER_CST
11207 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11208 &strict_overflow_p)) != 0)
11210 if (strict_overflow_p)
11211 fold_overflow_warning (("assuming signed overflow does not occur "
11212 "when simplifying modulus"),
11213 WARN_STRICT_OVERFLOW_MISC);
11214 return fold_convert_loc (loc, type, tem);
11217 return NULL_TREE;
11219 case LROTATE_EXPR:
11220 case RROTATE_EXPR:
11221 case RSHIFT_EXPR:
11222 case LSHIFT_EXPR:
11223 /* Since negative shift count is not well-defined,
11224 don't try to compute it in the compiler. */
11225 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11226 return NULL_TREE;
11228 prec = element_precision (type);
11230 /* If we have a rotate of a bit operation with the rotate count and
11231 the second operand of the bit operation both constant,
11232 permute the two operations. */
11233 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11234 && (TREE_CODE (arg0) == BIT_AND_EXPR
11235 || TREE_CODE (arg0) == BIT_IOR_EXPR
11236 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11237 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11239 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11240 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11241 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11242 fold_build2_loc (loc, code, type,
11243 arg00, arg1),
11244 fold_build2_loc (loc, code, type,
11245 arg01, arg1));
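      /* Editor's illustration (not in the original source): on a 32-bit
	 type, (x & 0xff00) r>> 8 becomes (x r>> 8) & 0xff, rotating the
	 constant along with the variable operand.  */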
11248	      /* Two consecutive rotates adding up to some integer
11249 multiple of the precision of the type can be ignored. */
11250 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11251 && TREE_CODE (arg0) == RROTATE_EXPR
11252 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11253 && wi::umod_trunc (wi::to_wide (arg1)
11254 + wi::to_wide (TREE_OPERAND (arg0, 1)),
11255 prec) == 0)
11256 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
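      /* Editor's illustration (not in the original source): on a 32-bit
	 type, (x r>> 5) r>> 27 rotates by 32 bits in total, a full
	 multiple of the precision, and folds back to x.  */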
11258 return NULL_TREE;
11260 case MIN_EXPR:
11261 case MAX_EXPR:
11262 goto associate;
11264 case TRUTH_ANDIF_EXPR:
11265 /* Note that the operands of this must be ints
11266 and their values must be 0 or 1.
11267 ("true" is a fixed value perhaps depending on the language.) */
11268 /* If first arg is constant zero, return it. */
11269 if (integer_zerop (arg0))
11270 return fold_convert_loc (loc, type, arg0);
11271 /* FALLTHRU */
11272 case TRUTH_AND_EXPR:
11273 /* If either arg is constant true, drop it. */
11274 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11275 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11276 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11277 /* Preserve sequence points. */
11278 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11279 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11280 /* If second arg is constant zero, result is zero, but first arg
11281 must be evaluated. */
11282 if (integer_zerop (arg1))
11283 return omit_one_operand_loc (loc, type, arg1, arg0);
11284 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11285 case will be handled here. */
11286 if (integer_zerop (arg0))
11287 return omit_one_operand_loc (loc, type, arg0, arg1);
11289 /* !X && X is always false. */
11290 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11291 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11292 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11293 /* X && !X is always false. */
11294 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11295 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11296 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11298 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11299 means A >= Y && A != MAX, but in this case we know that
11300 A < X <= MAX. */
11302 if (!TREE_SIDE_EFFECTS (arg0)
11303 && !TREE_SIDE_EFFECTS (arg1))
11305 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11306 if (tem && !operand_equal_p (tem, arg0, 0))
11307 return fold_build2_loc (loc, code, type, tem, arg1);
11309 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11310 if (tem && !operand_equal_p (tem, arg1, 0))
11311 return fold_build2_loc (loc, code, type, arg0, tem);
11314 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11315 != NULL_TREE)
11316 return tem;
11318 return NULL_TREE;
11320 case TRUTH_ORIF_EXPR:
11321 /* Note that the operands of this must be ints
11322	 and their values must be 0 or 1.
11323 ("true" is a fixed value perhaps depending on the language.) */
11324 /* If first arg is constant true, return it. */
11325 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11326 return fold_convert_loc (loc, type, arg0);
11327 /* FALLTHRU */
11328 case TRUTH_OR_EXPR:
11329 /* If either arg is constant zero, drop it. */
11330 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11331 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11332 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11333 /* Preserve sequence points. */
11334 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11335 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11336 /* If second arg is constant true, result is true, but we must
11337 evaluate first arg. */
11338 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11339 return omit_one_operand_loc (loc, type, arg1, arg0);
11340 /* Likewise for first arg, but note this only occurs here for
11341 TRUTH_OR_EXPR. */
11342 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11343 return omit_one_operand_loc (loc, type, arg0, arg1);
11345 /* !X || X is always true. */
11346 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11347 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11348 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11349 /* X || !X is always true. */
11350 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11351 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11352 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11354 /* (X && !Y) || (!X && Y) is X ^ Y */
11355 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
11356 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
11358 tree a0, a1, l0, l1, n0, n1;
11360 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11361 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11363 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11364 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11366 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
11367 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
11369 if ((operand_equal_p (n0, a0, 0)
11370 && operand_equal_p (n1, a1, 0))
11371 || (operand_equal_p (n0, a1, 0)
11372 && operand_equal_p (n1, a0, 0)))
11373 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
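      /* Editor's illustration (not in the original source): both sides
	 are true exactly when x and y differ, so
	 (x && !y) || (!x && y) folds to x ^ y.  */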
11376 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11377 != NULL_TREE)
11378 return tem;
11380 return NULL_TREE;
11382 case TRUTH_XOR_EXPR:
11383 /* If the second arg is constant zero, drop it. */
11384 if (integer_zerop (arg1))
11385 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11386 /* If the second arg is constant true, this is a logical inversion. */
11387 if (integer_onep (arg1))
11389 tem = invert_truthvalue_loc (loc, arg0);
11390 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
11392 /* Identical arguments cancel to zero. */
11393 if (operand_equal_p (arg0, arg1, 0))
11394 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11396 /* !X ^ X is always true. */
11397 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11398 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11399 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11401 /* X ^ !X is always true. */
11402 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11403 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11404 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11406 return NULL_TREE;
11408 case EQ_EXPR:
11409 case NE_EXPR:
11410 STRIP_NOPS (arg0);
11411 STRIP_NOPS (arg1);
11413 tem = fold_comparison (loc, code, type, op0, op1);
11414 if (tem != NULL_TREE)
11415 return tem;
11417 /* bool_var != 1 becomes !bool_var. */
11418 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11419 && code == NE_EXPR)
11420 return fold_convert_loc (loc, type,
11421 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11422 TREE_TYPE (arg0), arg0));
11424 /* bool_var == 0 becomes !bool_var. */
11425 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11426 && code == EQ_EXPR)
11427 return fold_convert_loc (loc, type,
11428 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11429 TREE_TYPE (arg0), arg0));
11431 /* !exp != 0 becomes !exp */
11432 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
11433 && code == NE_EXPR)
11434 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11436 /* If this is an EQ or NE comparison with zero and ARG0 is
11437 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11438 two operations, but the latter can be done in one less insn
11439 on machines that have only two-operand insns or on which a
11440 constant cannot be the first operand. */
11441 if (TREE_CODE (arg0) == BIT_AND_EXPR
11442 && integer_zerop (arg1))
11444 tree arg00 = TREE_OPERAND (arg0, 0);
11445 tree arg01 = TREE_OPERAND (arg0, 1);
11446 if (TREE_CODE (arg00) == LSHIFT_EXPR
11447 && integer_onep (TREE_OPERAND (arg00, 0)))
11449 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
11450 arg01, TREE_OPERAND (arg00, 1));
11451 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11452 build_int_cst (TREE_TYPE (arg0), 1));
11453 return fold_build2_loc (loc, code, type,
11454 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11455 arg1);
11457 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11458 && integer_onep (TREE_OPERAND (arg01, 0)))
11460 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
11461 arg00, TREE_OPERAND (arg01, 1));
11462 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11463 build_int_cst (TREE_TYPE (arg0), 1));
11464 return fold_build2_loc (loc, code, type,
11465 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11466 arg1);
11470 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11471 C1 is a valid shift constant, and C2 is a power of two, i.e.
11472 a single bit. */
11473 if (TREE_CODE (arg0) == BIT_AND_EXPR
11474 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11475 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11476 == INTEGER_CST
11477 && integer_pow2p (TREE_OPERAND (arg0, 1))
11478 && integer_zerop (arg1))
11480 tree itype = TREE_TYPE (arg0);
11481 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11482 prec = TYPE_PRECISION (itype);
11484 /* Check for a valid shift count. */
11485 if (wi::ltu_p (wi::to_wide (arg001), prec))
11487 tree arg01 = TREE_OPERAND (arg0, 1);
11488 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11489 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11490 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11491 can be rewritten as (X & (C2 << C1)) != 0. */
11492 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11494 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
11495 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
11496 return fold_build2_loc (loc, code, type, tem,
11497 fold_convert_loc (loc, itype, arg1));
11499 /* Otherwise, for signed (arithmetic) shifts,
11500 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11501 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11502 else if (!TYPE_UNSIGNED (itype))
11503 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11504 arg000, build_int_cst (itype, 0));
11505	      /* Otherwise, for unsigned (logical) shifts,
11506 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11507 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11508 else
11509 return omit_one_operand_loc (loc, type,
11510 code == EQ_EXPR ? integer_one_node
11511 : integer_zero_node,
11512 arg000);
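      /* Editor's illustration (not in the original source), for 32-bit
	 int x: ((x >> 28) & 8) != 0 becomes (x & (8 << 28)) != 0, a plain
	 sign-bit test, because 8 << 28 still fits in the precision;
	 ((x >> 31) & 2) != 0 cannot be rewritten that way
	 (log2 (2) + 31 == 32) and instead folds to x < 0, relying on the
	 arithmetic shift smearing the sign bit.  */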
11516 /* If this is a comparison of a field, we may be able to simplify it. */
11517 if ((TREE_CODE (arg0) == COMPONENT_REF
11518 || TREE_CODE (arg0) == BIT_FIELD_REF)
11519 /* Handle the constant case even without -O
11520 to make sure the warnings are given. */
11521 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11523 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
11524 if (t1)
11525 return t1;
11528 /* Optimize comparisons of strlen vs zero to a compare of the
11529 first character of the string vs zero. To wit,
11530 strlen(ptr) == 0 => *ptr == 0
11531 strlen(ptr) != 0 => *ptr != 0
11532 Other cases should reduce to one of these two (or a constant)
11533 due to the return value of strlen being unsigned. */
11534 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
11536 tree fndecl = get_callee_fndecl (arg0);
11538 if (fndecl
11539 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
11540 && call_expr_nargs (arg0) == 1
11541 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
11542 == POINTER_TYPE))
11544 tree ptrtype
11545 = build_pointer_type (build_qualified_type (char_type_node,
11546 TYPE_QUAL_CONST));
11547 tree ptr = fold_convert_loc (loc, ptrtype,
11548 CALL_EXPR_ARG (arg0, 0));
11549 tree iref = build_fold_indirect_ref_loc (loc, ptr);
11550 return fold_build2_loc (loc, code, type, iref,
11551 build_int_cst (TREE_TYPE (iref), 0));
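      /* Editor's illustration (not in the original source):
	 strlen (p) == 0 folds to *(const char *) p == 0, avoiding the
	 library call when only emptiness is being tested.  */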
11555 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11556 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11557 if (TREE_CODE (arg0) == RSHIFT_EXPR
11558 && integer_zerop (arg1)
11559 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11561 tree arg00 = TREE_OPERAND (arg0, 0);
11562 tree arg01 = TREE_OPERAND (arg0, 1);
11563 tree itype = TREE_TYPE (arg00);
11564 if (wi::to_wide (arg01) == element_precision (itype) - 1)
11566 if (TYPE_UNSIGNED (itype))
11568 itype = signed_type_for (itype);
11569 arg00 = fold_convert_loc (loc, itype, arg00);
11571 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11572 type, arg00, build_zero_cst (itype));
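      /* Editor's illustration (not in the original source): for 32-bit
	 unsigned u, (u >> 31) != 0 folds to (int) u < 0, testing the
	 sign bit directly after switching to the signed type.  */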
11576 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11577 (X & C) == 0 when C is a single bit. */
11578 if (TREE_CODE (arg0) == BIT_AND_EXPR
11579 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11580 && integer_zerop (arg1)
11581 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11583 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
11584 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11585 TREE_OPERAND (arg0, 1));
11586 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11587 type, tem,
11588 fold_convert_loc (loc, TREE_TYPE (arg0),
11589 arg1));
11592 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11593 constant C is a power of two, i.e. a single bit. */
11594 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11595 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11596 && integer_zerop (arg1)
11597 && integer_pow2p (TREE_OPERAND (arg0, 1))
11598 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11599 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11601 tree arg00 = TREE_OPERAND (arg0, 0);
11602 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11603 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11606 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11607	 when C is a power of two, i.e. a single bit.  */
11608 if (TREE_CODE (arg0) == BIT_AND_EXPR
11609 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11610 && integer_zerop (arg1)
11611 && integer_pow2p (TREE_OPERAND (arg0, 1))
11612 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11613 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11615 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11616 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
11617 arg000, TREE_OPERAND (arg0, 1));
11618 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11619 tem, build_int_cst (TREE_TYPE (tem), 0));
11622 if (integer_zerop (arg1)
11623 && tree_expr_nonzero_p (arg0))
11625 tree res = constant_boolean_node (code==NE_EXPR, type);
11626 return omit_one_operand_loc (loc, type, res, arg0);
11629	      /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
11630 if (TREE_CODE (arg0) == BIT_AND_EXPR
11631 && TREE_CODE (arg1) == BIT_AND_EXPR)
11633 tree arg00 = TREE_OPERAND (arg0, 0);
11634 tree arg01 = TREE_OPERAND (arg0, 1);
11635 tree arg10 = TREE_OPERAND (arg1, 0);
11636 tree arg11 = TREE_OPERAND (arg1, 1);
11637 tree itype = TREE_TYPE (arg0);
11639 if (operand_equal_p (arg01, arg11, 0))
11641 tem = fold_convert_loc (loc, itype, arg10);
11642 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11643 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
11644 return fold_build2_loc (loc, code, type, tem,
11645 build_zero_cst (itype));
11647 if (operand_equal_p (arg01, arg10, 0))
11649 tem = fold_convert_loc (loc, itype, arg11);
11650 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11651 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
11652 return fold_build2_loc (loc, code, type, tem,
11653 build_zero_cst (itype));
11655 if (operand_equal_p (arg00, arg11, 0))
11657 tem = fold_convert_loc (loc, itype, arg10);
11658 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
11659 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
11660 return fold_build2_loc (loc, code, type, tem,
11661 build_zero_cst (itype));
11663 if (operand_equal_p (arg00, arg10, 0))
11665 tem = fold_convert_loc (loc, itype, arg11);
11666 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
11667 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
11668 return fold_build2_loc (loc, code, type, tem,
11669 build_zero_cst (itype));
11673 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11674 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11676 tree arg00 = TREE_OPERAND (arg0, 0);
11677 tree arg01 = TREE_OPERAND (arg0, 1);
11678 tree arg10 = TREE_OPERAND (arg1, 0);
11679 tree arg11 = TREE_OPERAND (arg1, 1);
11680 tree itype = TREE_TYPE (arg0);
11682 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11683 operand_equal_p guarantees no side-effects so we don't need
11684 to use omit_one_operand on Z. */
11685 if (operand_equal_p (arg01, arg11, 0))
11686 return fold_build2_loc (loc, code, type, arg00,
11687 fold_convert_loc (loc, TREE_TYPE (arg00),
11688 arg10));
11689 if (operand_equal_p (arg01, arg10, 0))
11690 return fold_build2_loc (loc, code, type, arg00,
11691 fold_convert_loc (loc, TREE_TYPE (arg00),
11692 arg11));
11693 if (operand_equal_p (arg00, arg11, 0))
11694 return fold_build2_loc (loc, code, type, arg01,
11695 fold_convert_loc (loc, TREE_TYPE (arg01),
11696 arg10));
11697 if (operand_equal_p (arg00, arg10, 0))
11698 return fold_build2_loc (loc, code, type, arg01,
11699 fold_convert_loc (loc, TREE_TYPE (arg01),
11700 arg11));
11702 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11703 if (TREE_CODE (arg01) == INTEGER_CST
11704 && TREE_CODE (arg11) == INTEGER_CST)
11706 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11707 fold_convert_loc (loc, itype, arg11));
11708 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11709 return fold_build2_loc (loc, code, type, tem,
11710 fold_convert_loc (loc, itype, arg10));
11714 /* Attempt to simplify equality/inequality comparisons of complex
11715 values. Only lower the comparison if the result is known or
11716 can be simplified to a single scalar comparison. */
11717 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11718 || TREE_CODE (arg0) == COMPLEX_CST)
11719 && (TREE_CODE (arg1) == COMPLEX_EXPR
11720 || TREE_CODE (arg1) == COMPLEX_CST))
11722 tree real0, imag0, real1, imag1;
11723 tree rcond, icond;
11725 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11727 real0 = TREE_OPERAND (arg0, 0);
11728 imag0 = TREE_OPERAND (arg0, 1);
11730 else
11732 real0 = TREE_REALPART (arg0);
11733 imag0 = TREE_IMAGPART (arg0);
11736 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11738 real1 = TREE_OPERAND (arg1, 0);
11739 imag1 = TREE_OPERAND (arg1, 1);
11741 else
11743 real1 = TREE_REALPART (arg1);
11744 imag1 = TREE_IMAGPART (arg1);
11747 rcond = fold_binary_loc (loc, code, type, real0, real1);
11748 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11750 if (integer_zerop (rcond))
11752 if (code == EQ_EXPR)
11753 return omit_two_operands_loc (loc, type, boolean_false_node,
11754 imag0, imag1);
11755 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11757 else
11759 if (code == NE_EXPR)
11760 return omit_two_operands_loc (loc, type, boolean_true_node,
11761 imag0, imag1);
11762 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11766 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11767 if (icond && TREE_CODE (icond) == INTEGER_CST)
11769 if (integer_zerop (icond))
11771 if (code == EQ_EXPR)
11772 return omit_two_operands_loc (loc, type, boolean_false_node,
11773 real0, real1);
11774 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11776 else
11778 if (code == NE_EXPR)
11779 return omit_two_operands_loc (loc, type, boolean_true_node,
11780 real0, real1);
11781 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11786 return NULL_TREE;
11788 case LT_EXPR:
11789 case GT_EXPR:
11790 case LE_EXPR:
11791 case GE_EXPR:
11792 tem = fold_comparison (loc, code, type, op0, op1);
11793 if (tem != NULL_TREE)
11794 return tem;
11796 /* Transform comparisons of the form X +- C CMP X. */
11797 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11798 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11799 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11800 && !HONOR_SNANS (arg0))
11802 tree arg01 = TREE_OPERAND (arg0, 1);
11803 enum tree_code code0 = TREE_CODE (arg0);
11804 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11806 /* (X - c) > X becomes false. */
11807 if (code == GT_EXPR
11808 && ((code0 == MINUS_EXPR && is_positive >= 0)
11809 || (code0 == PLUS_EXPR && is_positive <= 0)))
11810 return constant_boolean_node (0, type);
11812 /* Likewise (X + c) < X becomes false. */
11813 if (code == LT_EXPR
11814 && ((code0 == PLUS_EXPR && is_positive >= 0)
11815 || (code0 == MINUS_EXPR && is_positive <= 0)))
11816 return constant_boolean_node (0, type);
11818 /* Convert (X - c) <= X to true. */
11819 if (!HONOR_NANS (arg1)
11820 && code == LE_EXPR
11821 && ((code0 == MINUS_EXPR && is_positive >= 0)
11822 || (code0 == PLUS_EXPR && is_positive <= 0)))
11823 return constant_boolean_node (1, type);
11825 /* Convert (X + c) >= X to true. */
11826 if (!HONOR_NANS (arg1)
11827 && code == GE_EXPR
11828 && ((code0 == PLUS_EXPR && is_positive >= 0)
11829 || (code0 == MINUS_EXPR && is_positive <= 0)))
11830 return constant_boolean_node (1, type);
11833 /* If we are comparing an ABS_EXPR with a constant, we can
11834 convert all the cases into explicit comparisons, but they may
11835 well not be faster than doing the ABS and one comparison.
11836 But ABS (X) <= C is a range comparison, which becomes a subtraction
11837 and a comparison, and is probably faster. */
11838 if (code == LE_EXPR
11839 && TREE_CODE (arg1) == INTEGER_CST
11840 && TREE_CODE (arg0) == ABS_EXPR
11841 && ! TREE_SIDE_EFFECTS (arg0)
11842 && (tem = negate_expr (arg1)) != 0
11843 && TREE_CODE (tem) == INTEGER_CST
11844 && !TREE_OVERFLOW (tem))
11845 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11846 build2 (GE_EXPR, type,
11847 TREE_OPERAND (arg0, 0), tem),
11848 build2 (LE_EXPR, type,
11849 TREE_OPERAND (arg0, 0), arg1));
11851 /* Convert ABS_EXPR<x> >= 0 to true. */
11852 strict_overflow_p = false;
11853 if (code == GE_EXPR
11854 && (integer_zerop (arg1)
11855 || (! HONOR_NANS (arg0)
11856 && real_zerop (arg1)))
11857 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11859 if (strict_overflow_p)
11860 fold_overflow_warning (("assuming signed overflow does not occur "
11861 "when simplifying comparison of "
11862 "absolute value and zero"),
11863 WARN_STRICT_OVERFLOW_CONDITIONAL);
11864 return omit_one_operand_loc (loc, type,
11865 constant_boolean_node (true, type),
11866 arg0);
11869 /* Convert ABS_EXPR<x> < 0 to false. */
11870 strict_overflow_p = false;
11871 if (code == LT_EXPR
11872 && (integer_zerop (arg1) || real_zerop (arg1))
11873 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11875 if (strict_overflow_p)
11876 fold_overflow_warning (("assuming signed overflow does not occur "
11877 "when simplifying comparison of "
11878 "absolute value and zero"),
11879 WARN_STRICT_OVERFLOW_CONDITIONAL);
11880 return omit_one_operand_loc (loc, type,
11881 constant_boolean_node (false, type),
11882 arg0);
11885 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11886 and similarly for >= into !=. */
11887 if ((code == LT_EXPR || code == GE_EXPR)
11888 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11889 && TREE_CODE (arg1) == LSHIFT_EXPR
11890 && integer_onep (TREE_OPERAND (arg1, 0)))
11891 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11892 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11893 TREE_OPERAND (arg1, 1)),
11894 build_zero_cst (TREE_TYPE (arg0)));
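      /* Editor's illustration (not in the original source): for unsigned
	 x, x < (1u << y) holds exactly when no bit at position y or above
	 is set, i.e. (x >> y) == 0; likewise x >= (1u << y) becomes
	 (x >> y) != 0.  */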
11896 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11897 otherwise Y might be >= # of bits in X's type and thus e.g.
11898	 (unsigned char) (1 << Y) for Y == 15 might be 0.
11899 If the cast is widening, then 1 << Y should have unsigned type,
11900 otherwise if Y is number of bits in the signed shift type minus 1,
11901	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for
11902	 Y == 31 might be 0xffffffff80000000.  */
11903 if ((code == LT_EXPR || code == GE_EXPR)
11904 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11905 && CONVERT_EXPR_P (arg1)
11906 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11907 && (element_precision (TREE_TYPE (arg1))
11908 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11909 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11910 || (element_precision (TREE_TYPE (arg1))
11911 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11912 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11914 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11915 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11916 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11917 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11918 build_zero_cst (TREE_TYPE (arg0)));
11921 return NULL_TREE;
11923 case UNORDERED_EXPR:
11924 case ORDERED_EXPR:
11925 case UNLT_EXPR:
11926 case UNLE_EXPR:
11927 case UNGT_EXPR:
11928 case UNGE_EXPR:
11929 case UNEQ_EXPR:
11930 case LTGT_EXPR:
11931 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11933 tree targ0 = strip_float_extensions (arg0);
11934 tree targ1 = strip_float_extensions (arg1);
11935 tree newtype = TREE_TYPE (targ0);
11937 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11938 newtype = TREE_TYPE (targ1);
11940 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11941 return fold_build2_loc (loc, code, type,
11942 fold_convert_loc (loc, newtype, targ0),
11943 fold_convert_loc (loc, newtype, targ1));
11946 return NULL_TREE;
11948 case COMPOUND_EXPR:
11949 /* When pedantic, a compound expression can be neither an lvalue
11950 nor an integer constant expression. */
11951 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11952 return NULL_TREE;
11953	      /* Don't let (0, 0) be a null pointer constant.  */
11954 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11955 : fold_convert_loc (loc, type, arg1);
11956 return pedantic_non_lvalue_loc (loc, tem);
11958 case ASSERT_EXPR:
11959 /* An ASSERT_EXPR should never be passed to fold_binary. */
11960 gcc_unreachable ();
11962 default:
11963 return NULL_TREE;
11964 } /* switch (code) */
11967 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11968 ((A & N) + B) & M -> (A + B) & M
11969 Similarly if (N & M) == 0,
11970 ((A | N) + B) & M -> (A + B) & M
11971 and for - instead of + (or unary - instead of +)
11972 and/or ^ instead of |.
11973 If B is constant and (B & M) == 0, fold into A & M.
11975	   This function is a helper for match.pd patterns.  It returns the
11976	   type in which the simplified operation should be performed, or
11977	   NULL_TREE if no simplification is possible.
11979 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
11980 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
11981 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
11982 +/-. */
11983 tree
11984 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
11985 tree arg00, enum tree_code code00, tree arg000, tree arg001,
11986 tree arg01, enum tree_code code01, tree arg010, tree arg011,
11987 tree *pmop)
11989 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
11990 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
11991 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11992 if (~cst1 == 0
11993 || (cst1 & (cst1 + 1)) != 0
11994 || !INTEGRAL_TYPE_P (type)
11995 || (!TYPE_OVERFLOW_WRAPS (type)
11996 && TREE_CODE (type) != INTEGER_TYPE)
11997 || (wi::max_value (type) & cst1) != cst1)
11998 return NULL_TREE;
12000 enum tree_code codes[2] = { code00, code01 };
12001 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12002 int which = 0;
12003 wide_int cst0;
12005 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12006 arg1 (M) is == (1LL << cst) - 1.
12007 Store C into PMOP[0] and D into PMOP[1]. */
12008 pmop[0] = arg00;
12009 pmop[1] = arg01;
12010 which = code != NEGATE_EXPR;
12012 for (; which >= 0; which--)
12013 switch (codes[which])
12015 case BIT_AND_EXPR:
12016 case BIT_IOR_EXPR:
12017 case BIT_XOR_EXPR:
12018 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12019 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12020 if (codes[which] == BIT_AND_EXPR)
12022 if (cst0 != cst1)
12023 break;
12025 else if (cst0 != 0)
12026 break;
12027 /* If C or D is of the form (A & N) where
12028 (N & M) == M, or of the form (A | N) or
12029 (A ^ N) where (N & M) == 0, replace it with A. */
12030 pmop[which] = arg0xx[2 * which];
12031 break;
12032 case ERROR_MARK:
12033 if (TREE_CODE (pmop[which]) != INTEGER_CST)
12034 break;
12035	      /* If C or D is a constant N where (N & M) == 0, it can be
12036 omitted (replaced with 0). */
12037 if ((code == PLUS_EXPR
12038 || (code == MINUS_EXPR && which == 0))
12039 && (cst1 & wi::to_wide (pmop[which])) == 0)
12040 pmop[which] = build_int_cst (type, 0);
12041 /* Similarly, with C - N where (-N & M) == 0. */
12042 if (code == MINUS_EXPR
12043 && which == 1
12044 && (cst1 & -wi::to_wide (pmop[which])) == 0)
12045 pmop[which] = build_int_cst (type, 0);
12046 break;
12047 default:
12048 gcc_unreachable ();
12051 /* Only build anything new if we optimized one or both arguments above. */
12052 if (pmop[0] == arg00 && pmop[1] == arg01)
12053 return NULL_TREE;
12055 if (TYPE_OVERFLOW_WRAPS (type))
12056 return type;
12057 else
12058 return unsigned_type_for (type);
12061	/* Used by contains_label_p and contains_label_1.  */
12063 struct contains_label_data
12065 hash_set<tree> *pset;
12066 bool inside_switch_p;
12069 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12070 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12071 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12073 static tree
12074 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12076 contains_label_data *d = (contains_label_data *) data;
12077 switch (TREE_CODE (*tp))
12079 case LABEL_EXPR:
12080 return *tp;
12082 case CASE_LABEL_EXPR:
12083 if (!d->inside_switch_p)
12084 return *tp;
12085 return NULL_TREE;
12087 case SWITCH_EXPR:
12088 if (!d->inside_switch_p)
12090 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12091 return *tp;
12092 d->inside_switch_p = true;
12093 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12094 return *tp;
12095 d->inside_switch_p = false;
12096 *walk_subtrees = 0;
12098 return NULL_TREE;
12100 case GOTO_EXPR:
12101 *walk_subtrees = 0;
12102 return NULL_TREE;
12104 default:
12105 return NULL_TREE;
12109 /* Return whether the sub-tree ST contains a label which is accessible from
12110 outside the sub-tree. */
12112 static bool
12113 contains_label_p (tree st)
12115 hash_set<tree> pset;
12116 contains_label_data data = { &pset, false };
12117 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12120 /* Fold a ternary expression of code CODE and type TYPE with operands
12121 OP0, OP1, and OP2. Return the folded expression if folding is
12122 successful. Otherwise, return NULL_TREE. */
12124 tree
12125 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12126 tree op0, tree op1, tree op2)
12128 tree tem;
12129 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12130 enum tree_code_class kind = TREE_CODE_CLASS (code);
12132 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12133 && TREE_CODE_LENGTH (code) == 3);
12135 /* If this is a commutative operation, and OP0 is a constant, move it
12136 to OP1 to reduce the number of tests below. */
12137 if (commutative_ternary_tree_code (code)
12138 && tree_swap_operands_p (op0, op1))
12139 return fold_build3_loc (loc, code, type, op1, op0, op2);
12141 tem = generic_simplify (loc, code, type, op0, op1, op2);
12142 if (tem)
12143 return tem;
12145 /* Strip any conversions that don't change the mode. This is safe
12146 for every expression, except for a comparison expression because
12147 its signedness is derived from its operands. So, in the latter
12148 case, only strip conversions that don't change the signedness.
12150 Note that this is done as an internal manipulation within the
12151 constant folder, in order to find the simplest representation of
12152 the arguments so that their form can be studied. In any cases,
12153 the appropriate type conversions should be put back in the tree
12154 that will get out of the constant folder. */
12155 if (op0)
12157 arg0 = op0;
12158 STRIP_NOPS (arg0);
12161 if (op1)
12163 arg1 = op1;
12164 STRIP_NOPS (arg1);
12167 if (op2)
12169 arg2 = op2;
12170 STRIP_NOPS (arg2);
12173 switch (code)
12175 case COMPONENT_REF:
12176 if (TREE_CODE (arg0) == CONSTRUCTOR
12177 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12179 unsigned HOST_WIDE_INT idx;
12180 tree field, value;
12181 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12182 if (field == arg1)
12183 return value;
12185 return NULL_TREE;
12187 case COND_EXPR:
12188 case VEC_COND_EXPR:
12189 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12190 so all simple results must be passed through pedantic_non_lvalue. */
12191 if (TREE_CODE (arg0) == INTEGER_CST)
12193 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12194 tem = integer_zerop (arg0) ? op2 : op1;
12195 /* Only optimize constant conditions when the selected branch
12196 has the same type as the COND_EXPR. This avoids optimizing
12197 away "c ? x : throw", where the throw has a void type.
12198	     Avoid throwing away an operand which contains a label.  */
12199 if ((!TREE_SIDE_EFFECTS (unused_op)
12200 || !contains_label_p (unused_op))
12201 && (! VOID_TYPE_P (TREE_TYPE (tem))
12202 || VOID_TYPE_P (type)))
12203 return pedantic_non_lvalue_loc (loc, tem);
12204 return NULL_TREE;
12206 else if (TREE_CODE (arg0) == VECTOR_CST)
12208 unsigned HOST_WIDE_INT nelts;
12209 if ((TREE_CODE (arg1) == VECTOR_CST
12210 || TREE_CODE (arg1) == CONSTRUCTOR)
12211 && (TREE_CODE (arg2) == VECTOR_CST
12212 || TREE_CODE (arg2) == CONSTRUCTOR)
12213 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12215 vec_perm_builder sel (nelts, nelts, 1);
12216 for (unsigned int i = 0; i < nelts; i++)
12218 tree val = VECTOR_CST_ELT (arg0, i);
12219 if (integer_all_onesp (val))
12220 sel.quick_push (i);
12221 else if (integer_zerop (val))
12222 sel.quick_push (nelts + i);
12223 else /* Currently unreachable. */
12224 return NULL_TREE;
12226 vec_perm_indices indices (sel, 2, nelts);
12227 tree t = fold_vec_perm (type, arg1, arg2, indices);
12228 if (t != NULL_TREE)
12229 return t;
12233 /* If we have A op B ? A : C, we may be able to convert this to a
12234 simpler expression, depending on the operation and the values
12235 of B and C. Signed zeros prevent all of these transformations,
12236 for reasons given above each one.
12238 Also try swapping the arguments and inverting the conditional. */
12239 if (COMPARISON_CLASS_P (arg0)
12240 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12241 && !HONOR_SIGNED_ZEROS (element_mode (op1)))
12243 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
12244 if (tem)
12245 return tem;
12248 if (COMPARISON_CLASS_P (arg0)
12249 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12250 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
12252 location_t loc0 = expr_location_or (arg0, loc);
12253 tem = fold_invert_truthvalue (loc0, arg0);
12254 if (tem && COMPARISON_CLASS_P (tem))
12256 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
12257 if (tem)
12258 return tem;
12262 /* If the second operand is simpler than the third, swap them
12263 since that produces better jump optimization results. */
12264 if (truth_value_p (TREE_CODE (arg0))
12265 && tree_swap_operands_p (op1, op2))
12267 location_t loc0 = expr_location_or (arg0, loc);
12268 /* See if this can be inverted. If it can't, possibly because
12269 it was a floating-point inequality comparison, don't do
12270 anything. */
12271 tem = fold_invert_truthvalue (loc0, arg0);
12272 if (tem)
12273 return fold_build3_loc (loc, code, type, tem, op2, op1);
12276 /* Convert A ? 1 : 0 to simply A. */
12277 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12278 : (integer_onep (op1)
12279 && !VECTOR_TYPE_P (type)))
12280 && integer_zerop (op2)
12281 /* If we try to convert OP0 to our type, the
12282 call to fold will try to move the conversion inside
12283 a COND, which will recurse. In that case, the COND_EXPR
12284 is probably the best choice, so leave it alone. */
12285 && type == TREE_TYPE (arg0))
12286 return pedantic_non_lvalue_loc (loc, arg0);
12288 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12289 over COND_EXPR in cases such as floating point comparisons. */
12290 if (integer_zerop (op1)
12291 && code == COND_EXPR
12292 && integer_onep (op2)
12293 && !VECTOR_TYPE_P (type)
12294 && truth_value_p (TREE_CODE (arg0)))
12295 return pedantic_non_lvalue_loc (loc,
12296 fold_convert_loc (loc, type,
12297 invert_truthvalue_loc (loc,
12298 arg0)));
12300 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12301 if (TREE_CODE (arg0) == LT_EXPR
12302 && integer_zerop (TREE_OPERAND (arg0, 1))
12303 && integer_zerop (op2)
12304 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12306 /* sign_bit_p looks through both zero and sign extensions,
12307 but for this optimization only sign extensions are
12308 usable. */
12309 tree tem2 = TREE_OPERAND (arg0, 0);
12310 while (tem != tem2)
12312 if (TREE_CODE (tem2) != NOP_EXPR
12313 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12315 tem = NULL_TREE;
12316 break;
12318 tem2 = TREE_OPERAND (tem2, 0);
12320 /* sign_bit_p only checks ARG1 bits within A's precision.
12321 If <sign bit of A> has wider type than A, bits outside
12322 of A's precision in <sign bit of A> need to be checked.
12323	     If they are all 0, this optimization needs to be done
12324	     in unsigned A's type; if they are all 1, in signed A's type;
12325	     otherwise this can't be done.  */
12326 if (tem
12327 && TYPE_PRECISION (TREE_TYPE (tem))
12328 < TYPE_PRECISION (TREE_TYPE (arg1))
12329 && TYPE_PRECISION (TREE_TYPE (tem))
12330 < TYPE_PRECISION (type))
12332 int inner_width, outer_width;
12333 tree tem_type;
12335 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12336 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12337 if (outer_width > TYPE_PRECISION (type))
12338 outer_width = TYPE_PRECISION (type);
12340 wide_int mask = wi::shifted_mask
12341 (inner_width, outer_width - inner_width, false,
12342 TYPE_PRECISION (TREE_TYPE (arg1)));
12344 wide_int common = mask & wi::to_wide (arg1);
12345 if (common == mask)
12347 tem_type = signed_type_for (TREE_TYPE (tem));
12348 tem = fold_convert_loc (loc, tem_type, tem);
12350 else if (common == 0)
12352 tem_type = unsigned_type_for (TREE_TYPE (tem));
12353 tem = fold_convert_loc (loc, tem_type, tem);
12355 else
12356 tem = NULL;
12359 if (tem)
12360 return
12361 fold_convert_loc (loc, type,
12362 fold_build2_loc (loc, BIT_AND_EXPR,
12363 TREE_TYPE (tem), tem,
12364 fold_convert_loc (loc,
12365 TREE_TYPE (tem),
12366 arg1)));
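      /* Editor's illustration (not in the original source): for 32-bit
	 int a, a < 0 ? INT_MIN : 0 yields exactly the sign bit of a,
	 so it folds to the single AND a & INT_MIN.  */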
12369 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12370 already handled above. */
12371 if (TREE_CODE (arg0) == BIT_AND_EXPR
12372 && integer_onep (TREE_OPERAND (arg0, 1))
12373 && integer_zerop (op2)
12374 && integer_pow2p (arg1))
12376 tree tem = TREE_OPERAND (arg0, 0);
12377 STRIP_NOPS (tem);
12378 if (TREE_CODE (tem) == RSHIFT_EXPR
12379 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
12380 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
12381 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
12382 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12383 fold_convert_loc (loc, type,
12384 TREE_OPERAND (tem, 0)),
12385 op1);
12388 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12389 is probably obsolete because the first operand should be a
12390 truth value (that's why we have the two cases above), but let's
12391 leave it in until we can confirm this for all front-ends. */
12392 if (integer_zerop (op2)
12393 && TREE_CODE (arg0) == NE_EXPR
12394 && integer_zerop (TREE_OPERAND (arg0, 1))
12395 && integer_pow2p (arg1)
12396 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12397 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12398 arg1, OEP_ONLY_CONST)
12399 /* operand_equal_p compares just value, not precision, so e.g.
12400 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
12401 second operand 32-bit -128, which is not a power of two (or vice
12402	     versa).  */
12403 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
12404 return pedantic_non_lvalue_loc (loc,
12405 fold_convert_loc (loc, type,
12406 TREE_OPERAND (arg0,
12407 0)));
12409 /* Disable the transformations below for vectors, since
12410 fold_binary_op_with_conditional_arg may undo them immediately,
12411 yielding an infinite loop. */
12412 if (code == VEC_COND_EXPR)
12413 return NULL_TREE;
12415 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12416 if (integer_zerop (op2)
12417 && truth_value_p (TREE_CODE (arg0))
12418 && truth_value_p (TREE_CODE (arg1))
12419 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12420 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
12421 : TRUTH_ANDIF_EXPR,
12422 type, fold_convert_loc (loc, type, arg0), op1);
12424 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12425 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
12426 && truth_value_p (TREE_CODE (arg0))
12427 && truth_value_p (TREE_CODE (arg1))
12428 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12430 location_t loc0 = expr_location_or (arg0, loc);
12431 /* Only perform the transformation if ARG0 is easily inverted. */
12432 tem = fold_invert_truthvalue (loc0, arg0);
12433 if (tem)
12434 return fold_build2_loc (loc, code == VEC_COND_EXPR
12435 ? BIT_IOR_EXPR
12436 : TRUTH_ORIF_EXPR,
12437 type, fold_convert_loc (loc, type, tem),
12438 op1);
12441 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12442 if (integer_zerop (arg1)
12443 && truth_value_p (TREE_CODE (arg0))
12444 && truth_value_p (TREE_CODE (op2))
12445 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12447 location_t loc0 = expr_location_or (arg0, loc);
12448 /* Only perform the transformation if ARG0 is easily inverted. */
12449 tem = fold_invert_truthvalue (loc0, arg0);
12450 if (tem)
12451 return fold_build2_loc (loc, code == VEC_COND_EXPR
12452 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
12453 type, fold_convert_loc (loc, type, tem),
12454 op2);
12457 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12458 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
12459 && truth_value_p (TREE_CODE (arg0))
12460 && truth_value_p (TREE_CODE (op2))
12461 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12462 return fold_build2_loc (loc, code == VEC_COND_EXPR
12463 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
12464 type, fold_convert_loc (loc, type, arg0), op2);
12466 return NULL_TREE;
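/* Illustrative summary (hypothetical C input, A and B truth values) of
   the four conversions above:

     a ? b : 0   becomes   a && b
     a ? b : 1   becomes  !a || b
     a ? 0 : b   becomes  !a && b
     a ? 1 : b   becomes   a || b

   so e.g. "x > 0 ? y < 2 : 0" folds to "x > 0 && y < 2".  The
   VEC_COND_EXPR variants use BIT_AND_EXPR/BIT_IOR_EXPR and an all-ones
   constant in place of 1.  */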
12468 case CALL_EXPR:
12469 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12470 of fold_ternary on them. */
12471 gcc_unreachable ();
12473 case BIT_FIELD_REF:
12474 if (TREE_CODE (arg0) == VECTOR_CST
12475 && (type == TREE_TYPE (TREE_TYPE (arg0))
12476 || (VECTOR_TYPE_P (type)
12477 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
12478 && tree_fits_uhwi_p (op1)
12479 && tree_fits_uhwi_p (op2))
12481 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
12482 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
12483 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
12484 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
12486 if (n != 0
12487 && (idx % width) == 0
12488 && (n % width) == 0
12489 && known_le ((idx + n) / width,
12490 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
12492 idx = idx / width;
12493 n = n / width;
12495 if (TREE_CODE (arg0) == VECTOR_CST)
12497 if (n == 1)
12499 tem = VECTOR_CST_ELT (arg0, idx);
12500 if (VECTOR_TYPE_P (type))
12501 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
12502 return tem;
12505 tree_vector_builder vals (type, n, 1);
12506 for (unsigned i = 0; i < n; ++i)
12507 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
12508 return vals.build ();
12513 /* On constants we can use native encode/interpret to constant
12514 fold (nearly) all BIT_FIELD_REFs. */
12515 if (CONSTANT_CLASS_P (arg0)
12516 && can_native_interpret_type_p (type)
12517 && BITS_PER_UNIT == 8
12518 && tree_fits_uhwi_p (op1)
12519 && tree_fits_uhwi_p (op2))
12521 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12522 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
12523 /* Limit ourselves to a reasonable amount of work. To relax the
12524 other limitations we would need bit-shifting of the buffer
12525 and rounding up the size. */
12526 if (bitpos % BITS_PER_UNIT == 0
12527 && bitsize % BITS_PER_UNIT == 0
12528 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
12530 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
12531 unsigned HOST_WIDE_INT len
12532 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
12533 bitpos / BITS_PER_UNIT);
12534 if (len > 0
12535 && len * BITS_PER_UNIT >= bitsize)
12537 tree v = native_interpret_expr (type, b,
12538 bitsize / BITS_PER_UNIT);
12539 if (v)
12540 return v;
12545 return NULL_TREE;
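/* Worked example for the native encode/interpret path above
   (illustrative, assuming BITS_PER_UNIT == 8 and a little-endian
   target): BIT_FIELD_REF <c, 8, 8> with the 32-bit constant
   c == 0x04030201 encodes c into the buffer starting at byte offset
   8 / 8 == 1, giving b == { 0x02 }, and native_interpret_expr then
   yields the 8-bit constant 2.  */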
12547 case VEC_PERM_EXPR:
12548 /* Perform constant folding of VEC_PERM_EXPR. */
12549 if (TREE_CODE (arg2) == VECTOR_CST
12550 && TREE_CODE (op0) == VECTOR_CST
12551 && TREE_CODE (op1) == VECTOR_CST)
12553 /* Build a vector of integers from the tree mask. */
12554 vec_perm_builder builder;
12555 if (!tree_to_vec_perm_builder (&builder, arg2))
12556 return NULL_TREE;
12558 /* Create a vec_perm_indices for the integer vector. */
12559 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
12560 bool single_arg = (op0 == op1);
12561 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
12562 return fold_vec_perm (type, op0, op1, sel);
12564 return NULL_TREE;
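/* Worked example (illustrative): with op0 == { 1, 2, 3, 4 },
   op1 == { 5, 6, 7, 8 } and the constant selector { 0, 4, 1, 5 },
   element I of the result comes from op0 when sel[I] < 4 and from op1
   otherwise, so the VEC_PERM_EXPR folds to the constant vector
   { 1, 5, 2, 6 }.  */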
12566 case BIT_INSERT_EXPR:
12567 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
12568 if (TREE_CODE (arg0) == INTEGER_CST
12569 && TREE_CODE (arg1) == INTEGER_CST)
12571 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12572 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
12573 wide_int tem = (wi::to_wide (arg0)
12574 & wi::shifted_mask (bitpos, bitsize, true,
12575 TYPE_PRECISION (type)));
12576 wide_int tem2
12577 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
12578 bitsize), bitpos);
12579 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
12581 else if (TREE_CODE (arg0) == VECTOR_CST
12582 && CONSTANT_CLASS_P (arg1)
12583 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
12584 TREE_TYPE (arg1)))
12586 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12587 unsigned HOST_WIDE_INT elsize
12588 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
12589 if (bitpos % elsize == 0)
12591 unsigned k = bitpos / elsize;
12592 unsigned HOST_WIDE_INT nelts;
12593 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
12594 return arg0;
12595 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
12597 tree_vector_builder elts (type, nelts, 1);
12598 elts.quick_grow (nelts);
12599 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
12600 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
12601 return elts.build ();
12605 return NULL_TREE;
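/* Worked example for the INTEGER_CST case above (illustrative,
   assuming a 32-bit type): inserting the 8-bit constant 0xab at bit
   position 8 into 0x11223344 computes

     tem  == 0x11223344 & ~(0xff << 8)  == 0x11220044
     tem2 == zext (0xab, 8) << 8        == 0x0000ab00

   and the folded result is tem | tem2 == 0x1122ab44.  */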
12607 default:
12608 return NULL_TREE;
12609 } /* switch (code) */
12612 /* Get the element at ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
12613 of an array (or vector). *CTOR_IDX, if non-NULL, is updated with the
12614 constructor element index of the value returned. If the element is
12615 not found, NULL_TREE is returned and *CTOR_IDX is updated to
12616 the index of the element after the ACCESS_INDEX position (which
12617 may be outside of the CTOR array). */
12619 tree
12620 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
12621 unsigned *ctor_idx)
12623 tree index_type = NULL_TREE;
12624 signop index_sgn = UNSIGNED;
12625 offset_int low_bound = 0;
12627 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
12629 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
12630 if (domain_type && TYPE_MIN_VALUE (domain_type))
12632 /* Static constructors for variably sized objects make no sense. */
12633 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
12634 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
12635 /* ??? When it is obvious that the range is signed, treat it so. */
12636 if (TYPE_UNSIGNED (index_type)
12637 && TYPE_MAX_VALUE (domain_type)
12638 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
12639 TYPE_MIN_VALUE (domain_type)))
12641 index_sgn = SIGNED;
12642 low_bound
12643 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
12644 SIGNED);
12646 else
12648 index_sgn = TYPE_SIGN (index_type);
12649 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
12654 if (index_type)
12655 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
12656 index_sgn);
12658 offset_int index = low_bound;
12659 if (index_type)
12660 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12662 offset_int max_index = index;
12663 unsigned cnt;
12664 tree cfield, cval;
12665 bool first_p = true;
12667 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
12669 /* An array constructor might explicitly set the index, specify a range,
12670 or leave the index NULL, meaning that it is the next index after the
12671 previous one. */
12672 if (cfield)
12674 if (TREE_CODE (cfield) == INTEGER_CST)
12675 max_index = index
12676 = offset_int::from (wi::to_wide (cfield), index_sgn);
12677 else
12679 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
12680 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
12681 index_sgn);
12682 max_index
12683 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
12684 index_sgn);
12685 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
12688 else if (!first_p)
12690 index = max_index + 1;
12691 if (index_type)
12692 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12693 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
12694 max_index = index;
12696 else
12697 first_p = false;
12699 /* Do we have a match? */
12700 if (wi::cmp (access_index, index, index_sgn) >= 0)
12702 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
12704 if (ctor_idx)
12705 *ctor_idx = cnt;
12706 return cval;
12709 else if (in_gimple_form)
12710 /* We're past the element we are searching for. Note that during
12711 parsing the elements might not be sorted.
12712 ??? We should use a binary search and a flag on the
12713 CONSTRUCTOR as to whether elements are sorted in declaration
12714 order. */
12715 break;
12717 if (ctor_idx)
12718 *ctor_idx = cnt;
12719 return NULL_TREE;
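/* Usage sketch (hypothetical values): for the initializer
   { [0] = 10, [2 ... 4] = 20 }, a lookup of index 3 falls inside the
   RANGE_EXPR element and returns 20 with *CTOR_IDX == 1, while in
   GIMPLE form a lookup of index 1 returns NULL_TREE with
   *CTOR_IDX == 1, the position of the element following the requested
   index.  */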
12722 /* Perform constant folding and related simplification of EXPR.
12723 The related simplifications include x*1 => x, x*0 => 0, etc.,
12724 and application of the associative law.
12725 NOP_EXPR conversions may be removed freely (as long as we
12726 are careful not to change the type of the overall expression).
12727 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12728 but we can constant-fold them if they have constant operands. */
12730 #ifdef ENABLE_FOLD_CHECKING
12731 # define fold(x) fold_1 (x)
12732 static tree fold_1 (tree);
12733 static
12734 #endif
12735 tree
12736 fold (tree expr)
12738 const tree t = expr;
12739 enum tree_code code = TREE_CODE (t);
12740 enum tree_code_class kind = TREE_CODE_CLASS (code);
12741 tree tem;
12742 location_t loc = EXPR_LOCATION (expr);
12744 /* Return right away if a constant. */
12745 if (kind == tcc_constant)
12746 return t;
12748 /* CALL_EXPR-like objects with variable numbers of operands are
12749 treated specially. */
12750 if (kind == tcc_vl_exp)
12752 if (code == CALL_EXPR)
12754 tem = fold_call_expr (loc, expr, false);
12755 return tem ? tem : expr;
12757 return expr;
12760 if (IS_EXPR_CODE_CLASS (kind))
12762 tree type = TREE_TYPE (t);
12763 tree op0, op1, op2;
12765 switch (TREE_CODE_LENGTH (code))
12767 case 1:
12768 op0 = TREE_OPERAND (t, 0);
12769 tem = fold_unary_loc (loc, code, type, op0);
12770 return tem ? tem : expr;
12771 case 2:
12772 op0 = TREE_OPERAND (t, 0);
12773 op1 = TREE_OPERAND (t, 1);
12774 tem = fold_binary_loc (loc, code, type, op0, op1);
12775 return tem ? tem : expr;
12776 case 3:
12777 op0 = TREE_OPERAND (t, 0);
12778 op1 = TREE_OPERAND (t, 1);
12779 op2 = TREE_OPERAND (t, 2);
12780 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12781 return tem ? tem : expr;
12782 default:
12783 break;
12787 switch (code)
12789 case ARRAY_REF:
12791 tree op0 = TREE_OPERAND (t, 0);
12792 tree op1 = TREE_OPERAND (t, 1);
12794 if (TREE_CODE (op1) == INTEGER_CST
12795 && TREE_CODE (op0) == CONSTRUCTOR
12796 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12798 tree val = get_array_ctor_element_at_index (op0,
12799 wi::to_offset (op1));
12800 if (val)
12801 return val;
12804 return t;
12807 /* Return a VECTOR_CST if possible. */
12808 case CONSTRUCTOR:
12810 tree type = TREE_TYPE (t);
12811 if (TREE_CODE (type) != VECTOR_TYPE)
12812 return t;
12814 unsigned i;
12815 tree val;
12816 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12817 if (! CONSTANT_CLASS_P (val))
12818 return t;
12820 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12823 case CONST_DECL:
12824 return fold (DECL_INITIAL (t));
12826 default:
12827 return t;
12828 } /* switch (code) */
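/* For example, fold applied to the GENERIC tree PLUS_EXPR <1, 2> goes
   through the TREE_CODE_LENGTH == 2 arm above into fold_binary_loc and
   comes back as the INTEGER_CST 3; an expression that cannot be
   simplified is returned unchanged.  */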
12831 #ifdef ENABLE_FOLD_CHECKING
12832 #undef fold
12834 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12835 hash_table<nofree_ptr_hash<const tree_node> > *);
12836 static void fold_check_failed (const_tree, const_tree);
12837 void print_fold_checksum (const_tree);
12839 /* When --enable-checking=fold, compute a digest of EXPR before
12840 and after the actual fold call, to verify that fold did not
12841 accidentally change the original expr. */
12843 tree
12844 fold (tree expr)
12846 tree ret;
12847 struct md5_ctx ctx;
12848 unsigned char checksum_before[16], checksum_after[16];
12849 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12851 md5_init_ctx (&ctx);
12852 fold_checksum_tree (expr, &ctx, &ht);
12853 md5_finish_ctx (&ctx, checksum_before);
12854 ht.empty ();
12856 ret = fold_1 (expr);
12858 md5_init_ctx (&ctx);
12859 fold_checksum_tree (expr, &ctx, &ht);
12860 md5_finish_ctx (&ctx, checksum_after);
12862 if (memcmp (checksum_before, checksum_after, 16))
12863 fold_check_failed (expr, ret);
12865 return ret;
12868 void
12869 print_fold_checksum (const_tree expr)
12871 struct md5_ctx ctx;
12872 unsigned char checksum[16], cnt;
12873 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12875 md5_init_ctx (&ctx);
12876 fold_checksum_tree (expr, &ctx, &ht);
12877 md5_finish_ctx (&ctx, checksum);
12878 for (cnt = 0; cnt < 16; ++cnt)
12879 fprintf (stderr, "%02x", checksum[cnt]);
12880 putc ('\n', stderr);
12883 static void
12884 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12886 internal_error ("fold check: original tree changed by fold");
12889 static void
12890 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12891 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12893 const tree_node **slot;
12894 enum tree_code code;
12895 union tree_node *buf;
12896 int i, len;
12898 recursive_label:
12899 if (expr == NULL)
12900 return;
12901 slot = ht->find_slot (expr, INSERT);
12902 if (*slot != NULL)
12903 return;
12904 *slot = expr;
12905 code = TREE_CODE (expr);
12906 if (TREE_CODE_CLASS (code) == tcc_declaration
12907 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12909 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12910 size_t sz = tree_size (expr);
12911 buf = XALLOCAVAR (union tree_node, sz);
12912 memcpy ((char *) buf, expr, sz);
12913 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
12914 buf->decl_with_vis.symtab_node = NULL;
12915 buf->base.nowarning_flag = 0;
12916 expr = (tree) buf;
12918 else if (TREE_CODE_CLASS (code) == tcc_type
12919 && (TYPE_POINTER_TO (expr)
12920 || TYPE_REFERENCE_TO (expr)
12921 || TYPE_CACHED_VALUES_P (expr)
12922 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12923 || TYPE_NEXT_VARIANT (expr)
12924 || TYPE_ALIAS_SET_KNOWN_P (expr)))
12926 /* Allow these fields to be modified. */
12927 tree tmp;
12928 size_t sz = tree_size (expr);
12929 buf = XALLOCAVAR (union tree_node, sz);
12930 memcpy ((char *) buf, expr, sz);
12931 expr = tmp = (tree) buf;
12932 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12933 TYPE_POINTER_TO (tmp) = NULL;
12934 TYPE_REFERENCE_TO (tmp) = NULL;
12935 TYPE_NEXT_VARIANT (tmp) = NULL;
12936 TYPE_ALIAS_SET (tmp) = -1;
12937 if (TYPE_CACHED_VALUES_P (tmp))
12939 TYPE_CACHED_VALUES_P (tmp) = 0;
12940 TYPE_CACHED_VALUES (tmp) = NULL;
12943 else if (TREE_NO_WARNING (expr) && (DECL_P (expr) || EXPR_P (expr)))
12945 /* Allow TREE_NO_WARNING to be set. Perhaps we shouldn't allow that
12946 and should change builtins.c etc. instead - see PR89543. */
12947 size_t sz = tree_size (expr);
12948 buf = XALLOCAVAR (union tree_node, sz);
12949 memcpy ((char *) buf, expr, sz);
12950 buf->base.nowarning_flag = 0;
12951 expr = (tree) buf;
12953 md5_process_bytes (expr, tree_size (expr), ctx);
12954 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12955 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12956 if (TREE_CODE_CLASS (code) != tcc_type
12957 && TREE_CODE_CLASS (code) != tcc_declaration
12958 && code != TREE_LIST
12959 && code != SSA_NAME
12960 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12961 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12962 switch (TREE_CODE_CLASS (code))
12964 case tcc_constant:
12965 switch (code)
12967 case STRING_CST:
12968 md5_process_bytes (TREE_STRING_POINTER (expr),
12969 TREE_STRING_LENGTH (expr), ctx);
12970 break;
12971 case COMPLEX_CST:
12972 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12973 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12974 break;
12975 case VECTOR_CST:
12976 len = vector_cst_encoded_nelts (expr);
12977 for (i = 0; i < len; ++i)
12978 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
12979 break;
12980 default:
12981 break;
12983 break;
12984 case tcc_exceptional:
12985 switch (code)
12987 case TREE_LIST:
12988 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12989 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12990 expr = TREE_CHAIN (expr);
12991 goto recursive_label;
12992 break;
12993 case TREE_VEC:
12994 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12995 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12996 break;
12997 default:
12998 break;
13000 break;
13001 case tcc_expression:
13002 case tcc_reference:
13003 case tcc_comparison:
13004 case tcc_unary:
13005 case tcc_binary:
13006 case tcc_statement:
13007 case tcc_vl_exp:
13008 len = TREE_OPERAND_LENGTH (expr);
13009 for (i = 0; i < len; ++i)
13010 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13011 break;
13012 case tcc_declaration:
13013 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13014 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13015 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13017 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13018 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13019 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13020 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13021 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13024 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13026 if (TREE_CODE (expr) == FUNCTION_DECL)
13028 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13029 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13031 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13033 break;
13034 case tcc_type:
13035 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13036 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13037 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13038 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13039 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13040 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13041 if (INTEGRAL_TYPE_P (expr)
13042 || SCALAR_FLOAT_TYPE_P (expr))
13044 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13045 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13047 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13048 if (TREE_CODE (expr) == RECORD_TYPE
13049 || TREE_CODE (expr) == UNION_TYPE
13050 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13051 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13052 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13053 break;
13054 default:
13055 break;
13059 /* Helper function for outputting the checksum of a tree T. When
13060 debugging with gdb, you can "define mynext" to be "next" followed
13061 by "call debug_fold_checksum (op0)", then just trace down till the
13062 outputs differ. */
13064 DEBUG_FUNCTION void
13065 debug_fold_checksum (const_tree t)
13067 int i;
13068 unsigned char checksum[16];
13069 struct md5_ctx ctx;
13070 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13072 md5_init_ctx (&ctx);
13073 fold_checksum_tree (t, &ctx, &ht);
13074 md5_finish_ctx (&ctx, checksum);
13075 ht.empty ();
13077 for (i = 0; i < 16; i++)
13078 fprintf (stderr, "%d ", checksum[i]);
13080 fprintf (stderr, "\n");
13083 #endif
13085 /* Fold a unary tree expression with code CODE of type TYPE with an
13086 operand OP0. LOC is the location of the resulting expression.
13087 Return a folded expression if successful. Otherwise, return a tree
13088 expression with code CODE of type TYPE with an operand OP0. */
13090 tree
13091 fold_build1_loc (location_t loc,
13092 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13094 tree tem;
13095 #ifdef ENABLE_FOLD_CHECKING
13096 unsigned char checksum_before[16], checksum_after[16];
13097 struct md5_ctx ctx;
13098 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13100 md5_init_ctx (&ctx);
13101 fold_checksum_tree (op0, &ctx, &ht);
13102 md5_finish_ctx (&ctx, checksum_before);
13103 ht.empty ();
13104 #endif
13106 tem = fold_unary_loc (loc, code, type, op0);
13107 if (!tem)
13108 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13110 #ifdef ENABLE_FOLD_CHECKING
13111 md5_init_ctx (&ctx);
13112 fold_checksum_tree (op0, &ctx, &ht);
13113 md5_finish_ctx (&ctx, checksum_after);
13115 if (memcmp (checksum_before, checksum_after, 16))
13116 fold_check_failed (op0, tem);
13117 #endif
13118 return tem;
13121 /* Fold a binary tree expression with code CODE of type TYPE with
13122 operands OP0 and OP1. LOC is the location of the resulting
13123 expression. Return a folded expression if successful. Otherwise,
13124 return a tree expression with code CODE of type TYPE with operands
13125 OP0 and OP1. */
13127 tree
13128 fold_build2_loc (location_t loc,
13129 enum tree_code code, tree type, tree op0, tree op1
13130 MEM_STAT_DECL)
13132 tree tem;
13133 #ifdef ENABLE_FOLD_CHECKING
13134 unsigned char checksum_before_op0[16],
13135 checksum_before_op1[16],
13136 checksum_after_op0[16],
13137 checksum_after_op1[16];
13138 struct md5_ctx ctx;
13139 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13141 md5_init_ctx (&ctx);
13142 fold_checksum_tree (op0, &ctx, &ht);
13143 md5_finish_ctx (&ctx, checksum_before_op0);
13144 ht.empty ();
13146 md5_init_ctx (&ctx);
13147 fold_checksum_tree (op1, &ctx, &ht);
13148 md5_finish_ctx (&ctx, checksum_before_op1);
13149 ht.empty ();
13150 #endif
13152 tem = fold_binary_loc (loc, code, type, op0, op1);
13153 if (!tem)
13154 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13156 #ifdef ENABLE_FOLD_CHECKING
13157 md5_init_ctx (&ctx);
13158 fold_checksum_tree (op0, &ctx, &ht);
13159 md5_finish_ctx (&ctx, checksum_after_op0);
13160 ht.empty ();
13162 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13163 fold_check_failed (op0, tem);
13165 md5_init_ctx (&ctx);
13166 fold_checksum_tree (op1, &ctx, &ht);
13167 md5_finish_ctx (&ctx, checksum_after_op1);
13169 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13170 fold_check_failed (op1, tem);
13171 #endif
13172 return tem;
13175 /* Fold a ternary tree expression with code CODE of type TYPE with
13176 operands OP0, OP1, and OP2. Return a folded expression if
13177 successful. Otherwise, return a tree expression with code CODE of
13178 type TYPE with operands OP0, OP1, and OP2. */
13180 tree
13181 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13182 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13184 tree tem;
13185 #ifdef ENABLE_FOLD_CHECKING
13186 unsigned char checksum_before_op0[16],
13187 checksum_before_op1[16],
13188 checksum_before_op2[16],
13189 checksum_after_op0[16],
13190 checksum_after_op1[16],
13191 checksum_after_op2[16];
13192 struct md5_ctx ctx;
13193 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13195 md5_init_ctx (&ctx);
13196 fold_checksum_tree (op0, &ctx, &ht);
13197 md5_finish_ctx (&ctx, checksum_before_op0);
13198 ht.empty ();
13200 md5_init_ctx (&ctx);
13201 fold_checksum_tree (op1, &ctx, &ht);
13202 md5_finish_ctx (&ctx, checksum_before_op1);
13203 ht.empty ();
13205 md5_init_ctx (&ctx);
13206 fold_checksum_tree (op2, &ctx, &ht);
13207 md5_finish_ctx (&ctx, checksum_before_op2);
13208 ht.empty ();
13209 #endif
13211 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13212 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13213 if (!tem)
13214 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13216 #ifdef ENABLE_FOLD_CHECKING
13217 md5_init_ctx (&ctx);
13218 fold_checksum_tree (op0, &ctx, &ht);
13219 md5_finish_ctx (&ctx, checksum_after_op0);
13220 ht.empty ();
13222 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13223 fold_check_failed (op0, tem);
13225 md5_init_ctx (&ctx);
13226 fold_checksum_tree (op1, &ctx, &ht);
13227 md5_finish_ctx (&ctx, checksum_after_op1);
13228 ht.empty ();
13230 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13231 fold_check_failed (op1, tem);
13233 md5_init_ctx (&ctx);
13234 fold_checksum_tree (op2, &ctx, &ht);
13235 md5_finish_ctx (&ctx, checksum_after_op2);
13237 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13238 fold_check_failed (op2, tem);
13239 #endif
13240 return tem;
13243 /* Fold a CALL_EXPR of type TYPE with function FN and NARGS
13244 arguments in ARGARRAY, and a null static chain.
13245 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13246 of type TYPE from the given operands as constructed by build_call_array. */
13248 tree
13249 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13250 int nargs, tree *argarray)
13252 tree tem;
13253 #ifdef ENABLE_FOLD_CHECKING
13254 unsigned char checksum_before_fn[16],
13255 checksum_before_arglist[16],
13256 checksum_after_fn[16],
13257 checksum_after_arglist[16];
13258 struct md5_ctx ctx;
13259 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13260 int i;
13262 md5_init_ctx (&ctx);
13263 fold_checksum_tree (fn, &ctx, &ht);
13264 md5_finish_ctx (&ctx, checksum_before_fn);
13265 ht.empty ();
13267 md5_init_ctx (&ctx);
13268 for (i = 0; i < nargs; i++)
13269 fold_checksum_tree (argarray[i], &ctx, &ht);
13270 md5_finish_ctx (&ctx, checksum_before_arglist);
13271 ht.empty ();
13272 #endif
13274 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13275 if (!tem)
13276 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13278 #ifdef ENABLE_FOLD_CHECKING
13279 md5_init_ctx (&ctx);
13280 fold_checksum_tree (fn, &ctx, &ht);
13281 md5_finish_ctx (&ctx, checksum_after_fn);
13282 ht.empty ();
13284 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13285 fold_check_failed (fn, tem);
13287 md5_init_ctx (&ctx);
13288 for (i = 0; i < nargs; i++)
13289 fold_checksum_tree (argarray[i], &ctx, &ht);
13290 md5_finish_ctx (&ctx, checksum_after_arglist);
13292 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13293 fold_check_failed (NULL_TREE, tem);
13294 #endif
13295 return tem;
13298 /* Perform constant folding and related simplification of initializer
13299 expression EXPR. These behave identically to "fold_buildN" but ignore
13300 potential run-time traps and exceptions that fold must preserve. */
13302 #define START_FOLD_INIT \
13303 int saved_signaling_nans = flag_signaling_nans;\
13304 int saved_trapping_math = flag_trapping_math;\
13305 int saved_rounding_math = flag_rounding_math;\
13306 int saved_trapv = flag_trapv;\
13307 int saved_folding_initializer = folding_initializer;\
13308 flag_signaling_nans = 0;\
13309 flag_trapping_math = 0;\
13310 flag_rounding_math = 0;\
13311 flag_trapv = 0;\
13312 folding_initializer = 1;
13314 #define END_FOLD_INIT \
13315 flag_signaling_nans = saved_signaling_nans;\
13316 flag_trapping_math = saved_trapping_math;\
13317 flag_rounding_math = saved_rounding_math;\
13318 flag_trapv = saved_trapv;\
13319 folding_initializer = saved_folding_initializer;
13321 tree
13322 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13323 tree type, tree op)
13325 tree result;
13326 START_FOLD_INIT;
13328 result = fold_build1_loc (loc, code, type, op);
13330 END_FOLD_INIT;
13331 return result;
13334 tree
13335 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13336 tree type, tree op0, tree op1)
13338 tree result;
13339 START_FOLD_INIT;
13341 result = fold_build2_loc (loc, code, type, op0, op1);
13343 END_FOLD_INIT;
13344 return result;
13347 tree
13348 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
13349 int nargs, tree *argarray)
13351 tree result;
13352 START_FOLD_INIT;
13354 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
13356 END_FOLD_INIT;
13357 return result;
13360 #undef START_FOLD_INIT
13361 #undef END_FOLD_INIT
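/* For illustration: the initializer variants matter for code like

     static double x = 1.0 / 3.0;

   which must be folded at compile time even under -frounding-math or
   -ftrapping-math, where the plain fold_buildN would decline to fold
   the inexact division in order to preserve the run-time rounding mode
   or trap.  */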
13363 /* Determine if the first argument is a multiple of the second argument.
13364 Return 0 if it is not, or if we cannot easily determine it to be.
13366 An example of the sort of thing we care about (at this point; this routine
13367 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13368 fold cases do now) is discovering that
13370 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13372 is a multiple of
13374 SAVE_EXPR (J * 8)
13376 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13378 This code also handles discovering that
13380 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13382 is a multiple of 8 so we don't have to worry about dealing with a
13383 possible remainder.
13385 Note that we *look* inside a SAVE_EXPR only to determine how it was
13386 calculated; it is not safe for fold to do much of anything else with the
13387 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13388 at run time. For example, the latter example above *cannot* be implemented
13389 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13390 evaluation time of the original SAVE_EXPR is not necessarily the same at
13391 the time the new expression is evaluated. The only optimization of this
13392 sort that would be valid is changing
13394 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13396 divided by 8 to
13398 SAVE_EXPR (I) * SAVE_EXPR (J)
13400 (where the same SAVE_EXPR (J) is used in the original and the
13401 transformed version). */
13403 int
13404 multiple_of_p (tree type, const_tree top, const_tree bottom)
13406 gimple *stmt;
13407 tree t1, op1, op2;
13409 if (operand_equal_p (top, bottom, 0))
13410 return 1;
13412 if (TREE_CODE (type) != INTEGER_TYPE)
13413 return 0;
13415 switch (TREE_CODE (top))
13417 case BIT_AND_EXPR:
13418 /* A bitwise AND provides a power-of-two multiple. If the mask is
13419 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13420 if (!integer_pow2p (bottom))
13421 return 0;
13422 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13423 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13425 case MULT_EXPR:
13426 if (TREE_CODE (bottom) == INTEGER_CST)
13428 op1 = TREE_OPERAND (top, 0);
13429 op2 = TREE_OPERAND (top, 1);
13430 if (TREE_CODE (op1) == INTEGER_CST)
13431 std::swap (op1, op2);
13432 if (TREE_CODE (op2) == INTEGER_CST)
13434 if (multiple_of_p (type, op2, bottom))
13435 return 1;
13436 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
13437 if (multiple_of_p (type, bottom, op2))
13439 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
13440 wi::to_widest (op2));
13441 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
13443 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
13444 return multiple_of_p (type, op1, op2);
13447 return multiple_of_p (type, op1, bottom);
13450 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13451 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13453 case MINUS_EXPR:
13454 /* It is impossible to prove precisely whether op0 - op1 is a
13455 multiple of bottom, so be conservative here and check whether
13456 both op0 and op1 are multiples of bottom. Note we check the
13457 second operand first, since it's usually simpler. */
13458 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13459 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13461 case PLUS_EXPR:
13462 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
13463 as op0 - 3 if the expression has unsigned type. For example,
13464 (X / 3) * 3 + 0xfffffffd is a multiple of 3, but 0xfffffffd is not. */
13465 op1 = TREE_OPERAND (top, 1);
13466 if (TYPE_UNSIGNED (type)
13467 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
13468 op1 = fold_build1 (NEGATE_EXPR, type, op1);
13469 return (multiple_of_p (type, op1, bottom)
13470 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13472 case LSHIFT_EXPR:
13473 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13475 op1 = TREE_OPERAND (top, 1);
13476 /* const_binop may not detect overflow correctly,
13477 so check for it explicitly here. */
13478 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
13479 wi::to_wide (op1))
13480 && (t1 = fold_convert (type,
13481 const_binop (LSHIFT_EXPR, size_one_node,
13482 op1))) != 0
13483 && !TREE_OVERFLOW (t1))
13484 return multiple_of_p (type, t1, bottom);
13486 return 0;
13488 case NOP_EXPR:
13489 /* Can't handle conversions from non-integral or wider integral type. */
13490 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13491 || (TYPE_PRECISION (type)
13492 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13493 return 0;
13495 /* fall through */
13497 case SAVE_EXPR:
13498 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13500 case COND_EXPR:
13501 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13502 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
13504 case INTEGER_CST:
13505 if (TREE_CODE (bottom) != INTEGER_CST
13506 || integer_zerop (bottom)
13507 || (TYPE_UNSIGNED (type)
13508 && (tree_int_cst_sgn (top) < 0
13509 || tree_int_cst_sgn (bottom) < 0)))
13510 return 0;
13511 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
13512 SIGNED);
13514 case SSA_NAME:
13515 if (TREE_CODE (bottom) == INTEGER_CST
13516 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
13517 && gimple_code (stmt) == GIMPLE_ASSIGN)
13519 enum tree_code code = gimple_assign_rhs_code (stmt);
13521 /* Check for special cases to see if top is defined as a multiple
13522 of bottom:
13524 top = X & ~(bottom - 1); bottom is a power of 2, or
13528 Y = X % bottom
13529 top = X - Y. */
13530 if (code == BIT_AND_EXPR
13531 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13532 && TREE_CODE (op2) == INTEGER_CST
13533 && integer_pow2p (bottom)
13534 && wi::multiple_of_p (wi::to_widest (op2),
13535 wi::to_widest (bottom), UNSIGNED))
13536 return 1;
13538 op1 = gimple_assign_rhs1 (stmt);
13539 if (code == MINUS_EXPR
13540 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13541 && TREE_CODE (op2) == SSA_NAME
13542 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
13543 && gimple_code (stmt) == GIMPLE_ASSIGN
13544 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
13545 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
13546 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
13547 return 1;
13550 /* fall through */
13552 default:
13553 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
13554 return multiple_p (wi::to_poly_widest (top),
13555 wi::to_poly_widest (bottom));
13557 return 0;
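/* Illustrative calls (hypothetical trees over a 32-bit int TYPE):

     multiple_of_p (type, J * 8, 8)            == 1  (MULT_EXPR case)
     multiple_of_p (type, (X * 2 + 2) * 4, 8)  == 1  (via the
                                                      bottom/op2 split)
     multiple_of_p (type, I + 4, 8)            == 0

   A zero result only means "not provably a multiple", never "provably
   not a multiple".  */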
13561 #define tree_expr_nonnegative_warnv_p(X, Y) \
13562 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13564 #define RECURSE(X) \
13565 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
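/* Thus RECURSE (op0) in the functions below expands to

     (tree_expr_nonnegative_warnv_p) (op0, strict_overflow_p, depth + 1)

   where the parenthesized name suppresses the function-like macro, so
   every recursive step increments DEPTH, while a direct call spelled
   tree_expr_nonnegative_warnv_p (...) is rejected at compile time by
   the _Pragma above.  */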
13567 /* Return true if CODE or TYPE is known to be non-negative. */
13569 static bool
13570 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13572 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13573 && truth_value_p (code))
13574 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13575 have a signed:1 type (where the values are -1 and 0). */
13576 return true;
13577 return false;
13580 /* Return true if (CODE OP0) is known to be non-negative. If the return
13581 value is based on the assumption that signed overflow is undefined,
13582 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13583 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13585 bool
13586 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13587 bool *strict_overflow_p, int depth)
13589 if (TYPE_UNSIGNED (type))
13590 return true;
13592 switch (code)
13594 case ABS_EXPR:
13595 /* We can't return 1 if flag_wrapv is set because
13596 ABS_EXPR<INT_MIN> = INT_MIN. */
13597 if (!ANY_INTEGRAL_TYPE_P (type))
13598 return true;
13599 if (TYPE_OVERFLOW_UNDEFINED (type))
13601 *strict_overflow_p = true;
13602 return true;
13604 break;
13606 case NON_LVALUE_EXPR:
13607 case FLOAT_EXPR:
13608 case FIX_TRUNC_EXPR:
13609 return RECURSE (op0);
13611 CASE_CONVERT:
13613 tree inner_type = TREE_TYPE (op0);
13614 tree outer_type = type;
13616 if (TREE_CODE (outer_type) == REAL_TYPE)
13618 if (TREE_CODE (inner_type) == REAL_TYPE)
13619 return RECURSE (op0);
13620 if (INTEGRAL_TYPE_P (inner_type))
13622 if (TYPE_UNSIGNED (inner_type))
13623 return true;
13624 return RECURSE (op0);
13627 else if (INTEGRAL_TYPE_P (outer_type))
13629 if (TREE_CODE (inner_type) == REAL_TYPE)
13630 return RECURSE (op0);
13631 if (INTEGRAL_TYPE_P (inner_type))
13632 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13633 && TYPE_UNSIGNED (inner_type);
13636 break;
13638 default:
13639 return tree_simple_nonnegative_warnv_p (code, type);
13642 /* We don't know the sign of `t', so be conservative and return false. */
13643 return false;
13646 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13647 value is based on the assumption that signed overflow is undefined,
13648 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13649 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13651 bool
13652 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13653 tree op1, bool *strict_overflow_p,
13654 int depth)
13656 if (TYPE_UNSIGNED (type))
13657 return true;
13659 switch (code)
13661 case POINTER_PLUS_EXPR:
13662 case PLUS_EXPR:
13663 if (FLOAT_TYPE_P (type))
13664 return RECURSE (op0) && RECURSE (op1);
13666 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13667 both unsigned and at least 2 bits shorter than the result. */
13668 if (TREE_CODE (type) == INTEGER_TYPE
13669 && TREE_CODE (op0) == NOP_EXPR
13670 && TREE_CODE (op1) == NOP_EXPR)
13672 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13673 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13674 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13675 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13677 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13678 TYPE_PRECISION (inner2)) + 1;
13679 return prec < TYPE_PRECISION (type);
13682 break;
13684 case MULT_EXPR:
13685 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
13687 /* x * x is always non-negative for floating point x,
13688 or when overflow is undefined. */
13689 if (operand_equal_p (op0, op1, 0)
13690 || (RECURSE (op0) && RECURSE (op1)))
13692 if (ANY_INTEGRAL_TYPE_P (type)
13693 && TYPE_OVERFLOW_UNDEFINED (type))
13694 *strict_overflow_p = true;
13695 return true;
13699 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13700 both unsigned and their combined precision is less than that of the result. */
13701 if (TREE_CODE (type) == INTEGER_TYPE
13702 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
13703 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
13705 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
13706 ? TREE_TYPE (TREE_OPERAND (op0, 0))
13707 : TREE_TYPE (op0);
13708 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
13709 ? TREE_TYPE (TREE_OPERAND (op1, 0))
13710 : TREE_TYPE (op1);
13712 bool unsigned0 = TYPE_UNSIGNED (inner0);
13713 bool unsigned1 = TYPE_UNSIGNED (inner1);
13715 if (TREE_CODE (op0) == INTEGER_CST)
13716 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
13718 if (TREE_CODE (op1) == INTEGER_CST)
13719 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
13721 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
13722 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
13724 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
13725 ? tree_int_cst_min_precision (op0, UNSIGNED)
13726 : TYPE_PRECISION (inner0);
13728 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
13729 ? tree_int_cst_min_precision (op1, UNSIGNED)
13730 : TYPE_PRECISION (inner1);
13732 return precision0 + precision1 < TYPE_PRECISION (type);
13735 return false;
13737 case BIT_AND_EXPR:
13738 case MAX_EXPR:
13739 return RECURSE (op0) || RECURSE (op1);
13741 case BIT_IOR_EXPR:
13742 case BIT_XOR_EXPR:
13743 case MIN_EXPR:
13744 case RDIV_EXPR:
13745 case TRUNC_DIV_EXPR:
13746 case CEIL_DIV_EXPR:
13747 case FLOOR_DIV_EXPR:
13748 case ROUND_DIV_EXPR:
13749 return RECURSE (op0) && RECURSE (op1);
13751 case TRUNC_MOD_EXPR:
13752 return RECURSE (op0);
13754 case FLOOR_MOD_EXPR:
13755 return RECURSE (op1);
13757 case CEIL_MOD_EXPR:
13758 case ROUND_MOD_EXPR:
13759 default:
13760 return tree_simple_nonnegative_warnv_p (code, type);
13763 /* We don't know the sign of `t', so be conservative and return false. */
13764 return false;
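/* Worked example for the PLUS_EXPR zero-extension rule above
   (illustrative): adding two unsigned 8-bit values extended to a
   32-bit int gives prec == MAX (8, 8) + 1 == 9 < 32, and indeed the
   sum is at most 255 + 255 == 510, which always stays within the
   non-negative range of the result.  */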
13767 /* Return true if T is known to be non-negative. If the return
13768 value is based on the assumption that signed overflow is undefined,
13769 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13770 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13772 bool
13773 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13775 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13776 return true;
13778 switch (TREE_CODE (t))
13780 case INTEGER_CST:
13781 return tree_int_cst_sgn (t) >= 0;
13783 case REAL_CST:
13784 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13786 case FIXED_CST:
13787 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13789 case COND_EXPR:
13790 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13792 case SSA_NAME:
13793 /* Limit the depth of recursion to avoid quadratic behavior.
13794 This is expected to catch almost all occurrences in practice.
13795 If this code misses important cases that unbounded recursion
13796 would not, passes that need this information could be revised
13797 to provide it through dataflow propagation. */
13798 return (!name_registered_for_update_p (t)
13799 && depth < param_max_ssa_name_query_depth
13800 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
13801 strict_overflow_p, depth));
13803 default:
13804 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13808 /* Return true if T is known to be non-negative. If the return
13809 value is based on the assumption that signed overflow is undefined,
13810 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13811 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13813 bool
13814 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13815 bool *strict_overflow_p, int depth)
13817 switch (fn)
13819 CASE_CFN_ACOS:
13820 CASE_CFN_ACOSH:
13821 CASE_CFN_CABS:
13822 CASE_CFN_COSH:
13823 CASE_CFN_ERFC:
13824 CASE_CFN_EXP:
13825 CASE_CFN_EXP10:
13826 CASE_CFN_EXP2:
13827 CASE_CFN_FABS:
13828 CASE_CFN_FDIM:
13829 CASE_CFN_HYPOT:
13830 CASE_CFN_POW10:
13831 CASE_CFN_FFS:
13832 CASE_CFN_PARITY:
13833 CASE_CFN_POPCOUNT:
13834 CASE_CFN_CLZ:
13835 CASE_CFN_CLRSB:
13836 case CFN_BUILT_IN_BSWAP32:
13837 case CFN_BUILT_IN_BSWAP64:
13838 /* Always true. */
13839 return true;
13841 CASE_CFN_SQRT:
13842 CASE_CFN_SQRT_FN:
13843 /* sqrt(-0.0) is -0.0. */
13844 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13845 return true;
13846 return RECURSE (arg0);
13848 CASE_CFN_ASINH:
13849 CASE_CFN_ATAN:
13850 CASE_CFN_ATANH:
13851 CASE_CFN_CBRT:
13852 CASE_CFN_CEIL:
13853 CASE_CFN_CEIL_FN:
13854 CASE_CFN_ERF:
13855 CASE_CFN_EXPM1:
13856 CASE_CFN_FLOOR:
13857 CASE_CFN_FLOOR_FN:
13858 CASE_CFN_FMOD:
13859 CASE_CFN_FREXP:
13860 CASE_CFN_ICEIL:
13861 CASE_CFN_IFLOOR:
13862 CASE_CFN_IRINT:
13863 CASE_CFN_IROUND:
13864 CASE_CFN_LCEIL:
13865 CASE_CFN_LDEXP:
13866 CASE_CFN_LFLOOR:
13867 CASE_CFN_LLCEIL:
13868 CASE_CFN_LLFLOOR:
13869 CASE_CFN_LLRINT:
13870 CASE_CFN_LLROUND:
13871 CASE_CFN_LRINT:
13872 CASE_CFN_LROUND:
13873 CASE_CFN_MODF:
13874 CASE_CFN_NEARBYINT:
13875 CASE_CFN_NEARBYINT_FN:
13876 CASE_CFN_RINT:
13877 CASE_CFN_RINT_FN:
13878 CASE_CFN_ROUND:
13879 CASE_CFN_ROUND_FN:
13880 CASE_CFN_ROUNDEVEN:
13881 CASE_CFN_ROUNDEVEN_FN:
13882 CASE_CFN_SCALB:
13883 CASE_CFN_SCALBLN:
13884 CASE_CFN_SCALBN:
13885 CASE_CFN_SIGNBIT:
13886 CASE_CFN_SIGNIFICAND:
13887 CASE_CFN_SINH:
13888 CASE_CFN_TANH:
13889 CASE_CFN_TRUNC:
13890 CASE_CFN_TRUNC_FN:
13891 /* True if the 1st argument is nonnegative. */
13892 return RECURSE (arg0);
13894 CASE_CFN_FMAX:
13895 CASE_CFN_FMAX_FN:
13896 /* True if the 1st OR 2nd arguments are nonnegative. */
13897 return RECURSE (arg0) || RECURSE (arg1);
13899 CASE_CFN_FMIN:
13900 CASE_CFN_FMIN_FN:
13901 /* True if the 1st AND 2nd arguments are nonnegative. */
13902 return RECURSE (arg0) && RECURSE (arg1);
13904 CASE_CFN_COPYSIGN:
13905 CASE_CFN_COPYSIGN_FN:
13906 /* True if the 2nd argument is nonnegative. */
13907 return RECURSE (arg1);
13909 CASE_CFN_POWI:
13910 /* True if the 1st argument is nonnegative or the second
13911 argument is an even integer. */
13912 if (TREE_CODE (arg1) == INTEGER_CST
13913 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13914 return true;
13915 return RECURSE (arg0);
13917 CASE_CFN_POW:
13918 /* True if the 1st argument is nonnegative or the second
13919 argument is an even integer valued real. */
13920 if (TREE_CODE (arg1) == REAL_CST)
13922 REAL_VALUE_TYPE c;
13923 HOST_WIDE_INT n;
13925 c = TREE_REAL_CST (arg1);
13926 n = real_to_integer (&c);
13927 if ((n & 1) == 0)
13929 REAL_VALUE_TYPE cint;
13930 real_from_integer (&cint, VOIDmode, n, SIGNED);
13931 if (real_identical (&c, &cint))
13932 return true;
13935 return RECURSE (arg0);
13937 default:
13938 break;
13940 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
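/* A few illustrative consequences of the cases above:

     pow (x, 2.0)        is non-negative for any X, because 2.0 is an
                         even integer valued real;
     copysign (x, y)     is non-negative whenever Y is known to be;
     fmax (fabs (x), y)  is non-negative because its first argument is.  */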
13943 /* Return true if T is known to be non-negative. If the return
13944 value is based on the assumption that signed overflow is undefined,
13945 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13946 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13948 static bool
13949 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13951 enum tree_code code = TREE_CODE (t);
13952 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13953 return true;
13955 switch (code)
13957 case TARGET_EXPR:
13959 tree temp = TARGET_EXPR_SLOT (t);
13960 t = TARGET_EXPR_INITIAL (t);
13962 /* If the initializer is non-void, then it's a normal expression
13963 that will be assigned to the slot. */
13964 if (!VOID_TYPE_P (t))
13965 return RECURSE (t);
13967 /* Otherwise, the initializer sets the slot in some way. One common
13968 way is an assignment statement at the end of the initializer. */
13969 while (1)
13971 if (TREE_CODE (t) == BIND_EXPR)
13972 t = expr_last (BIND_EXPR_BODY (t));
13973 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13974 || TREE_CODE (t) == TRY_CATCH_EXPR)
13975 t = expr_last (TREE_OPERAND (t, 0));
13976 else if (TREE_CODE (t) == STATEMENT_LIST)
13977 t = expr_last (t);
13978 else
13979 break;
13981 if (TREE_CODE (t) == MODIFY_EXPR
13982 && TREE_OPERAND (t, 0) == temp)
13983 return RECURSE (TREE_OPERAND (t, 1));
13985 return false;
13988 case CALL_EXPR:
13990 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13991 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13993 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13994 get_call_combined_fn (t),
13995 arg0,
13996 arg1,
13997 strict_overflow_p, depth);
13999 case COMPOUND_EXPR:
14000 case MODIFY_EXPR:
14001 return RECURSE (TREE_OPERAND (t, 1));
14003 case BIND_EXPR:
14004 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
14006 case SAVE_EXPR:
14007 return RECURSE (TREE_OPERAND (t, 0));
14009 default:
14010 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14014 #undef RECURSE
14015 #undef tree_expr_nonnegative_warnv_p
14017 /* Return true if T is known to be non-negative. If the return
14018 value is based on the assumption that signed overflow is undefined,
14019 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14020 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14022 bool
14023 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14025 enum tree_code code;
14026 if (t == error_mark_node)
14027 return false;
14029 code = TREE_CODE (t);
14030 switch (TREE_CODE_CLASS (code))
14032 case tcc_binary:
14033 case tcc_comparison:
14034 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14035 TREE_TYPE (t),
14036 TREE_OPERAND (t, 0),
14037 TREE_OPERAND (t, 1),
14038 strict_overflow_p, depth);
14040 case tcc_unary:
14041 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14042 TREE_TYPE (t),
14043 TREE_OPERAND (t, 0),
14044 strict_overflow_p, depth);
14046 case tcc_constant:
14047 case tcc_declaration:
14048 case tcc_reference:
14049 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14051 default:
14052 break;
14055 switch (code)
14057 case TRUTH_AND_EXPR:
14058 case TRUTH_OR_EXPR:
14059 case TRUTH_XOR_EXPR:
14060 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14061 TREE_TYPE (t),
14062 TREE_OPERAND (t, 0),
14063 TREE_OPERAND (t, 1),
14064 strict_overflow_p, depth);
14065 case TRUTH_NOT_EXPR:
14066 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14067 TREE_TYPE (t),
14068 TREE_OPERAND (t, 0),
14069 strict_overflow_p, depth);
14071 case COND_EXPR:
14072 case CONSTRUCTOR:
14073 case OBJ_TYPE_REF:
14074 case ASSERT_EXPR:
14075 case ADDR_EXPR:
14076 case WITH_SIZE_EXPR:
14077 case SSA_NAME:
14078 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14080 default:
14081 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
14085 /* Return true if `t' is known to be non-negative. Handle warnings
14086 about undefined signed overflow. */
14088 bool
14089 tree_expr_nonnegative_p (tree t)
14091 bool ret, strict_overflow_p;
14093 strict_overflow_p = false;
14094 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14095 if (strict_overflow_p)
14096 fold_overflow_warning (("assuming signed overflow does not occur when "
14097 "determining that expression is always "
14098 "non-negative"),
14099 WARN_STRICT_OVERFLOW_MISC);
14100 return ret;
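/* Usage sketch (hypothetical caller): a fold that wants to rewrite
   ABS_EXPR <t> into plain T can simply test

     if (tree_expr_nonnegative_p (t))
       return t;

   and any "assuming signed overflow does not occur" warning is issued
   on its behalf by the wrapper above.  */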
14104 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14105 For floating point we further ensure that T is not denormal.
14106 Similar logic is present in nonzero_address in rtlanal.h.
14108 If the return value is based on the assumption that signed overflow
14109 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14110 change *STRICT_OVERFLOW_P. */
14112 bool
14113 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14114 bool *strict_overflow_p)
14116 switch (code)
14118 case ABS_EXPR:
14119 return tree_expr_nonzero_warnv_p (op0,
14120 strict_overflow_p);
14122 case NOP_EXPR:
14124 tree inner_type = TREE_TYPE (op0);
14125 tree outer_type = type;
14127 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14128 && tree_expr_nonzero_warnv_p (op0,
14129 strict_overflow_p));
14131 break;
14133 case NON_LVALUE_EXPR:
14134 return tree_expr_nonzero_warnv_p (op0,
14135 strict_overflow_p);
14137 default:
14138 break;
14141 return false;
14144 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14145 For floating point we further ensure that T is not denormal.
14146 Similar logic is present in nonzero_address in rtlanal.h.
14148 If the return value is based on the assumption that signed overflow
14149 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14150 change *STRICT_OVERFLOW_P. */
14152 bool
14153 tree_binary_nonzero_warnv_p (enum tree_code code,
14154 tree type,
14155 tree op0,
14156 tree op1, bool *strict_overflow_p)
14158 bool sub_strict_overflow_p;
14159 switch (code)
14161 case POINTER_PLUS_EXPR:
14162 case PLUS_EXPR:
14163 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
14165 /* In the presence of negative values it is hard
14166 to say anything. */
14167 sub_strict_overflow_p = false;
14168 if (!tree_expr_nonnegative_warnv_p (op0,
14169 &sub_strict_overflow_p)
14170 || !tree_expr_nonnegative_warnv_p (op1,
14171 &sub_strict_overflow_p))
14172 return false;
14173 /* One of the operands must be positive and the other non-negative. */
14174 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14175 overflows, on a twos-complement machine the sum of two
14176 nonnegative numbers can never be zero. */
14177 return (tree_expr_nonzero_warnv_p (op0,
14178 strict_overflow_p)
14179 || tree_expr_nonzero_warnv_p (op1,
14180 strict_overflow_p));
14182 break;
14184 case MULT_EXPR:
14185 if (TYPE_OVERFLOW_UNDEFINED (type))
14187 if (tree_expr_nonzero_warnv_p (op0,
14188 strict_overflow_p)
14189 && tree_expr_nonzero_warnv_p (op1,
14190 strict_overflow_p))
14192 *strict_overflow_p = true;
14193 return true;
14196 break;
14198 case MIN_EXPR:
14199 sub_strict_overflow_p = false;
14200 if (tree_expr_nonzero_warnv_p (op0,
14201 &sub_strict_overflow_p)
14202 && tree_expr_nonzero_warnv_p (op1,
14203 &sub_strict_overflow_p))
14205 if (sub_strict_overflow_p)
14206 *strict_overflow_p = true;
14208 break;
14210 case MAX_EXPR:
14211 sub_strict_overflow_p = false;
14212 if (tree_expr_nonzero_warnv_p (op0,
14213 &sub_strict_overflow_p))
14215 if (sub_strict_overflow_p)
14216 *strict_overflow_p = true;
14218 /* When both operands are nonzero, then MAX must be too. */
14219 if (tree_expr_nonzero_warnv_p (op1,
14220 strict_overflow_p))
14221 return true;
14223 /* MAX where operand 0 is positive is positive. */
14224 return tree_expr_nonnegative_warnv_p (op0,
14225 strict_overflow_p);
14227 /* MAX where operand 1 is positive is positive. */
14228 else if (tree_expr_nonzero_warnv_p (op1,
14229 &sub_strict_overflow_p)
14230 && tree_expr_nonnegative_warnv_p (op1,
14231 &sub_strict_overflow_p))
14233 if (sub_strict_overflow_p)
14234 *strict_overflow_p = true;
14235 return true;
14237 break;
14239 case BIT_IOR_EXPR:
14240 return (tree_expr_nonzero_warnv_p (op1,
14241 strict_overflow_p)
14242 || tree_expr_nonzero_warnv_p (op0,
14243 strict_overflow_p));
14245 default:
14246 break;
14249 return false;
14252 /* Return true when T is an address and is known to be nonzero.
14253 For floating point we further ensure that T is not denormal.
14254 Similar logic is present in nonzero_address in rtlanal.h.
14256 If the return value is based on the assumption that signed overflow
14257 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14258 change *STRICT_OVERFLOW_P. */
14260 bool
14261 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14263 bool sub_strict_overflow_p;
14264 switch (TREE_CODE (t))
14266 case INTEGER_CST:
14267 return !integer_zerop (t);
14269 case ADDR_EXPR:
14271 tree base = TREE_OPERAND (t, 0);
14273 if (!DECL_P (base))
14274 base = get_base_address (base);
14276 if (base && TREE_CODE (base) == TARGET_EXPR)
14277 base = TARGET_EXPR_SLOT (base);
14279 if (!base)
14280 return false;
14282 /* For objects in the symbol table, check whether we know they are non-zero.
14283 Don't do anything for variables and functions before symtab is built;
14284 it is quite possible that they will be declared weak later. */
14285 int nonzero_addr = maybe_nonzero_address (base);
14286 if (nonzero_addr >= 0)
14287 return nonzero_addr;
14289 /* Constants are never weak. */
14290 if (CONSTANT_CLASS_P (base))
14291 return true;
14293 return false;
14296 case COND_EXPR:
14297 sub_strict_overflow_p = false;
14298 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14299 &sub_strict_overflow_p)
14300 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14301 &sub_strict_overflow_p))
14303 if (sub_strict_overflow_p)
14304 *strict_overflow_p = true;
14305 return true;
14307 break;
14309 case SSA_NAME:
14310 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
14311 break;
14312 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
14314 default:
14315 break;
14317 return false;
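/* Illustrative cases for the ADDR_EXPR handling above: the address of
   a string literal is known non-zero because the base is
   CONSTANT_CLASS_P, whereas the address of an ordinary global must go
   through maybe_nonzero_address, since before the symbol table is
   complete the symbol could still be declared weak and legitimately
   resolve to a null address.  */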
14320 #define integer_valued_real_p(X) \
14321 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14323 #define RECURSE(X) \
14324 ((integer_valued_real_p) (X, depth + 1))
14326 /* Return true if the floating point result of (CODE OP0) has an
14327 integer value. We also allow +Inf, -Inf and NaN to be considered
14328 integer values. Return false for signaling NaN.
14330 DEPTH is the current nesting depth of the query. */
14332 bool
14333 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
14335 switch (code)
14337 case FLOAT_EXPR:
14338 return true;
14340 case ABS_EXPR:
14341 return RECURSE (op0);
14343 CASE_CONVERT:
14345 tree type = TREE_TYPE (op0);
14346 if (TREE_CODE (type) == INTEGER_TYPE)
14347 return true;
14348 if (TREE_CODE (type) == REAL_TYPE)
14349 return RECURSE (op0);
14350 break;
14353 default:
14354 break;
14356 return false;
14359 /* Return true if the floating point result of (CODE OP0 OP1) has an
14360 integer value. We also allow +Inf, -Inf and NaN to be considered
14361 integer values. Return false for signaling NaN.
14363 DEPTH is the current nesting depth of the query. */
14365 bool
14366 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
14368 switch (code)
14370 case PLUS_EXPR:
14371 case MINUS_EXPR:
14372 case MULT_EXPR:
14373 case MIN_EXPR:
14374 case MAX_EXPR:
14375 return RECURSE (op0) && RECURSE (op1);
14377 default:
14378 break;
14380 return false;
14383 /* Return true if the floating point result of calling FN with arguments
14384 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
14385 considered integer values. Return false for signaling NaN. If FN
14386 takes fewer than 2 arguments, the remaining ARGn are null.
14388 DEPTH is the current nesting depth of the query. */
14390 bool
14391 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
14393 switch (fn)
14395 CASE_CFN_CEIL:
14396 CASE_CFN_CEIL_FN:
14397 CASE_CFN_FLOOR:
14398 CASE_CFN_FLOOR_FN:
14399 CASE_CFN_NEARBYINT:
14400 CASE_CFN_NEARBYINT_FN:
14401 CASE_CFN_RINT:
14402 CASE_CFN_RINT_FN:
14403 CASE_CFN_ROUND:
14404 CASE_CFN_ROUND_FN:
14405 CASE_CFN_ROUNDEVEN:
14406 CASE_CFN_ROUNDEVEN_FN:
14407 CASE_CFN_TRUNC:
14408 CASE_CFN_TRUNC_FN:
14409 return true;
14411 CASE_CFN_FMIN:
14412 CASE_CFN_FMIN_FN:
14413 CASE_CFN_FMAX:
14414 CASE_CFN_FMAX_FN:
14415 return RECURSE (arg0) && RECURSE (arg1);
14417 default:
14418 break;
14420 return false;
14423 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
14424 has an integer value. We also allow +Inf, -Inf and NaN to be
14425 considered integer values. Return false for signaling NaN.
14427 DEPTH is the current nesting depth of the query. */
14429 bool
14430 integer_valued_real_single_p (tree t, int depth)
14432 switch (TREE_CODE (t))
14434 case REAL_CST:
14435 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
14437 case COND_EXPR:
14438 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14440 case SSA_NAME:
14441 /* Limit the depth of recursion to avoid quadratic behavior.
14442 This is expected to catch almost all occurrences in practice.
14443 If this code misses important cases that unbounded recursion
14444 would not, passes that need this information could be revised
14445 to provide it through dataflow propagation. */
14446 return (!name_registered_for_update_p (t)
14447 && depth < param_max_ssa_name_query_depth
14448 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
14449 depth));
14451 default:
14452 break;
14454 return false;
14457 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
14458 has an integer value. We also allow +Inf, -Inf and NaN to be
14459 considered integer values. Return false for signaling NaN.
14461 DEPTH is the current nesting depth of the query. */
14463 static bool
14464 integer_valued_real_invalid_p (tree t, int depth)
14466 switch (TREE_CODE (t))
14468 case COMPOUND_EXPR:
14469 case MODIFY_EXPR:
14470 case BIND_EXPR:
14471 return RECURSE (TREE_OPERAND (t, 1));
14473 case SAVE_EXPR:
14474 return RECURSE (TREE_OPERAND (t, 0));
14476 default:
14477 break;
14479 return false;
14482 #undef RECURSE
14483 #undef integer_valued_real_p
14485 /* Return true if the floating point expression T has an integer value.
14486 We also allow +Inf, -Inf and NaN to be considered integer values.
14487 Return false for signaling NaN.
14489 DEPTH is the current nesting depth of the query. */
14491 bool
14492 integer_valued_real_p (tree t, int depth)
14494 if (t == error_mark_node)
14495 return false;
14497 STRIP_ANY_LOCATION_WRAPPER (t);
14499 tree_code code = TREE_CODE (t);
14500 switch (TREE_CODE_CLASS (code))
14502 case tcc_binary:
14503 case tcc_comparison:
14504 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
14505 TREE_OPERAND (t, 1), depth);
14507 case tcc_unary:
14508 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
14510 case tcc_constant:
14511 case tcc_declaration:
14512 case tcc_reference:
14513 return integer_valued_real_single_p (t, depth);
14515 default:
14516 break;
14519 switch (code)
14521 case COND_EXPR:
14522 case SSA_NAME:
14523 return integer_valued_real_single_p (t, depth);
14525 case CALL_EXPR:
14527 tree arg0 = (call_expr_nargs (t) > 0
14528 ? CALL_EXPR_ARG (t, 0)
14529 : NULL_TREE);
14530 tree arg1 = (call_expr_nargs (t) > 1
14531 ? CALL_EXPR_ARG (t, 1)
14532 : NULL_TREE);
14533 return integer_valued_real_call_p (get_call_combined_fn (t),
14534 arg0, arg1, depth);
14537 default:
14538 return integer_valued_real_invalid_p (t, depth);
14542 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14543 attempt to fold the expression to a constant without modifying TYPE,
14544 OP0 or OP1.
14546 If the expression could be simplified to a constant, then return
14547 the constant. If the expression would not be simplified to a
14548 constant, then return NULL_TREE. */
14550 tree
14551 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14553 tree tem = fold_binary (code, type, op0, op1);
14554 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
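/* An illustrative usage sketch; the snippet below is hypothetical and
   deliberately kept out of the build.  */
#if 0
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  /* Folds 2 + 3 to the INTEGER_CST 5.  */
  tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                      two, three);
  /* A non-constant operand, e.g. a VAR_DECL, yields NULL_TREE instead.  */
#endif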
14557 /* Given the components of a unary expression CODE, TYPE and OP0,
14558 attempt to fold the expression to a constant without modifying
14559 TYPE or OP0.
14561 If the expression could be simplified to a constant, then return
14562 the constant. If the expression would not be simplified to a
14563 constant, then return NULL_TREE. */
14565 tree
14566 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14568 tree tem = fold_unary (code, type, op0);
14569 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14572 /* If EXP represents referencing an element in a constant string
14573 (either via pointer arithmetic or array indexing), return the
14574 tree representing the value accessed, otherwise return NULL. */
14576 tree
14577 fold_read_from_constant_string (tree exp)
14579 if ((TREE_CODE (exp) == INDIRECT_REF
14580 || TREE_CODE (exp) == ARRAY_REF)
14581 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14583 tree exp1 = TREE_OPERAND (exp, 0);
14584 tree index;
14585 tree string;
14586 location_t loc = EXPR_LOCATION (exp);
14588 if (TREE_CODE (exp) == INDIRECT_REF)
14589 string = string_constant (exp1, &index, NULL, NULL);
14590 else
14592 tree low_bound = array_ref_low_bound (exp);
14593 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
14595 /* Optimize the special case of a zero lower bound.
14597 We convert the low_bound to sizetype to avoid some problems
14598 with constant folding. (E.g. suppose the lower bound is 1,
14599 and its mode is QI. Without the conversion, (ARRAY
14600 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14601 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14602 if (! integer_zerop (low_bound))
14603 index = size_diffop_loc (loc, index,
14604 fold_convert_loc (loc, sizetype, low_bound));
14606 string = exp1;
14609 scalar_int_mode char_mode;
14610 if (string
14611 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14612 && TREE_CODE (string) == STRING_CST
14613 && TREE_CODE (index) == INTEGER_CST
14614 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14615 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
14616 &char_mode)
14617 && GET_MODE_SIZE (char_mode) == 1)
14618 return build_int_cst_type (TREE_TYPE (exp),
14619 (TREE_STRING_POINTER (string)
14620 [TREE_INT_CST_LOW (index)]));
14622 return NULL;
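/* A worked example: for EXP = "abc"[1] the STRING_CST is "abc" with
   TREE_STRING_LENGTH 4 (counting the trailing NUL), INDEX is 1, the
   element mode is a one-byte integer mode, and the fold produces
   build_int_cst_type (char type, 'b').  An INDEX at or beyond 4 fails
   the compare_tree_int check and returns NULL.  */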
14625 /* Folds a read from vector element at IDX of vector ARG. */
14627 tree
14628 fold_read_from_vector (tree arg, poly_uint64 idx)
14630 unsigned HOST_WIDE_INT i;
14631 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
14632 && known_ge (idx, 0u)
14633 && idx.is_constant (&i))
14635 if (TREE_CODE (arg) == VECTOR_CST)
14636 return VECTOR_CST_ELT (arg, i);
14637 else if (TREE_CODE (arg) == CONSTRUCTOR)
14639 if (i >= CONSTRUCTOR_NELTS (arg))
14640 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
14641 return CONSTRUCTOR_ELT (arg, i)->value;
14644 return NULL_TREE;
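/* A worked sketch: for ARG = {1, 2, 3, 4} as a VECTOR_CST, IDX = 2
   yields the INTEGER_CST 3.  For a CONSTRUCTOR {1, 2} of a 4-element
   vector type, IDX = 3 is past CONSTRUCTOR_NELTS, so the implicitly
   zero-initialized trailing element comes back as build_zero_cst of
   the element type.  A non-constant or out-of-range IDX gives
   NULL_TREE.  */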
14647 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14648 an integer constant, real, or fixed-point constant.
14650 TYPE is the type of the result. */
14652 static tree
14653 fold_negate_const (tree arg0, tree type)
14655 tree t = NULL_TREE;
14657 switch (TREE_CODE (arg0))
14659 case REAL_CST:
14660 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14661 break;
14663 case FIXED_CST:
14665 FIXED_VALUE_TYPE f;
14666 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14667 &(TREE_FIXED_CST (arg0)), NULL,
14668 TYPE_SATURATING (type));
14669 t = build_fixed (type, f);
14670 /* Propagate overflow flags. */
14671 if (overflow_p | TREE_OVERFLOW (arg0))
14672 TREE_OVERFLOW (t) = 1;
14673 break;
14676 default:
14677 if (poly_int_tree_p (arg0))
14679 wi::overflow_type overflow;
14680 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
14681 t = force_fit_type (type, res, 1,
14682 (overflow && ! TYPE_UNSIGNED (type))
14683 || TREE_OVERFLOW (arg0));
14684 break;
14687 gcc_unreachable ();
14690 return t;
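/* A short overflow sketch (hypothetical, non-compiled), assuming a
   32-bit int.  */
#if 0
  tree int_min = build_int_cst (integer_type_node, INT_MIN);
  /* wi::neg wraps INT_MIN back to INT_MIN and reports overflow, so the
     result carries TREE_OVERFLOW for the signed type.  */
  tree neg = fold_negate_const (int_min, integer_type_node);
#endif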
14693 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14694 an integer constant or real constant.
14696 TYPE is the type of the result. */
14698 tree
14699 fold_abs_const (tree arg0, tree type)
14701 tree t = NULL_TREE;
14703 switch (TREE_CODE (arg0))
14705 case INTEGER_CST:
14707 /* If the value is unsigned or non-negative, then the absolute value
14708 is the same as the ordinary value. */
14709 wide_int val = wi::to_wide (arg0);
14710 wi::overflow_type overflow = wi::OVF_NONE;
14711 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
14714 /* If the value is negative, then the absolute value is
14715 its negation. */
14716 else
14717 val = wi::neg (val, &overflow);
14719 /* Force to the destination type, set TREE_OVERFLOW for signed
14720 TYPE only. */
14721 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
14723 break;
14725 case REAL_CST:
14726 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14727 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14728 else
14729 t = arg0;
14730 break;
14732 default:
14733 gcc_unreachable ();
14736 return t;
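/* Likewise for the absolute value, assuming a 32-bit int:
   fold_abs_const on -5 negates to 5 cleanly, while on INT_MIN the
   negation wraps back to INT_MIN and the result has TREE_OVERFLOW set.
   For REAL_CSTs only the sign is flipped via real_value_negate, so no
   overflow is possible.  */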
14739 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14740 constant. TYPE is the type of the result. */
14742 static tree
14743 fold_not_const (const_tree arg0, tree type)
14745 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14747 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
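/* A one-line sketch: for ARG0 = 5 in an 8-bit unsigned type,
   ~wi::to_wide (arg0) is 0xfa, so the result is the INTEGER_CST 250 in
   that type; TREE_OVERFLOW is simply copied over from the operand.  */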
14750 /* Given CODE, a relational operator, the target type, TYPE and two
14751 constant operands OP0 and OP1, return the result of the
14752 relational operation. If the result is not a compile time
14753 constant, then return NULL_TREE. */
14755 static tree
14756 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14758 int result, invert;
14760 /* From here on, the only cases we handle are when the result is
14761 known to be a constant. */
14763 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14765 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14766 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14768 /* Handle the cases where either operand is a NaN. */
14769 if (real_isnan (c0) || real_isnan (c1))
14771 switch (code)
14773 case EQ_EXPR:
14774 case ORDERED_EXPR:
14775 result = 0;
14776 break;
14778 case NE_EXPR:
14779 case UNORDERED_EXPR:
14780 case UNLT_EXPR:
14781 case UNLE_EXPR:
14782 case UNGT_EXPR:
14783 case UNGE_EXPR:
14784 case UNEQ_EXPR:
14785 result = 1;
14786 break;
14788 case LT_EXPR:
14789 case LE_EXPR:
14790 case GT_EXPR:
14791 case GE_EXPR:
14792 case LTGT_EXPR:
14793 if (flag_trapping_math)
14794 return NULL_TREE;
14795 result = 0;
14796 break;
14798 default:
14799 gcc_unreachable ();
14802 return constant_boolean_node (result, type);
14805 return constant_boolean_node (real_compare (code, c0, c1), type);
14808 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14810 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14811 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14812 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14815 /* Handle equality/inequality of complex constants. */
14816 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14818 tree rcond = fold_relational_const (code, type,
14819 TREE_REALPART (op0),
14820 TREE_REALPART (op1));
14821 tree icond = fold_relational_const (code, type,
14822 TREE_IMAGPART (op0),
14823 TREE_IMAGPART (op1));
14824 if (code == EQ_EXPR)
14825 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14826 else if (code == NE_EXPR)
14827 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14828 else
14829 return NULL_TREE;
14832 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14834 if (!VECTOR_TYPE_P (type))
14836 /* Have vector comparison with scalar boolean result. */
14837 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
14838 && known_eq (VECTOR_CST_NELTS (op0),
14839 VECTOR_CST_NELTS (op1)));
14840 unsigned HOST_WIDE_INT nunits;
14841 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
14842 return NULL_TREE;
14843 for (unsigned i = 0; i < nunits; i++)
14845 tree elem0 = VECTOR_CST_ELT (op0, i);
14846 tree elem1 = VECTOR_CST_ELT (op1, i);
14847 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
14848 if (tmp == NULL_TREE)
14849 return NULL_TREE;
14850 if (integer_zerop (tmp))
14851 return constant_boolean_node (code == NE_EXPR, type);
14853 return constant_boolean_node (code == EQ_EXPR, type);
14855 tree_vector_builder elts;
14856 if (!elts.new_binary_operation (type, op0, op1, false))
14857 return NULL_TREE;
14858 unsigned int count = elts.encoded_nelts ();
14859 for (unsigned i = 0; i < count; i++)
14861 tree elem_type = TREE_TYPE (type);
14862 tree elem0 = VECTOR_CST_ELT (op0, i);
14863 tree elem1 = VECTOR_CST_ELT (op1, i);
14865 tree tem = fold_relational_const (code, elem_type,
14866 elem0, elem1);
14868 if (tem == NULL_TREE)
14869 return NULL_TREE;
14871 elts.quick_push (build_int_cst (elem_type,
14872 integer_zerop (tem) ? 0 : -1));
14875 return elts.build ();
14878 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14880 To compute GT, swap the arguments and do LT.
14881 To compute GE, do LT and invert the result.
14882 To compute LE, swap the arguments, do LT and invert the result.
14883 To compute NE, do EQ and invert the result.
14885 Therefore, the code below must handle only EQ and LT. */
14887 if (code == LE_EXPR || code == GT_EXPR)
14889 std::swap (op0, op1);
14890 code = swap_tree_comparison (code);
14893 /* Note that it is safe to invert for real values here because we
14894 have already handled the one case where it matters. */
14896 invert = 0;
14897 if (code == NE_EXPR || code == GE_EXPR)
14899 invert = 1;
14900 code = invert_tree_comparison (code, false);
14903 /* Compute a result for LT or EQ if args permit;
14904 Otherwise return NULL_TREE. */
14905 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14907 if (code == EQ_EXPR)
14908 result = tree_int_cst_equal (op0, op1);
14909 else
14910 result = tree_int_cst_lt (op0, op1);
14912 else
14913 return NULL_TREE;
14915 if (invert)
14916 result ^= 1;
14917 return constant_boolean_node (result, type);
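/* Worked examples of the NaN handling above, with REAL_CST operands:
   NaN == NaN folds to false, NaN != 1.0 folds to true (NE and the UN*
   codes are true on unordered operands), and an ordering test such as
   NaN < 1.0 folds to false only when -ftrapping-math is disabled;
   otherwise NULL_TREE is returned so the runtime comparison, and the
   exception it may raise, is preserved.  */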
14920 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14921 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14922 itself. */
14924 tree
14925 fold_build_cleanup_point_expr (tree type, tree expr)
14927 /* If the expression does not have side effects then we don't have to wrap
14928 it with a cleanup point expression. */
14929 if (!TREE_SIDE_EFFECTS (expr))
14930 return expr;
14932 /* If the expression is a return, check whether the expression inside the
14933 return, or the right-hand side of the MODIFY_EXPR inside the return, is
14934 free of side effects. If either has no side effects, we don't need to
14935 wrap the expression in a cleanup point expression. Note we don't check the
14936 left-hand side of the MODIFY_EXPR because it should always be a return decl. */
14937 if (TREE_CODE (expr) == RETURN_EXPR)
14939 tree op = TREE_OPERAND (expr, 0);
14940 if (!op || !TREE_SIDE_EFFECTS (op))
14941 return expr;
14942 op = TREE_OPERAND (op, 1);
14943 if (!TREE_SIDE_EFFECTS (op))
14944 return expr;
14947 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
14950 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14951 of an indirection through OP0, or NULL_TREE if no simplification is
14952 possible. */
14954 tree
14955 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14957 tree sub = op0;
14958 tree subtype;
14959 poly_uint64 const_op01;
14961 STRIP_NOPS (sub);
14962 subtype = TREE_TYPE (sub);
14963 if (!POINTER_TYPE_P (subtype)
14964 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
14965 return NULL_TREE;
14967 if (TREE_CODE (sub) == ADDR_EXPR)
14969 tree op = TREE_OPERAND (sub, 0);
14970 tree optype = TREE_TYPE (op);
14972 /* *&CONST_DECL -> to the value of the const decl. */
14973 if (TREE_CODE (op) == CONST_DECL)
14974 return DECL_INITIAL (op);
14975 /* *&p => p; make sure to handle *&"str"[cst] here. */
14976 if (type == optype)
14978 tree fop = fold_read_from_constant_string (op);
14979 if (fop)
14980 return fop;
14981 else
14982 return op;
14984 /* *(foo *)&fooarray => fooarray[0] */
14985 else if (TREE_CODE (optype) == ARRAY_TYPE
14986 && type == TREE_TYPE (optype)
14987 && (!in_gimple_form
14988 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14990 tree type_domain = TYPE_DOMAIN (optype);
14991 tree min_val = size_zero_node;
14992 if (type_domain && TYPE_MIN_VALUE (type_domain))
14993 min_val = TYPE_MIN_VALUE (type_domain);
14994 if (in_gimple_form
14995 && TREE_CODE (min_val) != INTEGER_CST)
14996 return NULL_TREE;
14997 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14998 NULL_TREE, NULL_TREE);
15000 /* *(foo *)&complexfoo => __real__ complexfoo */
15001 else if (TREE_CODE (optype) == COMPLEX_TYPE
15002 && type == TREE_TYPE (optype))
15003 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15004 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15005 else if (VECTOR_TYPE_P (optype)
15006 && type == TREE_TYPE (optype))
15008 tree part_width = TYPE_SIZE (type);
15009 tree index = bitsize_int (0);
15010 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
15011 index);
15015 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15016 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
15018 tree op00 = TREE_OPERAND (sub, 0);
15019 tree op01 = TREE_OPERAND (sub, 1);
15021 STRIP_NOPS (op00);
15022 if (TREE_CODE (op00) == ADDR_EXPR)
15024 tree op00type;
15025 op00 = TREE_OPERAND (op00, 0);
15026 op00type = TREE_TYPE (op00);
15028 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15029 if (VECTOR_TYPE_P (op00type)
15030 && type == TREE_TYPE (op00type)
15031 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
15032 but we want to treat offsets with MSB set as negative.
15033 For the code below negative offsets are invalid and
15034 TYPE_SIZE of the element is something unsigned, so
15035 check whether op01 fits into poly_int64, which implies
15036 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
15037 then just use poly_uint64 because we want to treat the
15038 value as unsigned. */
15039 && tree_fits_poly_int64_p (op01))
15041 tree part_width = TYPE_SIZE (type);
15042 poly_uint64 max_offset
15043 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
15044 * TYPE_VECTOR_SUBPARTS (op00type));
15045 if (known_lt (const_op01, max_offset))
15047 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
15048 return fold_build3_loc (loc,
15049 BIT_FIELD_REF, type, op00,
15050 part_width, index);
15053 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15054 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15055 && type == TREE_TYPE (op00type))
15057 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
15058 const_op01))
15059 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15061 /* ((foo *)&fooarray)[1] => fooarray[1] */
15062 else if (TREE_CODE (op00type) == ARRAY_TYPE
15063 && type == TREE_TYPE (op00type))
15065 tree type_domain = TYPE_DOMAIN (op00type);
15066 tree min_val = size_zero_node;
15067 if (type_domain && TYPE_MIN_VALUE (type_domain))
15068 min_val = TYPE_MIN_VALUE (type_domain);
15069 poly_uint64 type_size, index;
15070 if (poly_int_tree_p (min_val)
15071 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
15072 && multiple_p (const_op01, type_size, &index))
15074 poly_offset_int off = index + wi::to_poly_offset (min_val);
15075 op01 = wide_int_to_tree (sizetype, off);
15076 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15077 NULL_TREE, NULL_TREE);
15083 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15084 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15085 && type == TREE_TYPE (TREE_TYPE (subtype))
15086 && (!in_gimple_form
15087 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15089 tree type_domain;
15090 tree min_val = size_zero_node;
15091 sub = build_fold_indirect_ref_loc (loc, sub);
15092 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15093 if (type_domain && TYPE_MIN_VALUE (type_domain))
15094 min_val = TYPE_MIN_VALUE (type_domain);
15095 if (in_gimple_form
15096 && TREE_CODE (min_val) != INTEGER_CST)
15097 return NULL_TREE;
15098 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15099 NULL_TREE);
15102 return NULL_TREE;
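/* A sketch of the POINTER_PLUS_EXPR handling, assuming a 4-byte int
   and int fooarray[8]:

     *(int *)&fooarray        => fooarray[0]  (ADDR_EXPR case)
     *((int *)&fooarray + 8)  => fooarray[2]  (8 is a multiple of
                                               TYPE_SIZE_UNIT (int))
     *((int *)&fooarray + 6)  => NULL_TREE    (not element-aligned)  */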
15105 /* Builds an expression for an indirection through T, simplifying some
15106 cases. */
15108 tree
15109 build_fold_indirect_ref_loc (location_t loc, tree t)
15111 tree type = TREE_TYPE (TREE_TYPE (t));
15112 tree sub = fold_indirect_ref_1 (loc, type, t);
15114 if (sub)
15115 return sub;
15117 return build1_loc (loc, INDIRECT_REF, type, t);
15120 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15122 tree
15123 fold_indirect_ref_loc (location_t loc, tree t)
15125 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15127 if (sub)
15128 return sub;
15129 else
15130 return t;
15133 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15134 whose result is ignored. The type of the returned tree need not be
15135 the same as the original expression. */
15137 tree
15138 fold_ignored_result (tree t)
15140 if (!TREE_SIDE_EFFECTS (t))
15141 return integer_zero_node;
15143 for (;;)
15144 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15146 case tcc_unary:
15147 t = TREE_OPERAND (t, 0);
15148 break;
15150 case tcc_binary:
15151 case tcc_comparison:
15152 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15153 t = TREE_OPERAND (t, 0);
15154 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15155 t = TREE_OPERAND (t, 1);
15156 else
15157 return t;
15158 break;
15160 case tcc_expression:
15161 switch (TREE_CODE (t))
15163 case COMPOUND_EXPR:
15164 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15165 return t;
15166 t = TREE_OPERAND (t, 0);
15167 break;
15169 case COND_EXPR:
15170 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15171 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15172 return t;
15173 t = TREE_OPERAND (t, 0);
15174 break;
15176 default:
15177 return t;
15179 break;
15181 default:
15182 return t;
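/* A stripping sketch: with the result unused, (x + f ()) reduces to
   f (), the COMPOUND_EXPR (f (), y) reduces to f (), and a COND_EXPR
   whose arms are both side-effect free reduces to its condition.  An
   expression with no side effects at all becomes integer_zero_node up
   front.  */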
15186 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15188 tree
15189 round_up_loc (location_t loc, tree value, unsigned int divisor)
15191 tree div = NULL_TREE;
15193 if (divisor == 1)
15194 return value;
15196 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15197 have to do anything. Only do this when we are not given a const,
15198 because in that case, this check is more expensive than just
15199 doing it. */
15200 if (TREE_CODE (value) != INTEGER_CST)
15202 div = build_int_cst (TREE_TYPE (value), divisor);
15204 if (multiple_of_p (TREE_TYPE (value), value, div))
15205 return value;
15208 /* If divisor is a power of two, simplify this to bit manipulation. */
15209 if (pow2_or_zerop (divisor))
15211 if (TREE_CODE (value) == INTEGER_CST)
15213 wide_int val = wi::to_wide (value);
15214 bool overflow_p;
15216 if ((val & (divisor - 1)) == 0)
15217 return value;
15219 overflow_p = TREE_OVERFLOW (value);
15220 val += divisor - 1;
15221 val &= (int) -divisor;
15222 if (val == 0)
15223 overflow_p = true;
15225 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
15227 else
15229 tree t;
15231 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15232 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15233 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
15234 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15237 else
15239 if (!div)
15240 div = build_int_cst (TREE_TYPE (value), divisor);
15241 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15242 value = size_binop_loc (loc, MULT_EXPR, value, div);
15245 return value;
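/* Worked constants for the two paths, assuming sizetype values:
   round_up_loc (loc, size_int (37), 8) takes the power-of-two route,
   (37 + 7) & -8, and yields 40; round_up_loc (loc, size_int (10), 6)
   folds CEIL_DIV_EXPR then MULT_EXPR and yields 12.  */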
15248 /* Likewise, but round down. */
15250 tree
15251 round_down_loc (location_t loc, tree value, int divisor)
15253 tree div = NULL_TREE;
15255 gcc_assert (divisor > 0);
15256 if (divisor == 1)
15257 return value;
15259 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15260 have to do anything. Only do this when we are not given a const,
15261 because in that case, this check is more expensive than just
15262 doing it. */
15263 if (TREE_CODE (value) != INTEGER_CST)
15265 div = build_int_cst (TREE_TYPE (value), divisor);
15267 if (multiple_of_p (TREE_TYPE (value), value, div))
15268 return value;
15271 /* If divisor is a power of two, simplify this to bit manipulation. */
15272 if (pow2_or_zerop (divisor))
15274 tree t;
15276 t = build_int_cst (TREE_TYPE (value), -divisor);
15277 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15279 else
15281 if (!div)
15282 div = build_int_cst (TREE_TYPE (value), divisor);
15283 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15284 value = size_binop_loc (loc, MULT_EXPR, value, div);
15287 return value;
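/* Likewise worked constants: round_down_loc (loc, size_int (37), 8)
   masks with -8 and yields 32, while round_down_loc (loc, size_int (10), 6)
   goes through FLOOR_DIV_EXPR and MULT_EXPR and yields 6.  */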
15290 /* Returns the pointer to the base of the object addressed by EXP and
15291 extracts the information about the offset of the access, storing it
15292 to PBITPOS and POFFSET. */
15294 static tree
15295 split_address_to_core_and_offset (tree exp,
15296 poly_int64_pod *pbitpos, tree *poffset)
15298 tree core;
15299 machine_mode mode;
15300 int unsignedp, reversep, volatilep;
15301 poly_int64 bitsize;
15302 location_t loc = EXPR_LOCATION (exp);
15304 if (TREE_CODE (exp) == ADDR_EXPR)
15306 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15307 poffset, &mode, &unsignedp, &reversep,
15308 &volatilep);
15309 core = build_fold_addr_expr_loc (loc, core);
15311 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
15313 core = TREE_OPERAND (exp, 0);
15314 STRIP_NOPS (core);
15315 *pbitpos = 0;
15316 *poffset = TREE_OPERAND (exp, 1);
15317 if (poly_int_tree_p (*poffset))
15319 poly_offset_int tem
15320 = wi::sext (wi::to_poly_offset (*poffset),
15321 TYPE_PRECISION (TREE_TYPE (*poffset)));
15322 tem <<= LOG2_BITS_PER_UNIT;
15323 if (tem.to_shwi (pbitpos))
15324 *poffset = NULL_TREE;
15327 else
15329 core = exp;
15330 *pbitpos = 0;
15331 *poffset = NULL_TREE;
15334 return core;
15337 /* Returns true if addresses of E1 and E2 differ by a constant, false
15338 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15340 bool
15341 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
15343 tree core1, core2;
15344 poly_int64 bitpos1, bitpos2;
15345 tree toffset1, toffset2, tdiff, type;
15347 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15348 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15350 poly_int64 bytepos1, bytepos2;
15351 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
15352 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
15353 || !operand_equal_p (core1, core2, 0))
15354 return false;
15356 if (toffset1 && toffset2)
15358 type = TREE_TYPE (toffset1);
15359 if (type != TREE_TYPE (toffset2))
15360 toffset2 = fold_convert (type, toffset2);
15362 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15363 if (!cst_and_fits_in_hwi (tdiff))
15364 return false;
15366 *diff = int_cst_value (tdiff);
15368 else if (toffset1 || toffset2)
15370 /* If only one of the offsets is non-constant, the difference cannot
15371 be a constant. */
15372 return false;
15374 else
15375 *diff = 0;
15377 *diff += bytepos1 - bytepos2;
15378 return true;
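/* A minimal sketch, assuming int a[10] with 4-byte elements: for
   E1 = &a[3] and E2 = &a[1] both addresses split to the common core
   &a with bit positions 96 and 32, so *DIFF becomes 12 - 4 == 8.
   Addresses of different objects, or ones whose offsets differ by a
   non-constant, return false.  */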
15381 /* Return OFF converted to a pointer offset type suitable as offset for
15382 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
15383 tree
15384 convert_to_ptrofftype_loc (location_t loc, tree off)
15386 return fold_convert_loc (loc, sizetype, off);
15389 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15390 tree
15391 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
15393 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15394 ptr, convert_to_ptrofftype_loc (loc, off));
15397 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15398 tree
15399 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
15401 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15402 ptr, size_int (off));
15405 /* Return a pointer P to a NUL-terminated string representing the sequence
15406 of constant characters referred to by SRC (or a subsequence of such
15407 characters within it if SRC is a reference to a string plus some
15408 constant offset). If STRLEN is non-null, store the number of bytes
15409 in the string constant including the terminating NUL char. *STRLEN is
15410 typically strlen(P) + 1 in the absence of embedded NUL characters. */
15412 const char *
15413 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen /* = NULL */)
15415 tree offset_node;
15416 tree mem_size;
15418 if (strlen)
15419 *strlen = 0;
15421 src = string_constant (src, &offset_node, &mem_size, NULL);
15422 if (src == 0)
15423 return NULL;
15425 unsigned HOST_WIDE_INT offset = 0;
15426 if (offset_node != NULL_TREE)
15428 if (!tree_fits_uhwi_p (offset_node))
15429 return NULL;
15430 else
15431 offset = tree_to_uhwi (offset_node);
15434 if (!tree_fits_uhwi_p (mem_size))
15435 return NULL;
15437 /* STRING_LENGTH is the size of the string literal, including any
15438 embedded NULs. STRING_SIZE is the size of the array the string
15439 literal is stored in. */
15440 unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
15441 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size);
15443 /* Ideally this would turn into a gcc_checking_assert over time. */
15444 if (string_length > string_size)
15445 string_length = string_size;
15447 const char *string = TREE_STRING_POINTER (src);
15453 if (string_length == 0
15454 || offset >= string_size)
15455 return NULL;
15457 if (strlen)
15459 /* Compute and store the length of the substring at OFFSET.
15460 All offsets past the initial length refer to null strings. */
15461 if (offset < string_length)
15462 *strlen = string_length - offset;
15463 else
15464 *strlen = 1;
15466 else
15468 tree eltype = TREE_TYPE (TREE_TYPE (src));
15469 /* Support only properly NUL-terminated single-byte strings. */
15470 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
15471 return NULL;
15472 if (string[string_length - 1] != '\0')
15473 return NULL;
15476 return offset < string_length ? string + offset : "";
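/* A worked example: for SRC = &"hello"[1] the STRING_CST has
   TREE_STRING_LENGTH 6 (including the NUL) and OFFSET 1, so the
   function returns a pointer to "ello" and stores 5 in *STRLEN.  An
   offset past the final NUL but still within the array object yields
   "" with *STRLEN == 1.  */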
15479 /* Given a tree T, compute which bits in T may be nonzero. */
15481 wide_int
15482 tree_nonzero_bits (const_tree t)
15484 switch (TREE_CODE (t))
15486 case INTEGER_CST:
15487 return wi::to_wide (t);
15488 case SSA_NAME:
15489 return get_nonzero_bits (t);
15490 case NON_LVALUE_EXPR:
15491 case SAVE_EXPR:
15492 return tree_nonzero_bits (TREE_OPERAND (t, 0));
15493 case BIT_AND_EXPR:
15494 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15495 tree_nonzero_bits (TREE_OPERAND (t, 1)));
15496 case BIT_IOR_EXPR:
15497 case BIT_XOR_EXPR:
15498 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15499 tree_nonzero_bits (TREE_OPERAND (t, 1)));
15500 case COND_EXPR:
15501 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
15502 tree_nonzero_bits (TREE_OPERAND (t, 2)));
15503 CASE_CONVERT:
15504 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15505 TYPE_PRECISION (TREE_TYPE (t)),
15506 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
15507 case PLUS_EXPR:
15508 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
15510 wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
15511 wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
15512 if (wi::bit_and (nzbits1, nzbits2) == 0)
15513 return wi::bit_or (nzbits1, nzbits2);
15515 break;
15516 case LSHIFT_EXPR:
15517 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
15519 tree type = TREE_TYPE (t);
15520 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
15521 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
15522 TYPE_PRECISION (type));
15523 return wi::neg_p (arg1)
15524 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
15525 : wi::lshift (nzbits, arg1);
15527 break;
15528 case RSHIFT_EXPR:
15529 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
15531 tree type = TREE_TYPE (t);
15532 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
15533 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
15534 TYPE_PRECISION (type));
15535 return wi::neg_p (arg1)
15536 ? wi::lshift (nzbits, -arg1)
15537 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
15539 break;
15540 default:
15541 break;
15544 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
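/* A short sketch, assuming a 32-bit unsigned X of unknown value:
   tree_nonzero_bits (x) is 0xffffffff (the all-ones default),
   tree_nonzero_bits (x & 0xf0) is 0xf0,
   tree_nonzero_bits ((x & 0xf0) | 3) is 0xf3, and
   tree_nonzero_bits ((x & 0xf0) << 2) is 0x3c0.  A PLUS_EXPR whose
   operands have disjoint masks behaves like BIT_IOR_EXPR, so
   (x & 0xf0) + 3 also gives 0xf3.  */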
15547 #if CHECKING_P
15549 namespace selftest {
15551 /* Helper functions for writing tests of folding trees. */
15553 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
15555 static void
15556 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
15557 tree constant)
15559 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
15562 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
15563 wrapping WRAPPED_EXPR. */
15565 static void
15566 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
15567 tree wrapped_expr)
15569 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
15570 ASSERT_NE (wrapped_expr, result);
15571 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
15572 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
15575 /* Verify that various arithmetic binary operations are folded
15576 correctly. */
15578 static void
15579 test_arithmetic_folding ()
15581 tree type = integer_type_node;
15582 tree x = create_tmp_var_raw (type, "x");
15583 tree zero = build_zero_cst (type);
15584 tree one = build_int_cst (type, 1);
15586 /* Addition. */
15587 /* 1 <-- (0 + 1) */
15588 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
15589 one);
15590 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
15591 one);
15593 /* (nonlvalue)x <-- (x + 0) */
15594 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero, x);
15597 /* Subtraction. */
15598 /* 0 <-- (x - x) */
15599 assert_binop_folds_to_const (x, MINUS_EXPR, x,
15600 zero);
15601 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero, x);
15604 /* Multiplication. */
15605 /* 0 <-- (x * 0) */
15606 assert_binop_folds_to_const (x, MULT_EXPR, zero,
15607 zero);
15609 /* (nonlvalue)x <-- (x * 1) */
15610 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one, x);
15614 /* Verify that various binary operations on vectors are folded
15615 correctly. */
15617 static void
15618 test_vector_folding ()
15620 tree inner_type = integer_type_node;
15621 tree type = build_vector_type (inner_type, 4);
15622 tree zero = build_zero_cst (type);
15623 tree one = build_one_cst (type);
15624 tree index = build_index_vector (type, 0, 1);
15626 /* Verify equality tests that return a scalar boolean result. */
15627 tree res_type = boolean_type_node;
15628 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
15629 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
15630 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
15631 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
15632 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
15633 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
15634 index, one)));
15635 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
15636 index, index)));
15637 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
15638 index, index)));
15641 /* Verify folding of VEC_DUPLICATE_EXPRs. */
15643 static void
15644 test_vec_duplicate_folding ()
15646 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
15647 machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
15648 /* This will be 1 if VEC_MODE isn't a vector mode. */
15649 poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
15651 tree type = build_vector_type (ssizetype, nunits);
15652 tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
15653 tree dup5_cst = build_vector_from_val (type, ssize_int (5));
15654 ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
15657 /* Run all of the selftests within this file. */
15659 void
15660 fold_const_c_tests ()
15662 test_arithmetic_folding ();
15663 test_vector_folding ();
15664 test_vec_duplicate_folding ();
15667 } // namespace selftest
15669 #endif /* CHECKING_P */