gcc/fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2020 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"
77 #include "stringpool.h"
78 #include "tree-vrp.h"
79 #include "tree-ssanames.h"
80 #include "selftest.h"
81 #include "stringpool.h"
82 #include "attribs.h"
83 #include "tree-vector-builder.h"
84 #include "vec-perm-indices.h"
86 /* Nonzero if we are folding constants inside an initializer; zero
87 otherwise. */
88 int folding_initializer = 0;
90 /* The following constants represent a bit based encoding of GCC's
91 comparison operators. This encoding simplifies transformations
92 on relational comparison operators, such as AND and OR. */
93 enum comparison_code {
94 COMPCODE_FALSE = 0,
95 COMPCODE_LT = 1,
96 COMPCODE_EQ = 2,
97 COMPCODE_LE = 3,
98 COMPCODE_GT = 4,
99 COMPCODE_LTGT = 5,
100 COMPCODE_GE = 6,
101 COMPCODE_ORD = 7,
102 COMPCODE_UNORD = 8,
103 COMPCODE_UNLT = 9,
104 COMPCODE_UNEQ = 10,
105 COMPCODE_UNLE = 11,
106 COMPCODE_UNGT = 12,
107 COMPCODE_NE = 13,
108 COMPCODE_UNGE = 14,
109 COMPCODE_TRUE = 15
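/* Worked example of the encoding (derived from the enumerator values
   above): bit 0 stands for "less", bit 1 for "equal", bit 2 for
   "greater" and bit 3 for "unordered", so
     COMPCODE_LT | COMPCODE_EQ == 1 | 2 == 3 == COMPCODE_LE
     COMPCODE_LE & COMPCODE_GE == 3 & 6 == 2 == COMPCODE_EQ
   which is why AND and OR of comparisons reduce to bitwise operations
   on these codes.  */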
112 static bool negate_expr_p (tree);
113 static tree negate_expr (tree);
114 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
115 static enum comparison_code comparison_to_compcode (enum tree_code);
116 static enum tree_code compcode_to_comparison (enum comparison_code);
117 static bool twoval_comparison_p (tree, tree *, tree *);
118 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
119 static tree optimize_bit_field_compare (location_t, enum tree_code,
120 tree, tree, tree);
121 static bool simple_operand_p (const_tree);
122 static bool simple_operand_p_2 (tree);
123 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
124 static tree range_predecessor (tree);
125 static tree range_successor (tree);
126 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
130 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
131 static tree fold_binary_op_with_conditional_arg (location_t,
132 enum tree_code, tree,
133 tree, tree,
134 tree, tree, int);
135 static tree fold_negate_const (tree, tree);
136 static tree fold_not_const (const_tree, tree);
137 static tree fold_relational_const (enum tree_code, tree, tree, tree);
138 static tree fold_convert_const (enum tree_code, tree, tree);
139 static tree fold_view_convert_expr (tree, tree);
140 static tree fold_negate_expr (location_t, tree);
143 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
144 Otherwise, return LOC. */
146 static location_t
147 expr_location_or (tree t, location_t loc)
149 location_t tloc = EXPR_LOCATION (t);
150 return tloc == UNKNOWN_LOCATION ? loc : tloc;
153 /* Similar to protected_set_expr_location, but never modify x in place;
154 if the location can and needs to be set, unshare it. */
156 static inline tree
157 protected_set_expr_location_unshare (tree x, location_t loc)
159 if (CAN_HAVE_LOCATION_P (x)
160 && EXPR_LOCATION (x) != loc
161 && !(TREE_CODE (x) == SAVE_EXPR
162 || TREE_CODE (x) == TARGET_EXPR
163 || TREE_CODE (x) == BIND_EXPR))
165 x = copy_node (x);
166 SET_EXPR_LOCATION (x, loc);
168 return x;
171 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
172 division and returns the quotient. Otherwise returns
173 NULL_TREE. */
175 tree
176 div_if_zero_remainder (const_tree arg1, const_tree arg2)
178 widest_int quo;
180 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
181 SIGNED, &quo))
182 return wide_int_to_tree (TREE_TYPE (arg1), quo);
184 return NULL_TREE;
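/* Example: with INTEGER_CST operands, div_if_zero_remainder (12, 4)
   yields the constant 3, while div_if_zero_remainder (12, 5) yields
   NULL_TREE because the division is not exact.  */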
187 /* This is nonzero if we should defer warnings about undefined
188 overflow. This facility exists because these warnings are a
189 special case. The code to estimate loop iterations does not want
190 to issue any warnings, since it works with expressions which do not
191 occur in user code. Various bits of cleanup code call fold(), but
192 only use the result if it has certain characteristics (e.g., is a
193 constant); that code only wants to issue a warning if the result is
194 used. */
196 static int fold_deferring_overflow_warnings;
198 /* If a warning about undefined overflow is deferred, this is the
199 warning. Note that this may cause us to turn two warnings into
200 one, but that is fine since it is sufficient to only give one
201 warning per expression. */
203 static const char* fold_deferred_overflow_warning;
205 /* If a warning about undefined overflow is deferred, this is the
206 level at which the warning should be emitted. */
208 static enum warn_strict_overflow_code fold_deferred_overflow_code;
210 /* Start deferring overflow warnings. We could use a stack here to
211 permit nested calls, but at present it is not necessary. */
213 void
214 fold_defer_overflow_warnings (void)
216 ++fold_deferring_overflow_warnings;
219 /* Stop deferring overflow warnings. If there is a pending warning,
220 and ISSUE is true, then issue the warning if appropriate. STMT is
221 the statement with which the warning should be associated (used for
222 location information); STMT may be NULL. CODE is the level of the
223 warning--a warn_strict_overflow_code value. This function will use
224 the smaller of CODE and the deferred code when deciding whether to
225 issue the warning. CODE may be zero to mean to always use the
226 deferred code. */
228 void
229 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
231 const char *warnmsg;
232 location_t locus;
234 gcc_assert (fold_deferring_overflow_warnings > 0);
235 --fold_deferring_overflow_warnings;
236 if (fold_deferring_overflow_warnings > 0)
238 if (fold_deferred_overflow_warning != NULL
239 && code != 0
240 && code < (int) fold_deferred_overflow_code)
241 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
242 return;
245 warnmsg = fold_deferred_overflow_warning;
246 fold_deferred_overflow_warning = NULL;
248 if (!issue || warnmsg == NULL)
249 return;
251 if (gimple_no_warning_p (stmt))
252 return;
254 /* Use the smallest code level when deciding to issue the
255 warning. */
256 if (code == 0 || code > (int) fold_deferred_overflow_code)
257 code = fold_deferred_overflow_code;
259 if (!issue_strict_overflow_warning (code))
260 return;
262 if (stmt == NULL)
263 locus = input_location;
264 else
265 locus = gimple_location (stmt);
266 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
269 /* Stop deferring overflow warnings, ignoring any deferred
270 warnings. */
272 void
273 fold_undefer_and_ignore_overflow_warnings (void)
275 fold_undefer_overflow_warnings (false, NULL, 0);
278 /* Whether we are deferring overflow warnings. */
280 bool
281 fold_deferring_overflow_warnings_p (void)
283 return fold_deferring_overflow_warnings > 0;
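/* A minimal usage sketch (hypothetical caller, not part of this file):
   wrap fold () calls whose result may be thrown away, so that a
   -Wstrict-overflow warning is only issued when the result is used:

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     bool used = result_is_used_p (folded);   // hypothetical predicate
     fold_undefer_overflow_warnings (used, stmt, 0);

   Passing 0 as CODE means "use the deferred warning's own level".  */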
286 /* This is called when we fold something based on the fact that signed
287 overflow is undefined. */
289 void
290 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
292 if (fold_deferring_overflow_warnings > 0)
294 if (fold_deferred_overflow_warning == NULL
295 || wc < fold_deferred_overflow_code)
297 fold_deferred_overflow_warning = gmsgid;
298 fold_deferred_overflow_code = wc;
301 else if (issue_strict_overflow_warning (wc))
302 warning (OPT_Wstrict_overflow, gmsgid);
305 /* Return true if the built-in mathematical function specified by CODE
306 is odd, i.e. -f(x) == f(-x). */
308 bool
309 negate_mathfn_p (combined_fn fn)
311 switch (fn)
313 CASE_CFN_ASIN:
314 CASE_CFN_ASINH:
315 CASE_CFN_ATAN:
316 CASE_CFN_ATANH:
317 CASE_CFN_CASIN:
318 CASE_CFN_CASINH:
319 CASE_CFN_CATAN:
320 CASE_CFN_CATANH:
321 CASE_CFN_CBRT:
322 CASE_CFN_CPROJ:
323 CASE_CFN_CSIN:
324 CASE_CFN_CSINH:
325 CASE_CFN_CTAN:
326 CASE_CFN_CTANH:
327 CASE_CFN_ERF:
328 CASE_CFN_LLROUND:
329 CASE_CFN_LROUND:
330 CASE_CFN_ROUND:
331 CASE_CFN_ROUNDEVEN:
332 CASE_CFN_ROUNDEVEN_FN:
333 CASE_CFN_SIN:
334 CASE_CFN_SINH:
335 CASE_CFN_TAN:
336 CASE_CFN_TANH:
337 CASE_CFN_TRUNC:
338 return true;
340 CASE_CFN_LLRINT:
341 CASE_CFN_LRINT:
342 CASE_CFN_NEARBYINT:
343 CASE_CFN_RINT:
344 return !flag_rounding_math;
346 default:
347 break;
349 return false;
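/* For example, since sin is odd, negate_mathfn_p (CFN_SIN) is true and
   the folders may rewrite -sin (x) as sin (-x).  The LLRINT/LRINT/
   NEARBYINT/RINT group only counts as odd when -frounding-math is off,
   because under a directed run-time rounding mode (e.g. upward)
   rint (-x) need not equal -rint (x).  */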
352 /* Check whether we may negate an integer constant T without causing
353 overflow. */
355 bool
356 may_negate_without_overflow_p (const_tree t)
358 tree type;
360 gcc_assert (TREE_CODE (t) == INTEGER_CST);
362 type = TREE_TYPE (t);
363 if (TYPE_UNSIGNED (type))
364 return false;
366 return !wi::only_sign_bit_p (wi::to_wide (t));
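/* Example: for a 32-bit signed int, the only value that cannot be
   negated is INT_MIN (0x80000000), since 2147483648 exceeds INT_MAX;
   wi::only_sign_bit_p detects exactly that bit pattern.  */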
369 /* Determine whether an expression T can be cheaply negated using
370 the function negate_expr without introducing undefined overflow. */
372 static bool
373 negate_expr_p (tree t)
375 tree type;
377 if (t == 0)
378 return false;
380 type = TREE_TYPE (t);
382 STRIP_SIGN_NOPS (t);
383 switch (TREE_CODE (t))
385 case INTEGER_CST:
386 if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
387 return true;
389 /* Check that -CST will not overflow type. */
390 return may_negate_without_overflow_p (t);
391 case BIT_NOT_EXPR:
392 return (INTEGRAL_TYPE_P (type)
393 && TYPE_OVERFLOW_WRAPS (type));
395 case FIXED_CST:
396 return true;
398 case NEGATE_EXPR:
399 return !TYPE_OVERFLOW_SANITIZED (type);
401 case REAL_CST:
402 /* We want to canonicalize to positive real constants. Pretend
403 that only negative ones can be easily negated. */
404 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
406 case COMPLEX_CST:
407 return negate_expr_p (TREE_REALPART (t))
408 && negate_expr_p (TREE_IMAGPART (t));
410 case VECTOR_CST:
412 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
413 return true;
415 /* Steps don't prevent negation. */
416 unsigned int count = vector_cst_encoded_nelts (t);
417 for (unsigned int i = 0; i < count; ++i)
418 if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
419 return false;
421 return true;
424 case COMPLEX_EXPR:
425 return negate_expr_p (TREE_OPERAND (t, 0))
426 && negate_expr_p (TREE_OPERAND (t, 1));
428 case CONJ_EXPR:
429 return negate_expr_p (TREE_OPERAND (t, 0));
431 case PLUS_EXPR:
432 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
433 || HONOR_SIGNED_ZEROS (element_mode (type))
434 || (ANY_INTEGRAL_TYPE_P (type)
435 && ! TYPE_OVERFLOW_WRAPS (type)))
436 return false;
437 /* -(A + B) -> (-B) - A. */
438 if (negate_expr_p (TREE_OPERAND (t, 1)))
439 return true;
440 /* -(A + B) -> (-A) - B. */
441 return negate_expr_p (TREE_OPERAND (t, 0));
443 case MINUS_EXPR:
444 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
445 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
446 && !HONOR_SIGNED_ZEROS (element_mode (type))
447 && (! ANY_INTEGRAL_TYPE_P (type)
448 || TYPE_OVERFLOW_WRAPS (type));
450 case MULT_EXPR:
451 if (TYPE_UNSIGNED (type))
452 break;
453 /* (INT_MIN/n) * n doesn't overflow, but negating one of its operands
454 does if n is a (negative) power of two. */
455 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
456 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
457 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
458 && (wi::popcount
459 (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
460 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
461 && (wi::popcount
462 (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
463 break;
465 /* Fall through. */
467 case RDIV_EXPR:
468 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
469 return negate_expr_p (TREE_OPERAND (t, 1))
470 || negate_expr_p (TREE_OPERAND (t, 0));
471 break;
473 case TRUNC_DIV_EXPR:
474 case ROUND_DIV_EXPR:
475 case EXACT_DIV_EXPR:
476 if (TYPE_UNSIGNED (type))
477 break;
478 /* In general we can't negate A in A / B, because if A is INT_MIN and
479 B is not 1 we change the sign of the result. */
480 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
481 && negate_expr_p (TREE_OPERAND (t, 0)))
482 return true;
483 /* In general we can't negate B in A / B, because if A is INT_MIN and
484 B is 1, we may turn this into INT_MIN / -1 which is undefined
485 and actually traps on some architectures. */
486 if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
487 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
488 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
489 && ! integer_onep (TREE_OPERAND (t, 1))))
490 return negate_expr_p (TREE_OPERAND (t, 1));
491 break;
493 case NOP_EXPR:
494 /* Negate -((double)float) as (double)(-float). */
495 if (TREE_CODE (type) == REAL_TYPE)
497 tree tem = strip_float_extensions (t);
498 if (tem != t)
499 return negate_expr_p (tem);
501 break;
503 case CALL_EXPR:
504 /* Negate -f(x) as f(-x). */
505 if (negate_mathfn_p (get_call_combined_fn (t)))
506 return negate_expr_p (CALL_EXPR_ARG (t, 0));
507 break;
509 case RSHIFT_EXPR:
510 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
511 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
513 tree op1 = TREE_OPERAND (t, 1);
514 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
515 return true;
517 break;
519 default:
520 break;
522 return false;
525 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
526 simplification is possible.
527 If negate_expr_p would return true for T, NULL_TREE will never be
528 returned. */
530 static tree
531 fold_negate_expr_1 (location_t loc, tree t)
533 tree type = TREE_TYPE (t);
534 tree tem;
536 switch (TREE_CODE (t))
538 /* Convert - (~A) to A + 1. */
539 case BIT_NOT_EXPR:
540 if (INTEGRAL_TYPE_P (type))
541 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
542 build_one_cst (type));
543 break;
545 case INTEGER_CST:
546 tem = fold_negate_const (t, type);
547 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
548 || (ANY_INTEGRAL_TYPE_P (type)
549 && !TYPE_OVERFLOW_TRAPS (type)
550 && TYPE_OVERFLOW_WRAPS (type))
551 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
552 return tem;
553 break;
555 case POLY_INT_CST:
556 case REAL_CST:
557 case FIXED_CST:
558 tem = fold_negate_const (t, type);
559 return tem;
561 case COMPLEX_CST:
563 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
564 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
565 if (rpart && ipart)
566 return build_complex (type, rpart, ipart);
568 break;
570 case VECTOR_CST:
572 tree_vector_builder elts;
573 elts.new_unary_operation (type, t, true);
574 unsigned int count = elts.encoded_nelts ();
575 for (unsigned int i = 0; i < count; ++i)
577 tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
578 if (elt == NULL_TREE)
579 return NULL_TREE;
580 elts.quick_push (elt);
583 return elts.build ();
586 case COMPLEX_EXPR:
587 if (negate_expr_p (t))
588 return fold_build2_loc (loc, COMPLEX_EXPR, type,
589 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
590 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
591 break;
593 case CONJ_EXPR:
594 if (negate_expr_p (t))
595 return fold_build1_loc (loc, CONJ_EXPR, type,
596 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
597 break;
599 case NEGATE_EXPR:
600 if (!TYPE_OVERFLOW_SANITIZED (type))
601 return TREE_OPERAND (t, 0);
602 break;
604 case PLUS_EXPR:
605 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
606 && !HONOR_SIGNED_ZEROS (element_mode (type)))
608 /* -(A + B) -> (-B) - A. */
609 if (negate_expr_p (TREE_OPERAND (t, 1)))
611 tem = negate_expr (TREE_OPERAND (t, 1));
612 return fold_build2_loc (loc, MINUS_EXPR, type,
613 tem, TREE_OPERAND (t, 0));
616 /* -(A + B) -> (-A) - B. */
617 if (negate_expr_p (TREE_OPERAND (t, 0)))
619 tem = negate_expr (TREE_OPERAND (t, 0));
620 return fold_build2_loc (loc, MINUS_EXPR, type,
621 tem, TREE_OPERAND (t, 1));
624 break;
626 case MINUS_EXPR:
627 /* - (A - B) -> B - A */
628 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
629 && !HONOR_SIGNED_ZEROS (element_mode (type)))
630 return fold_build2_loc (loc, MINUS_EXPR, type,
631 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
632 break;
634 case MULT_EXPR:
635 if (TYPE_UNSIGNED (type))
636 break;
638 /* Fall through. */
640 case RDIV_EXPR:
641 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
643 tem = TREE_OPERAND (t, 1);
644 if (negate_expr_p (tem))
645 return fold_build2_loc (loc, TREE_CODE (t), type,
646 TREE_OPERAND (t, 0), negate_expr (tem));
647 tem = TREE_OPERAND (t, 0);
648 if (negate_expr_p (tem))
649 return fold_build2_loc (loc, TREE_CODE (t), type,
650 negate_expr (tem), TREE_OPERAND (t, 1));
652 break;
654 case TRUNC_DIV_EXPR:
655 case ROUND_DIV_EXPR:
656 case EXACT_DIV_EXPR:
657 if (TYPE_UNSIGNED (type))
658 break;
659 /* In general we can't negate A in A / B, because if A is INT_MIN and
660 B is not 1 we change the sign of the result. */
661 if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
662 && negate_expr_p (TREE_OPERAND (t, 0)))
663 return fold_build2_loc (loc, TREE_CODE (t), type,
664 negate_expr (TREE_OPERAND (t, 0)),
665 TREE_OPERAND (t, 1));
666 /* In general we can't negate B in A / B, because if A is INT_MIN and
667 B is 1, we may turn this into INT_MIN / -1 which is undefined
668 and actually traps on some architectures. */
669 if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
670 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
671 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
672 && ! integer_onep (TREE_OPERAND (t, 1))))
673 && negate_expr_p (TREE_OPERAND (t, 1)))
674 return fold_build2_loc (loc, TREE_CODE (t), type,
675 TREE_OPERAND (t, 0),
676 negate_expr (TREE_OPERAND (t, 1)));
677 break;
679 case NOP_EXPR:
680 /* Convert -((double)float) into (double)(-float). */
681 if (TREE_CODE (type) == REAL_TYPE)
683 tem = strip_float_extensions (t);
684 if (tem != t && negate_expr_p (tem))
685 return fold_convert_loc (loc, type, negate_expr (tem));
687 break;
689 case CALL_EXPR:
690 /* Negate -f(x) as f(-x). */
691 if (negate_mathfn_p (get_call_combined_fn (t))
692 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
694 tree fndecl, arg;
696 fndecl = get_callee_fndecl (t);
697 arg = negate_expr (CALL_EXPR_ARG (t, 0));
698 return build_call_expr_loc (loc, fndecl, 1, arg);
700 break;
702 case RSHIFT_EXPR:
703 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
704 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
706 tree op1 = TREE_OPERAND (t, 1);
707 if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
709 tree ntype = TYPE_UNSIGNED (type)
710 ? signed_type_for (type)
711 : unsigned_type_for (type);
712 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
713 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
714 return fold_convert_loc (loc, type, temp);
717 break;
719 default:
720 break;
723 return NULL_TREE;
726 /* A wrapper for fold_negate_expr_1. */
728 static tree
729 fold_negate_expr (location_t loc, tree t)
731 tree type = TREE_TYPE (t);
732 STRIP_SIGN_NOPS (t);
733 tree tem = fold_negate_expr_1 (loc, t);
734 if (tem == NULL_TREE)
735 return NULL_TREE;
736 return fold_convert_loc (loc, type, tem);
739 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
740 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
741 return NULL_TREE. */
743 static tree
744 negate_expr (tree t)
746 tree type, tem;
747 location_t loc;
749 if (t == NULL_TREE)
750 return NULL_TREE;
752 loc = EXPR_LOCATION (t);
753 type = TREE_TYPE (t);
754 STRIP_SIGN_NOPS (t);
756 tem = fold_negate_expr (loc, t);
757 if (!tem)
758 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
759 return fold_convert_loc (loc, type, tem);
762 /* Split a tree IN into constant, literal and variable parts that could be
763 combined with CODE to make IN. "constant" means an expression with
764 TREE_CONSTANT but that isn't an actual constant. CODE must be a
765 commutative arithmetic operation. Store the constant part into *CONP,
766 the literal in *LITP and return the variable part. If a part isn't
767 present, set it to null. If the tree does not decompose in this way,
768 return the entire tree as the variable part and the other parts as null.
770 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
771 case, we negate an operand that was subtracted. Except if it is a
772 literal for which we use *MINUS_LITP instead.
774 If NEGATE_P is true, we are negating all of IN, again except a literal
775 for which we use *MINUS_LITP instead. If a variable part is of pointer
776 type, it is negated after converting to TYPE. This prevents us from
777 generating illegal MINUS pointer expression. LOC is the location of
778 the converted variable part.
780 If IN is itself a literal or constant, return it as appropriate.
782 Note that we do not guarantee that any of the three values will be the
783 same type as IN, but they will have the same signedness and mode. */
785 static tree
786 split_tree (tree in, tree type, enum tree_code code,
787 tree *minus_varp, tree *conp, tree *minus_conp,
788 tree *litp, tree *minus_litp, int negate_p)
790 tree var = 0;
791 *minus_varp = 0;
792 *conp = 0;
793 *minus_conp = 0;
794 *litp = 0;
795 *minus_litp = 0;
797 /* Strip any conversions that don't change the machine mode or signedness. */
798 STRIP_SIGN_NOPS (in);
800 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
801 || TREE_CODE (in) == FIXED_CST)
802 *litp = in;
803 else if (TREE_CODE (in) == code
804 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
805 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
806 /* We can associate addition and subtraction together (even
807 though the C standard doesn't say so) for integers because
808 the value is not affected. For reals, the value might be
809 affected, so we can't. */
810 && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
811 || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
812 || (code == MINUS_EXPR
813 && (TREE_CODE (in) == PLUS_EXPR
814 || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
816 tree op0 = TREE_OPERAND (in, 0);
817 tree op1 = TREE_OPERAND (in, 1);
818 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
819 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
821 /* First see if either of the operands is a literal, then a constant. */
822 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
823 || TREE_CODE (op0) == FIXED_CST)
824 *litp = op0, op0 = 0;
825 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
826 || TREE_CODE (op1) == FIXED_CST)
827 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
829 if (op0 != 0 && TREE_CONSTANT (op0))
830 *conp = op0, op0 = 0;
831 else if (op1 != 0 && TREE_CONSTANT (op1))
832 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
834 /* If we haven't dealt with either operand, this is not a case we can
835 decompose. Otherwise, VAR is either of the ones remaining, if any. */
836 if (op0 != 0 && op1 != 0)
837 var = in;
838 else if (op0 != 0)
839 var = op0;
840 else
841 var = op1, neg_var_p = neg1_p;
843 /* Now do any needed negations. */
844 if (neg_litp_p)
845 *minus_litp = *litp, *litp = 0;
846 if (neg_conp_p && *conp)
847 *minus_conp = *conp, *conp = 0;
848 if (neg_var_p && var)
849 *minus_varp = var, var = 0;
851 else if (TREE_CONSTANT (in))
852 *conp = in;
853 else if (TREE_CODE (in) == BIT_NOT_EXPR
854 && code == PLUS_EXPR)
856 /* -1 - X is folded to ~X, undo that here. Do _not_ do this
857 when IN is constant. */
858 *litp = build_minus_one_cst (type);
859 *minus_varp = TREE_OPERAND (in, 0);
861 else
862 var = in;
864 if (negate_p)
866 if (*litp)
867 *minus_litp = *litp, *litp = 0;
868 else if (*minus_litp)
869 *litp = *minus_litp, *minus_litp = 0;
870 if (*conp)
871 *minus_conp = *conp, *conp = 0;
872 else if (*minus_conp)
873 *conp = *minus_conp, *minus_conp = 0;
874 if (var)
875 *minus_varp = var, var = 0;
876 else if (*minus_varp)
877 var = *minus_varp, *minus_varp = 0;
880 if (*litp
881 && TREE_OVERFLOW_P (*litp))
882 *litp = drop_tree_overflow (*litp);
883 if (*minus_litp
884 && TREE_OVERFLOW_P (*minus_litp))
885 *minus_litp = drop_tree_overflow (*minus_litp);
887 return var;
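/* Worked example: splitting IN = x - 5 with CODE == PLUS_EXPR and
   NEGATE_P == 0 decomposes the MINUS_EXPR, returning var == x with
   *litp == NULL and *minus_litp == 5 (the literal was subtracted);
   *conp, *minus_conp and *minus_varp stay null.  */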
890 /* Re-associate trees split by the above function. T1 and T2 are
891 either expressions to associate or null. Return the new
892 expression, if any. LOC is the location of the new expression. If
893 we build an operation, do it in TYPE and with CODE. */
895 static tree
896 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
898 if (t1 == 0)
900 gcc_assert (t2 == 0 || code != MINUS_EXPR);
901 return t2;
903 else if (t2 == 0)
904 return t1;
906 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
907 try to fold this since we will have infinite recursion. But do
908 deal with any NEGATE_EXPRs. */
909 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
910 || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
911 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
913 if (code == PLUS_EXPR)
915 if (TREE_CODE (t1) == NEGATE_EXPR)
916 return build2_loc (loc, MINUS_EXPR, type,
917 fold_convert_loc (loc, type, t2),
918 fold_convert_loc (loc, type,
919 TREE_OPERAND (t1, 0)));
920 else if (TREE_CODE (t2) == NEGATE_EXPR)
921 return build2_loc (loc, MINUS_EXPR, type,
922 fold_convert_loc (loc, type, t1),
923 fold_convert_loc (loc, type,
924 TREE_OPERAND (t2, 0)));
925 else if (integer_zerop (t2))
926 return fold_convert_loc (loc, type, t1);
928 else if (code == MINUS_EXPR)
930 if (integer_zerop (t2))
931 return fold_convert_loc (loc, type, t1);
934 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
935 fold_convert_loc (loc, type, t2));
938 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
939 fold_convert_loc (loc, type, t2));
942 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
943 for use in int_const_binop, size_binop and size_diffop. */
945 static bool
946 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
948 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
949 return false;
950 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
951 return false;
953 switch (code)
955 case LSHIFT_EXPR:
956 case RSHIFT_EXPR:
957 case LROTATE_EXPR:
958 case RROTATE_EXPR:
959 return true;
961 default:
962 break;
965 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
966 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
967 && TYPE_MODE (type1) == TYPE_MODE (type2);
970 /* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
971 a new constant in RES. Return FALSE if we don't know how to
972 evaluate CODE at compile-time. */
974 bool
975 wide_int_binop (wide_int &res,
976 enum tree_code code, const wide_int &arg1, const wide_int &arg2,
977 signop sign, wi::overflow_type *overflow)
979 wide_int tmp;
980 *overflow = wi::OVF_NONE;
981 switch (code)
983 case BIT_IOR_EXPR:
984 res = wi::bit_or (arg1, arg2);
985 break;
987 case BIT_XOR_EXPR:
988 res = wi::bit_xor (arg1, arg2);
989 break;
991 case BIT_AND_EXPR:
992 res = wi::bit_and (arg1, arg2);
993 break;
995 case LSHIFT_EXPR:
996 if (wi::neg_p (arg2))
997 return false;
998 res = wi::lshift (arg1, arg2);
999 break;
1001 case RSHIFT_EXPR:
1002 if (wi::neg_p (arg2))
1003 return false;
1004 /* It's unclear from the C standard whether shifts can overflow.
1005 The following code ignores overflow; perhaps a C standard
1006 interpretation ruling is needed. */
1007 res = wi::rshift (arg1, arg2, sign);
1008 break;
1010 case RROTATE_EXPR:
1011 case LROTATE_EXPR:
1012 if (wi::neg_p (arg2))
1014 tmp = -arg2;
1015 if (code == RROTATE_EXPR)
1016 code = LROTATE_EXPR;
1017 else
1018 code = RROTATE_EXPR;
1020 else
1021 tmp = arg2;
1023 if (code == RROTATE_EXPR)
1024 res = wi::rrotate (arg1, tmp);
1025 else
1026 res = wi::lrotate (arg1, tmp);
1027 break;
1029 case PLUS_EXPR:
1030 res = wi::add (arg1, arg2, sign, overflow);
1031 break;
1033 case MINUS_EXPR:
1034 res = wi::sub (arg1, arg2, sign, overflow);
1035 break;
1037 case MULT_EXPR:
1038 res = wi::mul (arg1, arg2, sign, overflow);
1039 break;
1041 case MULT_HIGHPART_EXPR:
1042 res = wi::mul_high (arg1, arg2, sign);
1043 break;
1045 case TRUNC_DIV_EXPR:
1046 case EXACT_DIV_EXPR:
1047 if (arg2 == 0)
1048 return false;
1049 res = wi::div_trunc (arg1, arg2, sign, overflow);
1050 break;
1052 case FLOOR_DIV_EXPR:
1053 if (arg2 == 0)
1054 return false;
1055 res = wi::div_floor (arg1, arg2, sign, overflow);
1056 break;
1058 case CEIL_DIV_EXPR:
1059 if (arg2 == 0)
1060 return false;
1061 res = wi::div_ceil (arg1, arg2, sign, overflow);
1062 break;
1064 case ROUND_DIV_EXPR:
1065 if (arg2 == 0)
1066 return false;
1067 res = wi::div_round (arg1, arg2, sign, overflow);
1068 break;
1070 case TRUNC_MOD_EXPR:
1071 if (arg2 == 0)
1072 return false;
1073 res = wi::mod_trunc (arg1, arg2, sign, overflow);
1074 break;
1076 case FLOOR_MOD_EXPR:
1077 if (arg2 == 0)
1078 return false;
1079 res = wi::mod_floor (arg1, arg2, sign, overflow);
1080 break;
1082 case CEIL_MOD_EXPR:
1083 if (arg2 == 0)
1084 return false;
1085 res = wi::mod_ceil (arg1, arg2, sign, overflow);
1086 break;
1088 case ROUND_MOD_EXPR:
1089 if (arg2 == 0)
1090 return false;
1091 res = wi::mod_round (arg1, arg2, sign, overflow);
1092 break;
1094 case MIN_EXPR:
1095 res = wi::min (arg1, arg2, sign);
1096 break;
1098 case MAX_EXPR:
1099 res = wi::max (arg1, arg2, sign);
1100 break;
1102 default:
1103 return false;
1105 return true;
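/* A minimal usage sketch (assuming INTEGER_CST operands a and b):

     wi::overflow_type ovf;
     wide_int res;
     if (wide_int_binop (res, PLUS_EXPR, wi::to_wide (a), wi::to_wide (b),
			 TYPE_SIGN (TREE_TYPE (a)), &ovf))
       // res holds the sum; ovf records any signed/unsigned overflow.

   A false return means CODE is not evaluatable at compile time.  */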
1108 /* Combine two poly int's ARG1 and ARG2 under operation CODE to
1109 produce a new constant in RES. Return FALSE if we don't know how
1110 to evaluate CODE at compile-time. */
1112 static bool
1113 poly_int_binop (poly_wide_int &res, enum tree_code code,
1114 const_tree arg1, const_tree arg2,
1115 signop sign, wi::overflow_type *overflow)
1117 gcc_assert (NUM_POLY_INT_COEFFS != 1);
1118 gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
1119 switch (code)
1121 case PLUS_EXPR:
1122 res = wi::add (wi::to_poly_wide (arg1),
1123 wi::to_poly_wide (arg2), sign, overflow);
1124 break;
1126 case MINUS_EXPR:
1127 res = wi::sub (wi::to_poly_wide (arg1),
1128 wi::to_poly_wide (arg2), sign, overflow);
1129 break;
1131 case MULT_EXPR:
1132 if (TREE_CODE (arg2) == INTEGER_CST)
1133 res = wi::mul (wi::to_poly_wide (arg1),
1134 wi::to_wide (arg2), sign, overflow);
1135 else if (TREE_CODE (arg1) == INTEGER_CST)
1136 res = wi::mul (wi::to_poly_wide (arg2),
1137 wi::to_wide (arg1), sign, overflow);
1138 else
1139 return false;
1140 break;
1142 case LSHIFT_EXPR:
1143 if (TREE_CODE (arg2) == INTEGER_CST)
1144 res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
1145 else
1146 return false;
1147 break;
1149 case BIT_IOR_EXPR:
1150 if (TREE_CODE (arg2) != INTEGER_CST
1151 || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
1152 &res))
1153 return false;
1154 break;
1156 default:
1157 return false;
1159 return true;
1162 /* Combine two integer constants ARG1 and ARG2 under operation CODE to
1163 produce a new constant. Return NULL_TREE if we don't know how to
1164 evaluate CODE at compile-time. */
1166 tree
1167 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
1168 int overflowable)
1170 poly_wide_int poly_res;
1171 tree type = TREE_TYPE (arg1);
1172 signop sign = TYPE_SIGN (type);
1173 wi::overflow_type overflow = wi::OVF_NONE;
1175 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1177 wide_int warg1 = wi::to_wide (arg1), res;
1178 wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
1179 if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
1180 return NULL_TREE;
1181 poly_res = res;
1183 else if (!poly_int_tree_p (arg1)
1184 || !poly_int_tree_p (arg2)
1185 || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
1186 return NULL_TREE;
1187 return force_fit_type (type, poly_res, overflowable,
1188 (((sign == SIGNED || overflowable == -1)
1189 && overflow)
1190 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
1193 /* Return true if binary operation OP distributes over addition in operand
1194 OPNO, with the other operand being held constant. OPNO counts from 1. */
1196 static bool
1197 distributes_over_addition_p (tree_code op, int opno)
1199 switch (op)
1201 case PLUS_EXPR:
1202 case MINUS_EXPR:
1203 case MULT_EXPR:
1204 return true;
1206 case LSHIFT_EXPR:
1207 return opno == 1;
1209 default:
1210 return false;
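/* Example: LSHIFT_EXPR distributes over addition in operand 1 because
   (a + b) << c == (a << c) + (b << c) in modulo arithmetic, but not in
   operand 2: c << (a + b) is c * 2^(a+b), not (c << a) + (c << b).  */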
1214 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1215 constant. We assume ARG1 and ARG2 have the same data type, or at least
1216 are the same kind of constant and the same machine mode. Return zero if
1217 combining the constants is not allowed in the current operating mode. */
1219 static tree
1220 const_binop (enum tree_code code, tree arg1, tree arg2)
1222 /* Sanity check for the recursive cases. */
1223 if (!arg1 || !arg2)
1224 return NULL_TREE;
1226 STRIP_NOPS (arg1);
1227 STRIP_NOPS (arg2);
1229 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1231 if (code == POINTER_PLUS_EXPR)
1232 return int_const_binop (PLUS_EXPR,
1233 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1235 return int_const_binop (code, arg1, arg2);
1238 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1240 machine_mode mode;
1241 REAL_VALUE_TYPE d1;
1242 REAL_VALUE_TYPE d2;
1243 REAL_VALUE_TYPE value;
1244 REAL_VALUE_TYPE result;
1245 bool inexact;
1246 tree t, type;
1248 /* The following codes are handled by real_arithmetic. */
1249 switch (code)
1251 case PLUS_EXPR:
1252 case MINUS_EXPR:
1253 case MULT_EXPR:
1254 case RDIV_EXPR:
1255 case MIN_EXPR:
1256 case MAX_EXPR:
1257 break;
1259 default:
1260 return NULL_TREE;
1263 d1 = TREE_REAL_CST (arg1);
1264 d2 = TREE_REAL_CST (arg2);
1266 type = TREE_TYPE (arg1);
1267 mode = TYPE_MODE (type);
1269 /* Don't perform operation if we honor signaling NaNs and
1270 either operand is a signaling NaN. */
1271 if (HONOR_SNANS (mode)
1272 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1273 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1274 return NULL_TREE;
1276 /* Don't perform operation if it would raise a division
1277 by zero exception. */
1278 if (code == RDIV_EXPR
1279 && real_equal (&d2, &dconst0)
1280 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1281 return NULL_TREE;
1283 /* If either operand is a NaN, just return it. Otherwise, set up
1284 for floating-point trap; we return an overflow. */
1285 if (REAL_VALUE_ISNAN (d1))
1287 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1288 is off. */
1289 d1.signalling = 0;
1290 t = build_real (type, d1);
1291 return t;
1293 else if (REAL_VALUE_ISNAN (d2))
1295 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1296 is off. */
1297 d2.signalling = 0;
1298 t = build_real (type, d2);
1299 return t;
1302 inexact = real_arithmetic (&value, code, &d1, &d2);
1303 real_convert (&result, mode, &value);
1305 /* Don't constant fold this floating point operation if
1306 the result has overflowed and flag_trapping_math. */
1307 if (flag_trapping_math
1308 && MODE_HAS_INFINITIES (mode)
1309 && REAL_VALUE_ISINF (result)
1310 && !REAL_VALUE_ISINF (d1)
1311 && !REAL_VALUE_ISINF (d2))
1312 return NULL_TREE;
1314 /* Don't constant fold this floating point operation if the
1315 result may depend upon the run-time rounding mode and
1316 flag_rounding_math is set, or if GCC's software emulation
1317 is unable to accurately represent the result. */
1318 if ((flag_rounding_math
1319 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1320 && (inexact || !real_identical (&result, &value)))
1321 return NULL_TREE;
1323 t = build_real (type, result);
1325 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1326 return t;
1329 if (TREE_CODE (arg1) == FIXED_CST)
1331 FIXED_VALUE_TYPE f1;
1332 FIXED_VALUE_TYPE f2;
1333 FIXED_VALUE_TYPE result;
1334 tree t, type;
1335 int sat_p;
1336 bool overflow_p;
1338 /* The following codes are handled by fixed_arithmetic. */
1339 switch (code)
1341 case PLUS_EXPR:
1342 case MINUS_EXPR:
1343 case MULT_EXPR:
1344 case TRUNC_DIV_EXPR:
1345 if (TREE_CODE (arg2) != FIXED_CST)
1346 return NULL_TREE;
1347 f2 = TREE_FIXED_CST (arg2);
1348 break;
1350 case LSHIFT_EXPR:
1351 case RSHIFT_EXPR:
1353 if (TREE_CODE (arg2) != INTEGER_CST)
1354 return NULL_TREE;
1355 wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
1356 f2.data.high = w2.elt (1);
1357 f2.data.low = w2.ulow ();
1358 f2.mode = SImode;
1360 break;
1362 default:
1363 return NULL_TREE;
1366 f1 = TREE_FIXED_CST (arg1);
1367 type = TREE_TYPE (arg1);
1368 sat_p = TYPE_SATURATING (type);
1369 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1370 t = build_fixed (type, result);
1371 /* Propagate overflow flags. */
1372 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1373 TREE_OVERFLOW (t) = 1;
1374 return t;
1377 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1379 tree type = TREE_TYPE (arg1);
1380 tree r1 = TREE_REALPART (arg1);
1381 tree i1 = TREE_IMAGPART (arg1);
1382 tree r2 = TREE_REALPART (arg2);
1383 tree i2 = TREE_IMAGPART (arg2);
1384 tree real, imag;
1386 switch (code)
1388 case PLUS_EXPR:
1389 case MINUS_EXPR:
1390 real = const_binop (code, r1, r2);
1391 imag = const_binop (code, i1, i2);
1392 break;
1394 case MULT_EXPR:
1395 if (COMPLEX_FLOAT_TYPE_P (type))
1396 return do_mpc_arg2 (arg1, arg2, type,
1397 /* do_nonfinite= */ folding_initializer,
1398 mpc_mul);
1400 real = const_binop (MINUS_EXPR,
1401 const_binop (MULT_EXPR, r1, r2),
1402 const_binop (MULT_EXPR, i1, i2));
1403 imag = const_binop (PLUS_EXPR,
1404 const_binop (MULT_EXPR, r1, i2),
1405 const_binop (MULT_EXPR, i1, r2));
1406 break;
1408 case RDIV_EXPR:
1409 if (COMPLEX_FLOAT_TYPE_P (type))
1410 return do_mpc_arg2 (arg1, arg2, type,
1411 /* do_nonfinite= */ folding_initializer,
1412 mpc_div);
1413 /* Fallthru. */
1414 case TRUNC_DIV_EXPR:
1415 case CEIL_DIV_EXPR:
1416 case FLOOR_DIV_EXPR:
1417 case ROUND_DIV_EXPR:
1418 if (flag_complex_method == 0)
1420 /* Keep this algorithm in sync with
1421 tree-complex.c:expand_complex_div_straight().
1423 Expand complex division to scalars, straightforward algorithm.
1424 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1425 t = br*br + bi*bi
1427 tree magsquared
1428 = const_binop (PLUS_EXPR,
1429 const_binop (MULT_EXPR, r2, r2),
1430 const_binop (MULT_EXPR, i2, i2));
1431 tree t1
1432 = const_binop (PLUS_EXPR,
1433 const_binop (MULT_EXPR, r1, r2),
1434 const_binop (MULT_EXPR, i1, i2));
1435 tree t2
1436 = const_binop (MINUS_EXPR,
1437 const_binop (MULT_EXPR, i1, r2),
1438 const_binop (MULT_EXPR, r1, i2));
1440 real = const_binop (code, t1, magsquared);
1441 imag = const_binop (code, t2, magsquared);
1443 else
1445 /* Keep this algorithm in sync with
1446 tree-complex.c:expand_complex_div_wide().
1448 Expand complex division to scalars, modified algorithm to minimize
1449 overflow with wide input ranges. */
1450 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1451 fold_abs_const (r2, TREE_TYPE (type)),
1452 fold_abs_const (i2, TREE_TYPE (type)));
1454 if (integer_nonzerop (compare))
1456 /* In the TRUE branch, we compute
1457 ratio = br/bi;
1458 div = (br * ratio) + bi;
1459 tr = (ar * ratio) + ai;
1460 ti = (ai * ratio) - ar;
1461 tr = tr / div;
1462 ti = ti / div; */
1463 tree ratio = const_binop (code, r2, i2);
1464 tree div = const_binop (PLUS_EXPR, i2,
1465 const_binop (MULT_EXPR, r2, ratio));
1466 real = const_binop (MULT_EXPR, r1, ratio);
1467 real = const_binop (PLUS_EXPR, real, i1);
1468 real = const_binop (code, real, div);
1470 imag = const_binop (MULT_EXPR, i1, ratio);
1471 imag = const_binop (MINUS_EXPR, imag, r1);
1472 imag = const_binop (code, imag, div);
1474 else
1476 /* In the FALSE branch, we compute
1477 ratio = bi/br;
1478 div = (bi * ratio) + br;
1479 tr = (ai * ratio) + ar;
1480 ti = ai - (ar * ratio);
1481 tr = tr / div;
1482 ti = ti / div; */
1483 tree ratio = const_binop (code, i2, r2);
1484 tree div = const_binop (PLUS_EXPR, r2,
1485 const_binop (MULT_EXPR, i2, ratio));
1487 real = const_binop (MULT_EXPR, i1, ratio);
1488 real = const_binop (PLUS_EXPR, real, r1);
1489 real = const_binop (code, real, div);
1491 imag = const_binop (MULT_EXPR, r1, ratio);
1492 imag = const_binop (MINUS_EXPR, i1, imag);
1493 imag = const_binop (code, imag, div);
1496 break;
1498 default:
1499 return NULL_TREE;
1502 if (real && imag)
1503 return build_complex (type, real, imag);
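/* Worked example of the straightforward formula in the
   flag_complex_method == 0 case above: (1 + 2i) / (3 + 4i) gives
   t = 3*3 + 4*4 = 25, real = (1*3 + 2*4) / 25 = 11/25 and
   imag = (2*3 - 1*4) / 25 = 2/25, matching
   (1 + 2i) * (3 - 4i) / 25 = (11 + 2i) / 25.  */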
1506 if (TREE_CODE (arg1) == VECTOR_CST
1507 && TREE_CODE (arg2) == VECTOR_CST
1508 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
1509 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
1511 tree type = TREE_TYPE (arg1);
1512 bool step_ok_p;
1513 if (VECTOR_CST_STEPPED_P (arg1)
1514 && VECTOR_CST_STEPPED_P (arg2))
1515 /* We can operate directly on the encoding if:
1517 a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1518 implies
1519 (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1521 Addition and subtraction are the supported operators
1522 for which this is true. */
1523 step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
1524 else if (VECTOR_CST_STEPPED_P (arg1))
1525 /* We can operate directly on stepped encodings if:
1527 a3 - a2 == a2 - a1
1528 implies:
1529 (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1531 which is true if (x -> x op c) distributes over addition. */
1532 step_ok_p = distributes_over_addition_p (code, 1);
1533 else
1534 /* Similarly in reverse. */
1535 step_ok_p = distributes_over_addition_p (code, 2);
1536 tree_vector_builder elts;
1537 if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
1538 return NULL_TREE;
1539 unsigned int count = elts.encoded_nelts ();
1540 for (unsigned int i = 0; i < count; ++i)
1542 tree elem1 = VECTOR_CST_ELT (arg1, i);
1543 tree elem2 = VECTOR_CST_ELT (arg2, i);
1545 tree elt = const_binop (code, elem1, elem2);
1547 /* It is possible that const_binop cannot handle the given
1548 code and returns NULL_TREE. */
1549 if (elt == NULL_TREE)
1550 return NULL_TREE;
1551 elts.quick_push (elt);
1554 return elts.build ();
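/* Example of why only PLUS/MINUS handle two stepped encodings: for
   linear series a = {1, 2, 3, ...} and b = {10, 12, 14, ...} the sum
   {11, 14, 17, ...} is again linear (step 1 + step 2 == step 3), so
   operating on the encoded elements alone is safe; elementwise MULT of
   the same inputs, {10, 24, 42, ...}, is not a linear series.  */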
1557 /* Shifts allow a scalar offset for a vector. */
1558 if (TREE_CODE (arg1) == VECTOR_CST
1559 && TREE_CODE (arg2) == INTEGER_CST)
1561 tree type = TREE_TYPE (arg1);
1562 bool step_ok_p = distributes_over_addition_p (code, 1);
1563 tree_vector_builder elts;
1564 if (!elts.new_unary_operation (type, arg1, step_ok_p))
1565 return NULL_TREE;
1566 unsigned int count = elts.encoded_nelts ();
1567 for (unsigned int i = 0; i < count; ++i)
1569 tree elem1 = VECTOR_CST_ELT (arg1, i);
1571 tree elt = const_binop (code, elem1, arg2);
1573 /* It is possible that const_binop cannot handle the given
1574 code and return NULL_TREE. */
1575 if (elt == NULL_TREE)
1576 return NULL_TREE;
1577 elts.quick_push (elt);
1580 return elts.build ();
1582 return NULL_TREE;
1585 /* Overload that adds a TYPE parameter to be able to dispatch
1586 to fold_relational_const. */
1588 tree
1589 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1591 if (TREE_CODE_CLASS (code) == tcc_comparison)
1592 return fold_relational_const (code, type, arg1, arg2);
1594 /* ??? Until we make the const_binop worker take the type of the
1595 result as argument put those cases that need it here. */
1596 switch (code)
1598 case VEC_SERIES_EXPR:
1599 if (CONSTANT_CLASS_P (arg1)
1600 && CONSTANT_CLASS_P (arg2))
1601 return build_vec_series (type, arg1, arg2);
1602 return NULL_TREE;
1604 case COMPLEX_EXPR:
1605 if ((TREE_CODE (arg1) == REAL_CST
1606 && TREE_CODE (arg2) == REAL_CST)
1607 || (TREE_CODE (arg1) == INTEGER_CST
1608 && TREE_CODE (arg2) == INTEGER_CST))
1609 return build_complex (type, arg1, arg2);
1610 return NULL_TREE;
1612 case POINTER_DIFF_EXPR:
1613 if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1615 poly_offset_int res = (wi::to_poly_offset (arg1)
1616 - wi::to_poly_offset (arg2));
1617 return force_fit_type (type, res, 1,
1618 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1620 return NULL_TREE;
1622 case VEC_PACK_TRUNC_EXPR:
1623 case VEC_PACK_FIX_TRUNC_EXPR:
1624 case VEC_PACK_FLOAT_EXPR:
1626 unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;
1628 if (TREE_CODE (arg1) != VECTOR_CST
1629 || TREE_CODE (arg2) != VECTOR_CST)
1630 return NULL_TREE;
1632 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1633 return NULL_TREE;
1635 out_nelts = in_nelts * 2;
1636 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1637 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1639 tree_vector_builder elts (type, out_nelts, 1);
1640 for (i = 0; i < out_nelts; i++)
1642 tree elt = (i < in_nelts
1643 ? VECTOR_CST_ELT (arg1, i)
1644 : VECTOR_CST_ELT (arg2, i - in_nelts));
1645 elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1646 ? NOP_EXPR
1647 : code == VEC_PACK_FLOAT_EXPR
1648 ? FLOAT_EXPR : FIX_TRUNC_EXPR,
1649 TREE_TYPE (type), elt);
1650 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1651 return NULL_TREE;
1652 elts.quick_push (elt);
1655 return elts.build ();
1658 case VEC_WIDEN_MULT_LO_EXPR:
1659 case VEC_WIDEN_MULT_HI_EXPR:
1660 case VEC_WIDEN_MULT_EVEN_EXPR:
1661 case VEC_WIDEN_MULT_ODD_EXPR:
1663 unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1665 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1666 return NULL_TREE;
1668 if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1669 return NULL_TREE;
1670 out_nelts = in_nelts / 2;
1671 gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1672 && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1674 if (code == VEC_WIDEN_MULT_LO_EXPR)
1675 scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1676 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1677 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1678 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1679 scale = 1, ofs = 0;
1680 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1681 scale = 1, ofs = 1;
1683 tree_vector_builder elts (type, out_nelts, 1);
1684 for (out = 0; out < out_nelts; out++)
1686 unsigned int in = (out << scale) + ofs;
1687 tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1688 VECTOR_CST_ELT (arg1, in));
1689 tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1690 VECTOR_CST_ELT (arg2, in));
1692 if (t1 == NULL_TREE || t2 == NULL_TREE)
1693 return NULL_TREE;
1694 tree elt = const_binop (MULT_EXPR, t1, t2);
1695 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1696 return NULL_TREE;
1697 elts.quick_push (elt);
1700 return elts.build ();
1703 default:;
1706 if (TREE_CODE_CLASS (code) != tcc_binary)
1707 return NULL_TREE;
1709 /* Make sure type and arg1 have the same saturating flag. */
1710 gcc_checking_assert (TYPE_SATURATING (type)
1711 == TYPE_SATURATING (TREE_TYPE (arg1)));
1713 return const_binop (code, arg1, arg2);
1716 /* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
1717 Return zero if computing the constants is not possible. */
1719 tree
1720 const_unop (enum tree_code code, tree type, tree arg0)
1722 /* Don't perform the operation, other than NEGATE and ABS, if
1723 flag_signaling_nans is on and the operand is a signaling NaN. */
1724 if (TREE_CODE (arg0) == REAL_CST
1725 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1726 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1727 && code != NEGATE_EXPR
1728 && code != ABS_EXPR
1729 && code != ABSU_EXPR)
1730 return NULL_TREE;
1732 switch (code)
1734 CASE_CONVERT:
1735 case FLOAT_EXPR:
1736 case FIX_TRUNC_EXPR:
1737 case FIXED_CONVERT_EXPR:
1738 return fold_convert_const (code, type, arg0);
1740 case ADDR_SPACE_CONVERT_EXPR:
1741 /* If the source address is 0, and the source address space
1742 cannot have a valid object at 0, fold to dest type null. */
1743 if (integer_zerop (arg0)
1744 && !(targetm.addr_space.zero_address_valid
1745 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1746 return fold_convert_const (code, type, arg0);
1747 break;
1749 case VIEW_CONVERT_EXPR:
1750 return fold_view_convert_expr (type, arg0);
1752 case NEGATE_EXPR:
1754 /* Can't call fold_negate_const directly here as that doesn't
1755 handle all cases and we might not be able to negate some
1756 constants. */
1757 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1758 if (tem && CONSTANT_CLASS_P (tem))
1759 return tem;
1760 break;
1763 case ABS_EXPR:
1764 case ABSU_EXPR:
1765 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1766 return fold_abs_const (arg0, type);
1767 break;
1769 case CONJ_EXPR:
1770 if (TREE_CODE (arg0) == COMPLEX_CST)
1772 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1773 TREE_TYPE (type));
1774 return build_complex (type, TREE_REALPART (arg0), ipart);
1776 break;
1778 case BIT_NOT_EXPR:
1779 if (TREE_CODE (arg0) == INTEGER_CST)
1780 return fold_not_const (arg0, type);
1781 else if (POLY_INT_CST_P (arg0))
1782 return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1783 /* Perform BIT_NOT_EXPR on each element individually. */
1784 else if (TREE_CODE (arg0) == VECTOR_CST)
1786 tree elem;
1788 /* This can cope with stepped encodings because ~x == -1 - x. */
1789 tree_vector_builder elements;
1790 elements.new_unary_operation (type, arg0, true);
1791 unsigned int i, count = elements.encoded_nelts ();
1792 for (i = 0; i < count; ++i)
1794 elem = VECTOR_CST_ELT (arg0, i);
1795 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1796 if (elem == NULL_TREE)
1797 break;
1798 elements.quick_push (elem);
1800 if (i == count)
1801 return elements.build ();
1803 break;
1805 case TRUTH_NOT_EXPR:
1806 if (TREE_CODE (arg0) == INTEGER_CST)
1807 return constant_boolean_node (integer_zerop (arg0), type);
1808 break;
1810 case REALPART_EXPR:
1811 if (TREE_CODE (arg0) == COMPLEX_CST)
1812 return fold_convert (type, TREE_REALPART (arg0));
1813 break;
1815 case IMAGPART_EXPR:
1816 if (TREE_CODE (arg0) == COMPLEX_CST)
1817 return fold_convert (type, TREE_IMAGPART (arg0));
1818 break;
1820 case VEC_UNPACK_LO_EXPR:
1821 case VEC_UNPACK_HI_EXPR:
1822 case VEC_UNPACK_FLOAT_LO_EXPR:
1823 case VEC_UNPACK_FLOAT_HI_EXPR:
1824 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1825 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
1827 unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1828 enum tree_code subcode;
1830 if (TREE_CODE (arg0) != VECTOR_CST)
1831 return NULL_TREE;
1833 if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
1834 return NULL_TREE;
1835 out_nelts = in_nelts / 2;
1836 gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1838 unsigned int offset = 0;
1839 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1840 || code == VEC_UNPACK_FLOAT_LO_EXPR
1841 || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
1842 offset = out_nelts;
1844 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1845 subcode = NOP_EXPR;
1846 else if (code == VEC_UNPACK_FLOAT_LO_EXPR
1847 || code == VEC_UNPACK_FLOAT_HI_EXPR)
1848 subcode = FLOAT_EXPR;
1849 else
1850 subcode = FIX_TRUNC_EXPR;
1852 tree_vector_builder elts (type, out_nelts, 1);
1853 for (i = 0; i < out_nelts; i++)
1855 tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1856 VECTOR_CST_ELT (arg0, i + offset));
1857 if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1858 return NULL_TREE;
1859 elts.quick_push (elt);
1862 return elts.build ();
1865 case VEC_DUPLICATE_EXPR:
1866 if (CONSTANT_CLASS_P (arg0))
1867 return build_vector_from_val (type, arg0);
1868 return NULL_TREE;
1870 default:
1871 break;
1874 return NULL_TREE;
1877 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1878 indicates which particular sizetype to create. */
1880 tree
1881 size_int_kind (poly_int64 number, enum size_type_kind kind)
1883 return build_int_cst (sizetype_tab[(int) kind], number);
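/* Example: the size_int macro in tree.h wraps this function, so
   size_int (4) is size_int_kind (4, stk_sizetype) and yields the
   sizetype constant 4; the other kinds select ssizetype, bitsizetype
   and sbitsizetype respectively.  */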
1886 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1887 is a tree code. The type of the result is taken from the operands.
1888 Both must be equivalent integer types, ala int_binop_types_match_p.
1889 If the operands are constant, so is the result. */
1891 tree
1892 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1894 tree type = TREE_TYPE (arg0);
1896 if (arg0 == error_mark_node || arg1 == error_mark_node)
1897 return error_mark_node;
1899 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1900 TREE_TYPE (arg1)));
1902 /* Handle the special case of two poly_int constants faster. */
1903 if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1905 /* And some specific cases even faster than that. */
1906 if (code == PLUS_EXPR)
1908 if (integer_zerop (arg0)
1909 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1910 return arg1;
1911 if (integer_zerop (arg1)
1912 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1913 return arg0;
1915 else if (code == MINUS_EXPR)
1917 if (integer_zerop (arg1)
1918 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1919 return arg0;
1921 else if (code == MULT_EXPR)
1923 if (integer_onep (arg0)
1924 && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1925 return arg1;
1928 /* Handle general case of two integer constants. For sizetype
1929 constant calculations we always want to know about overflow,
1930 even in the unsigned case. */
1931 tree res = int_const_binop (code, arg0, arg1, -1);
1932 if (res != NULL_TREE)
1933 return res;
1936 return fold_build2_loc (loc, code, type, arg0, arg1);
1939 /* Given two values, either both of sizetype or both of bitsizetype,
1940 compute the difference between the two values. Return the value
1941 in signed type corresponding to the type of the operands. */
1943 tree
1944 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1946 tree type = TREE_TYPE (arg0);
1947 tree ctype;
1949 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1950 TREE_TYPE (arg1)));
1952 /* If the type is already signed, just do the simple thing. */
1953 if (!TYPE_UNSIGNED (type))
1954 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1956 if (type == sizetype)
1957 ctype = ssizetype;
1958 else if (type == bitsizetype)
1959 ctype = sbitsizetype;
1960 else
1961 ctype = signed_type_for (type);
1963 /* If either operand is not a constant, do the conversions to the signed
1964 type and subtract. The hardware will do the right thing with any
1965 overflow in the subtraction. */
1966 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1967 return size_binop_loc (loc, MINUS_EXPR,
1968 fold_convert_loc (loc, ctype, arg0),
1969 fold_convert_loc (loc, ctype, arg1));
1971 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1972 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1973 overflow) and negate (which can't either). Special-case a result
1974 of zero while we're here. */
1975 if (tree_int_cst_equal (arg0, arg1))
1976 return build_int_cst (ctype, 0);
1977 else if (tree_int_cst_lt (arg1, arg0))
1978 return fold_convert_loc (loc, ctype,
1979 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1980 else
1981 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1982 fold_convert_loc (loc, ctype,
1983 size_binop_loc (loc,
1984 MINUS_EXPR,
1985 arg1, arg0)));
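/* Illustrative example (editorial sketch, not part of the original
   source): size_diffop (size_int (2), size_int (5)) returns the
   ssizetype constant -3; the subtraction is performed as 5 - 2 in the
   unsigned type and the result subtracted from zero after conversion,
   so no intermediate step overflows.  */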
1988 /* A subroutine of fold_convert_const handling conversions of an
1989 INTEGER_CST to another integer type. */
1991 static tree
1992 fold_convert_const_int_from_int (tree type, const_tree arg1)
1994 /* Given an integer constant, make a new constant with the new type,
1995 appropriately sign-extended or truncated. Use widest_int
1996 so that any extension is done according to ARG1's type. */
1997 return force_fit_type (type, wi::to_widest (arg1),
1998 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1999 TREE_OVERFLOW (arg1));
2002 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2003 to an integer type. */
2005 static tree
2006 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2008 bool overflow = false;
2009 tree t;
2011 /* The following code implements the floating point to integer
2012 conversion rules required by the Java Language Specification,
2013 that IEEE NaNs are mapped to zero and values that overflow
2014 the target precision saturate, i.e. values greater than
2015 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2016 are mapped to INT_MIN. These semantics are allowed by the
2017 C and C++ standards that simply state that the behavior of
2018 FP-to-integer conversion is unspecified upon overflow. */
2020 wide_int val;
2021 REAL_VALUE_TYPE r;
2022 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2024 switch (code)
2026 case FIX_TRUNC_EXPR:
2027 real_trunc (&r, VOIDmode, &x);
2028 break;
2030 default:
2031 gcc_unreachable ();
2034 /* If R is NaN, return zero and show we have an overflow. */
2035 if (REAL_VALUE_ISNAN (r))
2037 overflow = true;
2038 val = wi::zero (TYPE_PRECISION (type));
2041 /* See if R is less than the lower bound or greater than the
2042 upper bound. */
2044 if (! overflow)
2046 tree lt = TYPE_MIN_VALUE (type);
2047 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2048 if (real_less (&r, &l))
2050 overflow = true;
2051 val = wi::to_wide (lt);
2055 if (! overflow)
2057 tree ut = TYPE_MAX_VALUE (type);
2058 if (ut)
2060 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2061 if (real_less (&u, &r))
2063 overflow = true;
2064 val = wi::to_wide (ut);
2069 if (! overflow)
2070 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2072 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2073 return t;
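/* Illustrative example (editorial sketch, not part of the original
   source): under these saturating semantics, folding
   (signed char) 1.0e10 yields 127 (the type's maximum) and
   (int) __builtin_nan ("") yields 0, both with TREE_OVERFLOW set on
   the resulting INTEGER_CST.  */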
2076 /* A subroutine of fold_convert_const handling conversions of a
2077 FIXED_CST to an integer type. */
2079 static tree
2080 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2082 tree t;
2083 double_int temp, temp_trunc;
2084 scalar_mode mode;
2086 /* Right shift FIXED_CST to temp by fbit. */
2087 temp = TREE_FIXED_CST (arg1).data;
2088 mode = TREE_FIXED_CST (arg1).mode;
2089 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2091 temp = temp.rshift (GET_MODE_FBIT (mode),
2092 HOST_BITS_PER_DOUBLE_INT,
2093 SIGNED_FIXED_POINT_MODE_P (mode));
2095 /* Left shift temp to temp_trunc by fbit. */
2096 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2097 HOST_BITS_PER_DOUBLE_INT,
2098 SIGNED_FIXED_POINT_MODE_P (mode));
2100 else
2102 temp = double_int_zero;
2103 temp_trunc = double_int_zero;
2106 /* If FIXED_CST is negative, we need to round the value toward 0:
2107 if the fractional bits are nonzero, add 1 to temp. */
2108 if (SIGNED_FIXED_POINT_MODE_P (mode)
2109 && temp_trunc.is_negative ()
2110 && TREE_FIXED_CST (arg1).data != temp_trunc)
2111 temp += double_int_one;
2113 /* Given a fixed-point constant, make a new constant with the new type,
2114 appropriately sign-extended or truncated. */
2115 t = force_fit_type (type, temp, -1,
2116 (temp.is_negative ()
2117 && (TYPE_UNSIGNED (type)
2118 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2119 | TREE_OVERFLOW (arg1));
2121 return t;
2124 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2125 to another floating point type. */
2127 static tree
2128 fold_convert_const_real_from_real (tree type, const_tree arg1)
2130 REAL_VALUE_TYPE value;
2131 tree t;
2133 /* Don't perform the operation if flag_signaling_nans is on
2134 and the operand is a signaling NaN. */
2135 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2136 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2137 return NULL_TREE;
2139 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2140 t = build_real (type, value);
2142 /* If converting an infinity or NAN to a representation that doesn't
2143 have one, set the overflow bit so that we can produce some kind of
2144 error message at the appropriate point if necessary. It's not the
2145 most user-friendly message, but it's better than nothing. */
2146 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2147 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2148 TREE_OVERFLOW (t) = 1;
2149 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2150 && !MODE_HAS_NANS (TYPE_MODE (type)))
2151 TREE_OVERFLOW (t) = 1;
2152 /* Regular overflow: the conversion produced an infinity in a mode
2153 that can't represent infinities. */
2154 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2155 && REAL_VALUE_ISINF (value)
2156 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2157 TREE_OVERFLOW (t) = 1;
2158 else
2159 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2160 return t;
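/* Illustrative example (editorial sketch, not part of the original
   source): on an IEEE target, folding (float) 1.0e300 produces +Inf
   with no overflow bit, since SFmode has infinities; per the checks
   above, the overflow bit is set only when the destination mode cannot
   represent the infinity or NaN involved.  */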
2163 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2164 to a floating point type. */
2166 static tree
2167 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2169 REAL_VALUE_TYPE value;
2170 tree t;
2172 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2173 &TREE_FIXED_CST (arg1));
2174 t = build_real (type, value);
2176 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2177 return t;
2180 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2181 to another fixed-point type. */
2183 static tree
2184 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2186 FIXED_VALUE_TYPE value;
2187 tree t;
2188 bool overflow_p;
2190 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2191 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2192 t = build_fixed (type, value);
2194 /* Propagate overflow flags. */
2195 if (overflow_p | TREE_OVERFLOW (arg1))
2196 TREE_OVERFLOW (t) = 1;
2197 return t;
2200 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2201 to a fixed-point type. */
2203 static tree
2204 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2206 FIXED_VALUE_TYPE value;
2207 tree t;
2208 bool overflow_p;
2209 double_int di;
2211 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2213 di.low = TREE_INT_CST_ELT (arg1, 0);
2214 if (TREE_INT_CST_NUNITS (arg1) == 1)
2215 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2216 else
2217 di.high = TREE_INT_CST_ELT (arg1, 1);
2219 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2220 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2221 TYPE_SATURATING (type));
2222 t = build_fixed (type, value);
2224 /* Propagate overflow flags. */
2225 if (overflow_p | TREE_OVERFLOW (arg1))
2226 TREE_OVERFLOW (t) = 1;
2227 return t;
2230 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2231 to a fixed-point type. */
2233 static tree
2234 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2236 FIXED_VALUE_TYPE value;
2237 tree t;
2238 bool overflow_p;
2240 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2241 &TREE_REAL_CST (arg1),
2242 TYPE_SATURATING (type));
2243 t = build_fixed (type, value);
2245 /* Propagate overflow flags. */
2246 if (overflow_p | TREE_OVERFLOW (arg1))
2247 TREE_OVERFLOW (t) = 1;
2248 return t;
2251 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2252 type TYPE. If no simplification can be done return NULL_TREE. */
2254 static tree
2255 fold_convert_const (enum tree_code code, tree type, tree arg1)
2257 tree arg_type = TREE_TYPE (arg1);
2258 if (arg_type == type)
2259 return arg1;
2261 /* We can't widen types, since the runtime value could overflow the
2262 original type before being extended to the new type. */
2263 if (POLY_INT_CST_P (arg1)
2264 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2265 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2266 return build_poly_int_cst (type,
2267 poly_wide_int::from (poly_int_cst_value (arg1),
2268 TYPE_PRECISION (type),
2269 TYPE_SIGN (arg_type)));
2271 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2272 || TREE_CODE (type) == OFFSET_TYPE)
2274 if (TREE_CODE (arg1) == INTEGER_CST)
2275 return fold_convert_const_int_from_int (type, arg1);
2276 else if (TREE_CODE (arg1) == REAL_CST)
2277 return fold_convert_const_int_from_real (code, type, arg1);
2278 else if (TREE_CODE (arg1) == FIXED_CST)
2279 return fold_convert_const_int_from_fixed (type, arg1);
2281 else if (TREE_CODE (type) == REAL_TYPE)
2283 if (TREE_CODE (arg1) == INTEGER_CST)
2284 return build_real_from_int_cst (type, arg1);
2285 else if (TREE_CODE (arg1) == REAL_CST)
2286 return fold_convert_const_real_from_real (type, arg1);
2287 else if (TREE_CODE (arg1) == FIXED_CST)
2288 return fold_convert_const_real_from_fixed (type, arg1);
2290 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2292 if (TREE_CODE (arg1) == FIXED_CST)
2293 return fold_convert_const_fixed_from_fixed (type, arg1);
2294 else if (TREE_CODE (arg1) == INTEGER_CST)
2295 return fold_convert_const_fixed_from_int (type, arg1);
2296 else if (TREE_CODE (arg1) == REAL_CST)
2297 return fold_convert_const_fixed_from_real (type, arg1);
2299 else if (TREE_CODE (type) == VECTOR_TYPE)
2301 if (TREE_CODE (arg1) == VECTOR_CST
2302 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2304 tree elttype = TREE_TYPE (type);
2305 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2306 /* We can't handle steps directly when extending, since the
2307 values need to wrap at the original precision first. */
2308 bool step_ok_p
2309 = (INTEGRAL_TYPE_P (elttype)
2310 && INTEGRAL_TYPE_P (arg1_elttype)
2311 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2312 tree_vector_builder v;
2313 if (!v.new_unary_operation (type, arg1, step_ok_p))
2314 return NULL_TREE;
2315 unsigned int len = v.encoded_nelts ();
2316 for (unsigned int i = 0; i < len; ++i)
2318 tree elt = VECTOR_CST_ELT (arg1, i);
2319 tree cvt = fold_convert_const (code, elttype, elt);
2320 if (cvt == NULL_TREE)
2321 return NULL_TREE;
2322 v.quick_push (cvt);
2324 return v.build ();
2327 return NULL_TREE;
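/* Illustrative example (editorial sketch, not part of the original
   source): a call such as

     fold_convert_const (NOP_EXPR, long_long_integer_type_node,
                         build_int_cst (integer_type_node, 7))

   dispatches to fold_convert_const_int_from_int and returns the
   long long INTEGER_CST 7, whereas any non-constant ARG1 falls
   through every branch and yields NULL_TREE.  */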
2330 /* Construct a vector of zero elements of vector type TYPE. */
2332 static tree
2333 build_zero_vector (tree type)
2335 tree t;
2337 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2338 return build_vector_from_val (type, t);
2341 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2343 bool
2344 fold_convertible_p (const_tree type, const_tree arg)
2346 tree orig = TREE_TYPE (arg);
2348 if (type == orig)
2349 return true;
2351 if (TREE_CODE (arg) == ERROR_MARK
2352 || TREE_CODE (type) == ERROR_MARK
2353 || TREE_CODE (orig) == ERROR_MARK)
2354 return false;
2356 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2357 return true;
2359 switch (TREE_CODE (type))
2361 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2362 case POINTER_TYPE: case REFERENCE_TYPE:
2363 case OFFSET_TYPE:
2364 return (INTEGRAL_TYPE_P (orig)
2365 || (POINTER_TYPE_P (orig)
2366 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2367 || TREE_CODE (orig) == OFFSET_TYPE);
2369 case REAL_TYPE:
2370 case FIXED_POINT_TYPE:
2371 case VOID_TYPE:
2372 return TREE_CODE (type) == TREE_CODE (orig);
2374 case VECTOR_TYPE:
2375 return (VECTOR_TYPE_P (orig)
2376 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2377 TYPE_VECTOR_SUBPARTS (orig))
2378 && fold_convertible_p (TREE_TYPE (type), TREE_TYPE (orig)));
2380 default:
2381 return false;
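/* Illustrative example (editorial sketch, not part of the original
   source): an ARG of enumeral type is fold-convertible to int (both
   are integral), but a pointer-typed ARG is not fold-convertible to a
   wider integer type, since the precision check above fails and the
   conversion could not be a value-preserving NOP.  */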
2385 /* Convert expression ARG to type TYPE. Used by the middle-end for
2386 simple conversions in preference to calling the front-end's convert. */
2388 tree
2389 fold_convert_loc (location_t loc, tree type, tree arg)
2391 tree orig = TREE_TYPE (arg);
2392 tree tem;
2394 if (type == orig)
2395 return arg;
2397 if (TREE_CODE (arg) == ERROR_MARK
2398 || TREE_CODE (type) == ERROR_MARK
2399 || TREE_CODE (orig) == ERROR_MARK)
2400 return error_mark_node;
2402 switch (TREE_CODE (type))
2404 case POINTER_TYPE:
2405 case REFERENCE_TYPE:
2406 /* Handle conversions between pointers to different address spaces. */
2407 if (POINTER_TYPE_P (orig)
2408 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2409 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2410 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2411 /* fall through */
2413 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2414 case OFFSET_TYPE:
2415 if (TREE_CODE (arg) == INTEGER_CST)
2417 tem = fold_convert_const (NOP_EXPR, type, arg);
2418 if (tem != NULL_TREE)
2419 return tem;
2421 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2422 || TREE_CODE (orig) == OFFSET_TYPE)
2423 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2424 if (TREE_CODE (orig) == COMPLEX_TYPE)
2425 return fold_convert_loc (loc, type,
2426 fold_build1_loc (loc, REALPART_EXPR,
2427 TREE_TYPE (orig), arg));
2428 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2429 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2430 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2432 case REAL_TYPE:
2433 if (TREE_CODE (arg) == INTEGER_CST)
2435 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2436 if (tem != NULL_TREE)
2437 return tem;
2439 else if (TREE_CODE (arg) == REAL_CST)
2441 tem = fold_convert_const (NOP_EXPR, type, arg);
2442 if (tem != NULL_TREE)
2443 return tem;
2445 else if (TREE_CODE (arg) == FIXED_CST)
2447 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2448 if (tem != NULL_TREE)
2449 return tem;
2452 switch (TREE_CODE (orig))
2454 case INTEGER_TYPE:
2455 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2456 case POINTER_TYPE: case REFERENCE_TYPE:
2457 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2459 case REAL_TYPE:
2460 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2462 case FIXED_POINT_TYPE:
2463 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2465 case COMPLEX_TYPE:
2466 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2467 return fold_convert_loc (loc, type, tem);
2469 default:
2470 gcc_unreachable ();
2473 case FIXED_POINT_TYPE:
2474 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2475 || TREE_CODE (arg) == REAL_CST)
2477 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2478 if (tem != NULL_TREE)
2479 goto fold_convert_exit;
2482 switch (TREE_CODE (orig))
2484 case FIXED_POINT_TYPE:
2485 case INTEGER_TYPE:
2486 case ENUMERAL_TYPE:
2487 case BOOLEAN_TYPE:
2488 case REAL_TYPE:
2489 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2491 case COMPLEX_TYPE:
2492 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2493 return fold_convert_loc (loc, type, tem);
2495 default:
2496 gcc_unreachable ();
2499 case COMPLEX_TYPE:
2500 switch (TREE_CODE (orig))
2502 case INTEGER_TYPE:
2503 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2504 case POINTER_TYPE: case REFERENCE_TYPE:
2505 case REAL_TYPE:
2506 case FIXED_POINT_TYPE:
2507 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2508 fold_convert_loc (loc, TREE_TYPE (type), arg),
2509 fold_convert_loc (loc, TREE_TYPE (type),
2510 integer_zero_node));
2511 case COMPLEX_TYPE:
2513 tree rpart, ipart;
2515 if (TREE_CODE (arg) == COMPLEX_EXPR)
2517 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2518 TREE_OPERAND (arg, 0));
2519 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2520 TREE_OPERAND (arg, 1));
2521 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2524 arg = save_expr (arg);
2525 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2526 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2527 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2528 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2529 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2532 default:
2533 gcc_unreachable ();
2536 case VECTOR_TYPE:
2537 if (integer_zerop (arg))
2538 return build_zero_vector (type);
2539 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2540 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2541 || TREE_CODE (orig) == VECTOR_TYPE);
2542 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2544 case VOID_TYPE:
2545 tem = fold_ignored_result (arg);
2546 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2548 default:
2549 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2550 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2551 gcc_unreachable ();
2553 fold_convert_exit:
2554 protected_set_expr_location_unshare (tem, loc);
2555 return tem;
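/* Illustrative example (editorial sketch, not part of the original
   source): fold_convert of a _Complex double CX to double reduces to
   the converted REALPART_EXPR <CX>, while converting CX to
   _Complex float wraps CX in a SAVE_EXPR and converts the real and
   imaginary parts separately, as in the COMPLEX_TYPE case above.  */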
2558 /* Return false if expr can be assumed not to be an lvalue, true
2559 otherwise. */
2561 static bool
2562 maybe_lvalue_p (const_tree x)
2564 /* We only need to wrap lvalue tree codes. */
2565 switch (TREE_CODE (x))
2567 case VAR_DECL:
2568 case PARM_DECL:
2569 case RESULT_DECL:
2570 case LABEL_DECL:
2571 case FUNCTION_DECL:
2572 case SSA_NAME:
2574 case COMPONENT_REF:
2575 case MEM_REF:
2576 case INDIRECT_REF:
2577 case ARRAY_REF:
2578 case ARRAY_RANGE_REF:
2579 case BIT_FIELD_REF:
2580 case OBJ_TYPE_REF:
2582 case REALPART_EXPR:
2583 case IMAGPART_EXPR:
2584 case PREINCREMENT_EXPR:
2585 case PREDECREMENT_EXPR:
2586 case SAVE_EXPR:
2587 case TRY_CATCH_EXPR:
2588 case WITH_CLEANUP_EXPR:
2589 case COMPOUND_EXPR:
2590 case MODIFY_EXPR:
2591 case TARGET_EXPR:
2592 case COND_EXPR:
2593 case BIND_EXPR:
2594 case VIEW_CONVERT_EXPR:
2595 break;
2597 default:
2598 /* Assume the worst for front-end tree codes. */
2599 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2600 break;
2601 return false;
2604 return true;
2607 /* Return an expr equal to X but certainly not valid as an lvalue. */
2609 tree
2610 non_lvalue_loc (location_t loc, tree x)
2612 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2613 us. */
2614 if (in_gimple_form)
2615 return x;
2617 if (! maybe_lvalue_p (x))
2618 return x;
2619 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2622 /* When pedantic, return an expr equal to X but certainly not valid as a
2623 pedantic lvalue. Otherwise, return X. */
2625 static tree
2626 pedantic_non_lvalue_loc (location_t loc, tree x)
2628 return protected_set_expr_location_unshare (x, loc);
2631 /* Given a tree comparison code, return the code that is the logical inverse.
2632 It is generally not safe to do this for floating-point comparisons, except
2633 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2634 ERROR_MARK in this case. */
2636 enum tree_code
2637 invert_tree_comparison (enum tree_code code, bool honor_nans)
2639 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2640 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2641 return ERROR_MARK;
2643 switch (code)
2645 case EQ_EXPR:
2646 return NE_EXPR;
2647 case NE_EXPR:
2648 return EQ_EXPR;
2649 case GT_EXPR:
2650 return honor_nans ? UNLE_EXPR : LE_EXPR;
2651 case GE_EXPR:
2652 return honor_nans ? UNLT_EXPR : LT_EXPR;
2653 case LT_EXPR:
2654 return honor_nans ? UNGE_EXPR : GE_EXPR;
2655 case LE_EXPR:
2656 return honor_nans ? UNGT_EXPR : GT_EXPR;
2657 case LTGT_EXPR:
2658 return UNEQ_EXPR;
2659 case UNEQ_EXPR:
2660 return LTGT_EXPR;
2661 case UNGT_EXPR:
2662 return LE_EXPR;
2663 case UNGE_EXPR:
2664 return LT_EXPR;
2665 case UNLT_EXPR:
2666 return GE_EXPR;
2667 case UNLE_EXPR:
2668 return GT_EXPR;
2669 case ORDERED_EXPR:
2670 return UNORDERED_EXPR;
2671 case UNORDERED_EXPR:
2672 return ORDERED_EXPR;
2673 default:
2674 gcc_unreachable ();
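/* Illustrative example (editorial sketch, not part of the original
   source): invert_tree_comparison (LT_EXPR, true) is UNGE_EXPR when
   -fno-trapping-math is in effect, since !(x < y) must also hold on
   unordered operands; with trapping math the call returns ERROR_MARK,
   because x < y traps on a NaN while x unge y would not.  */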
2678 /* Similar, but return the comparison that results if the operands are
2679 swapped. This is safe for floating-point. */
2681 enum tree_code
2682 swap_tree_comparison (enum tree_code code)
2684 switch (code)
2686 case EQ_EXPR:
2687 case NE_EXPR:
2688 case ORDERED_EXPR:
2689 case UNORDERED_EXPR:
2690 case LTGT_EXPR:
2691 case UNEQ_EXPR:
2692 return code;
2693 case GT_EXPR:
2694 return LT_EXPR;
2695 case GE_EXPR:
2696 return LE_EXPR;
2697 case LT_EXPR:
2698 return GT_EXPR;
2699 case LE_EXPR:
2700 return GE_EXPR;
2701 case UNGT_EXPR:
2702 return UNLT_EXPR;
2703 case UNGE_EXPR:
2704 return UNLE_EXPR;
2705 case UNLT_EXPR:
2706 return UNGT_EXPR;
2707 case UNLE_EXPR:
2708 return UNGE_EXPR;
2709 default:
2710 gcc_unreachable ();
2715 /* Convert a comparison tree code from an enum tree_code representation
2716 into a compcode bit-based encoding. This function is the inverse of
2717 compcode_to_comparison. */
2719 static enum comparison_code
2720 comparison_to_compcode (enum tree_code code)
2722 switch (code)
2724 case LT_EXPR:
2725 return COMPCODE_LT;
2726 case EQ_EXPR:
2727 return COMPCODE_EQ;
2728 case LE_EXPR:
2729 return COMPCODE_LE;
2730 case GT_EXPR:
2731 return COMPCODE_GT;
2732 case NE_EXPR:
2733 return COMPCODE_NE;
2734 case GE_EXPR:
2735 return COMPCODE_GE;
2736 case ORDERED_EXPR:
2737 return COMPCODE_ORD;
2738 case UNORDERED_EXPR:
2739 return COMPCODE_UNORD;
2740 case UNLT_EXPR:
2741 return COMPCODE_UNLT;
2742 case UNEQ_EXPR:
2743 return COMPCODE_UNEQ;
2744 case UNLE_EXPR:
2745 return COMPCODE_UNLE;
2746 case UNGT_EXPR:
2747 return COMPCODE_UNGT;
2748 case LTGT_EXPR:
2749 return COMPCODE_LTGT;
2750 case UNGE_EXPR:
2751 return COMPCODE_UNGE;
2752 default:
2753 gcc_unreachable ();
2757 /* Convert a compcode bit-based encoding of a comparison operator back
2758 to GCC's enum tree_code representation. This function is the
2759 inverse of comparison_to_compcode. */
2761 static enum tree_code
2762 compcode_to_comparison (enum comparison_code code)
2764 switch (code)
2766 case COMPCODE_LT:
2767 return LT_EXPR;
2768 case COMPCODE_EQ:
2769 return EQ_EXPR;
2770 case COMPCODE_LE:
2771 return LE_EXPR;
2772 case COMPCODE_GT:
2773 return GT_EXPR;
2774 case COMPCODE_NE:
2775 return NE_EXPR;
2776 case COMPCODE_GE:
2777 return GE_EXPR;
2778 case COMPCODE_ORD:
2779 return ORDERED_EXPR;
2780 case COMPCODE_UNORD:
2781 return UNORDERED_EXPR;
2782 case COMPCODE_UNLT:
2783 return UNLT_EXPR;
2784 case COMPCODE_UNEQ:
2785 return UNEQ_EXPR;
2786 case COMPCODE_UNLE:
2787 return UNLE_EXPR;
2788 case COMPCODE_UNGT:
2789 return UNGT_EXPR;
2790 case COMPCODE_LTGT:
2791 return LTGT_EXPR;
2792 case COMPCODE_UNGE:
2793 return UNGE_EXPR;
2794 default:
2795 gcc_unreachable ();
2799 /* Return true if COND1 tests the opposite condition of COND2. */
2801 bool
2802 inverse_conditions_p (const_tree cond1, const_tree cond2)
2804 return (COMPARISON_CLASS_P (cond1)
2805 && COMPARISON_CLASS_P (cond2)
2806 && (invert_tree_comparison
2807 (TREE_CODE (cond1),
2808 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2809 && operand_equal_p (TREE_OPERAND (cond1, 0),
2810 TREE_OPERAND (cond2, 0), 0)
2811 && operand_equal_p (TREE_OPERAND (cond1, 1),
2812 TREE_OPERAND (cond2, 1), 0));
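/* Illustrative example (editorial sketch, not part of the original
   source): for integer operands, inverse_conditions_p (x < y, x >= y)
   is true, since invert_tree_comparison maps LT_EXPR to GE_EXPR when
   NaNs need not be honored and both operand pairs match.  */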
2815 /* Return a tree for the comparison which is the combination of
2816 doing the AND or OR (depending on CODE) of the two operations LCODE
2817 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2818 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2819 if this makes the transformation invalid. */
2821 tree
2822 combine_comparisons (location_t loc,
2823 enum tree_code code, enum tree_code lcode,
2824 enum tree_code rcode, tree truth_type,
2825 tree ll_arg, tree lr_arg)
2827 bool honor_nans = HONOR_NANS (ll_arg);
2828 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2829 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2830 int compcode;
2832 switch (code)
2834 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2835 compcode = lcompcode & rcompcode;
2836 break;
2838 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2839 compcode = lcompcode | rcompcode;
2840 break;
2842 default:
2843 return NULL_TREE;
2846 if (!honor_nans)
2848 /* Eliminate unordered comparisons, as well as LTGT and ORD
2849 which are not used unless the mode has NaNs. */
2850 compcode &= ~COMPCODE_UNORD;
2851 if (compcode == COMPCODE_LTGT)
2852 compcode = COMPCODE_NE;
2853 else if (compcode == COMPCODE_ORD)
2854 compcode = COMPCODE_TRUE;
2856 else if (flag_trapping_math)
2858 /* Check that the original operation and the optimized ones will trap
2859 under the same condition. */
2860 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2861 && (lcompcode != COMPCODE_EQ)
2862 && (lcompcode != COMPCODE_ORD);
2863 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2864 && (rcompcode != COMPCODE_EQ)
2865 && (rcompcode != COMPCODE_ORD);
2866 bool trap = (compcode & COMPCODE_UNORD) == 0
2867 && (compcode != COMPCODE_EQ)
2868 && (compcode != COMPCODE_ORD);
2870 /* In a short-circuited boolean expression the LHS might be
2871 such that the RHS, if evaluated, will never trap. For
2872 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2873 if neither x nor y is NaN. (This is a mixed blessing: for
2874 example, the expression above will never trap, hence
2875 optimizing it to x < y would be invalid). */
2876 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2877 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2878 rtrap = false;
2880 /* If the comparison was short-circuited, and only the RHS
2881 trapped, we may now generate a spurious trap. */
2882 if (rtrap && !ltrap
2883 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2884 return NULL_TREE;
2886 /* If we changed the conditions that cause a trap, we lose. */
2887 if ((ltrap || rtrap) != trap)
2888 return NULL_TREE;
2891 if (compcode == COMPCODE_TRUE)
2892 return constant_boolean_node (true, truth_type);
2893 else if (compcode == COMPCODE_FALSE)
2894 return constant_boolean_node (false, truth_type);
2895 else
2897 enum tree_code tcode;
2899 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2900 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
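/* Illustrative worked example (editorial sketch, not part of the
   original source): for integer operands x and y,
   (x < y) && (x <= y) combines as COMPCODE_LT & COMPCODE_LE
   == 1 & 3 == COMPCODE_LT, folding back to x < y, while
   (x < y) || (x == y) combines as 1 | 2 == COMPCODE_LE,
   i.e. x <= y.  */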
2904 /* Return nonzero if two operands (typically of the same tree node)
2905 are necessarily equal. FLAGS modifies behavior as follows:
2907 If OEP_ONLY_CONST is set, only return nonzero for constants.
2908 This function tests whether the operands are indistinguishable;
2909 it does not test whether they are equal using C's == operation.
2910 The distinction is important for IEEE floating point, because
2911 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2912 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2914 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2915 even though it may hold multiple values during a function.
2916 This is because a GCC tree node guarantees that nothing else is
2917 executed between the evaluation of its "operands" (which may often
2918 be evaluated in arbitrary order). Hence if the operands themselves
2919 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2920 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2921 unset means assuming isochronic (or instantaneous) tree equivalence.
2922 Unless comparing arbitrary expression trees, such as from different
2923 statements, this flag can usually be left unset.
2925 If OEP_PURE_SAME is set, then pure functions with identical arguments
2926 are considered the same. It is used when the caller has other ways
2927 to ensure that global memory is unchanged in between.
2929 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2930 not values of expressions.
2932 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2933 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2935 If OEP_BITWISE is set, then require the values to be bitwise identical
2936 rather than simply numerically equal. Do not take advantage of things
2937 like math-related flags or undefined behavior; only return true for
2938 values that are provably bitwise identical in all circumstances.
2940 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2941 any operand with side effects. This is unnecessarily conservative in the
2942 case where we know that arg0 and arg1 are in disjoint code paths (such as
2943 in the ?: operator). In addition, OEP_MATCH_SIDE_EFFECTS is used when comparing
2944 addresses with TREE_CONSTANT flag set so we know that &var == &var
2945 even if var is volatile. */
2947 bool
2948 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
2949 unsigned int flags)
2951 bool r;
2952 if (verify_hash_value (arg0, arg1, flags, &r))
2953 return r;
2955 STRIP_ANY_LOCATION_WRAPPER (arg0);
2956 STRIP_ANY_LOCATION_WRAPPER (arg1);
2958 /* If either is ERROR_MARK, they aren't equal. */
2959 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2960 || TREE_TYPE (arg0) == error_mark_node
2961 || TREE_TYPE (arg1) == error_mark_node)
2962 return false;
2964 /* Similar, if either does not have a type (like a template id),
2965 they aren't equal. */
2966 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2967 return false;
2969 /* Bitwise identity makes no sense if the values have different layouts. */
2970 if ((flags & OEP_BITWISE)
2971 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
2972 return false;
2974 /* We cannot consider pointers to different address spaces equal. */
2975 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2976 && POINTER_TYPE_P (TREE_TYPE (arg1))
2977 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2978 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2979 return false;
2981 /* Check equality of integer constants before bailing out due to
2982 precision differences. */
2983 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2985 /* Address of INTEGER_CST is not defined; check that we did not forget
2986 to drop the OEP_ADDRESS_OF flags. */
2987 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2988 return tree_int_cst_equal (arg0, arg1);
2991 if (!(flags & OEP_ADDRESS_OF))
2993 /* If the two types don't have the same signedness, then we can't consider
2994 them equal. We must check this before the STRIP_NOPS calls
2995 because they may change the signedness of the arguments. As pointers
2996 strictly don't have a signedness, require either two pointers or
2997 two non-pointers as well. */
2998 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2999 || POINTER_TYPE_P (TREE_TYPE (arg0))
3000 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3001 return false;
3003 /* If the two types don't have the same precision, then it is not safe
3004 to strip NOPs. */
3005 if (element_precision (TREE_TYPE (arg0))
3006 != element_precision (TREE_TYPE (arg1)))
3007 return false;
3009 STRIP_NOPS (arg0);
3010 STRIP_NOPS (arg1);
3012 #if 0
3013 /* FIXME: The Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
3014 sanity check once the issue is solved. */
3015 else
3016 /* Addresses of conversions and SSA_NAMEs (and many other things)
3017 are not defined. Check that we did not forget to drop the
3018 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3019 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3020 && TREE_CODE (arg0) != SSA_NAME);
3021 #endif
3023 /* In case both args are comparisons but with different comparison
3024 code, try to swap the comparison operands of one arg to produce
3025 a match and compare that variant. */
3026 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3027 && COMPARISON_CLASS_P (arg0)
3028 && COMPARISON_CLASS_P (arg1))
3030 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3032 if (TREE_CODE (arg0) == swap_code)
3033 return operand_equal_p (TREE_OPERAND (arg0, 0),
3034 TREE_OPERAND (arg1, 1), flags)
3035 && operand_equal_p (TREE_OPERAND (arg0, 1),
3036 TREE_OPERAND (arg1, 0), flags);
3039 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3041 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3042 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3044 else if (flags & OEP_ADDRESS_OF)
3046 /* If we are interested in comparing addresses ignore
3047 MEM_REF wrappings of the base that can appear just for
3048 TBAA reasons. */
3049 if (TREE_CODE (arg0) == MEM_REF
3050 && DECL_P (arg1)
3051 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3052 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3053 && integer_zerop (TREE_OPERAND (arg0, 1)))
3054 return true;
3055 else if (TREE_CODE (arg1) == MEM_REF
3056 && DECL_P (arg0)
3057 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3058 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3059 && integer_zerop (TREE_OPERAND (arg1, 1)))
3060 return true;
3061 return false;
3063 else
3064 return false;
3067 /* When not checking addresses, this is needed for conversions and for
3068 COMPONENT_REF. Might as well play it safe and always test this. */
3069 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3070 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3071 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3072 && !(flags & OEP_ADDRESS_OF)))
3073 return false;
3075 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3076 We don't care about side effects in that case because the SAVE_EXPR
3077 takes care of that for us. In all other cases, two expressions are
3078 equal if they have no side effects. If we have two identical
3079 expressions with side effects that should be treated the same due
3080 to the only side effects being identical SAVE_EXPR's, that will
3081 be detected in the recursive calls below.
3082 If we are taking an invariant address of two identical objects
3083 they are necessarily equal as well. */
3084 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3085 && (TREE_CODE (arg0) == SAVE_EXPR
3086 || (flags & OEP_MATCH_SIDE_EFFECTS)
3087 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3088 return true;
3090 /* Next handle constant cases, those for which we can return 1 even
3091 if ONLY_CONST is set. */
3092 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3093 switch (TREE_CODE (arg0))
3095 case INTEGER_CST:
3096 return tree_int_cst_equal (arg0, arg1);
3098 case FIXED_CST:
3099 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3100 TREE_FIXED_CST (arg1));
3102 case REAL_CST:
3103 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3104 return true;
3106 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3108 /* If we do not distinguish between signed and unsigned zero,
3109 consider them equal. */
3110 if (real_zerop (arg0) && real_zerop (arg1))
3111 return true;
3113 return false;
3115 case VECTOR_CST:
3117 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3118 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3119 return false;
3121 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3122 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3123 return false;
3125 unsigned int count = vector_cst_encoded_nelts (arg0);
3126 for (unsigned int i = 0; i < count; ++i)
3127 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3128 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3129 return false;
3130 return true;
3133 case COMPLEX_CST:
3134 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3135 flags)
3136 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3137 flags));
3139 case STRING_CST:
3140 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3141 && ! memcmp (TREE_STRING_POINTER (arg0),
3142 TREE_STRING_POINTER (arg1),
3143 TREE_STRING_LENGTH (arg0)));
3145 case ADDR_EXPR:
3146 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3147 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3148 flags | OEP_ADDRESS_OF
3149 | OEP_MATCH_SIDE_EFFECTS);
3150 case CONSTRUCTOR:
3151 /* In GIMPLE empty constructors are allowed in initializers of
3152 aggregates. */
3153 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3154 default:
3155 break;
3158 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3159 two instances of undefined behavior will give identical results. */
3160 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3161 return false;
3163 /* Define macros to test an operand from arg0 and arg1 for equality and a
3164 variant that allows null and views null as being different from any
3165 non-null value. In the latter case, if either is null, then both
3166 must be; otherwise, do the normal comparison. */
3167 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3168 TREE_OPERAND (arg1, N), flags)
3170 #define OP_SAME_WITH_NULL(N) \
3171 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3172 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3174 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3176 case tcc_unary:
3177 /* Two conversions are equal only if signedness and modes match. */
3178 switch (TREE_CODE (arg0))
3180 CASE_CONVERT:
3181 case FIX_TRUNC_EXPR:
3182 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3183 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3184 return false;
3185 break;
3186 default:
3187 break;
3190 return OP_SAME (0);
3193 case tcc_comparison:
3194 case tcc_binary:
3195 if (OP_SAME (0) && OP_SAME (1))
3196 return true;
3198 /* For commutative ops, allow the other order. */
3199 return (commutative_tree_code (TREE_CODE (arg0))
3200 && operand_equal_p (TREE_OPERAND (arg0, 0),
3201 TREE_OPERAND (arg1, 1), flags)
3202 && operand_equal_p (TREE_OPERAND (arg0, 1),
3203 TREE_OPERAND (arg1, 0), flags));
3205 case tcc_reference:
3206 /* If either of the pointer (or reference) expressions we are
3207 dereferencing contain a side effect, these cannot be equal,
3208 but their addresses can be. */
3209 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3210 && (TREE_SIDE_EFFECTS (arg0)
3211 || TREE_SIDE_EFFECTS (arg1)))
3212 return false;
3214 switch (TREE_CODE (arg0))
3216 case INDIRECT_REF:
3217 if (!(flags & OEP_ADDRESS_OF))
3219 if (TYPE_ALIGN (TREE_TYPE (arg0))
3220 != TYPE_ALIGN (TREE_TYPE (arg1)))
3221 return false;
3222 /* Verify that the access types are compatible. */
3223 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3224 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3225 return false;
3227 flags &= ~OEP_ADDRESS_OF;
3228 return OP_SAME (0);
3230 case IMAGPART_EXPR:
3231 /* Require the same offset. */
3232 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3233 TYPE_SIZE (TREE_TYPE (arg1)),
3234 flags & ~OEP_ADDRESS_OF))
3235 return false;
3237 /* Fallthru. */
3238 case REALPART_EXPR:
3239 case VIEW_CONVERT_EXPR:
3240 return OP_SAME (0);
3242 case TARGET_MEM_REF:
3243 case MEM_REF:
3244 if (!(flags & OEP_ADDRESS_OF))
3247 /* Require equal access sizes. */
3247 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3248 && (!TYPE_SIZE (TREE_TYPE (arg0))
3249 || !TYPE_SIZE (TREE_TYPE (arg1))
3250 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3251 TYPE_SIZE (TREE_TYPE (arg1)),
3252 flags)))
3253 return false;
3254 /* Verify that access happens in similar types. */
3255 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3256 return false;
3257 /* Verify that accesses are TBAA compatible. */
3258 if (!alias_ptr_types_compatible_p
3259 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3260 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3261 || (MR_DEPENDENCE_CLIQUE (arg0)
3262 != MR_DEPENDENCE_CLIQUE (arg1))
3263 || (MR_DEPENDENCE_BASE (arg0)
3264 != MR_DEPENDENCE_BASE (arg1)))
3265 return false;
3266 /* Verify that alignment is compatible. */
3267 if (TYPE_ALIGN (TREE_TYPE (arg0))
3268 != TYPE_ALIGN (TREE_TYPE (arg1)))
3269 return false;
3271 flags &= ~OEP_ADDRESS_OF;
3272 return (OP_SAME (0) && OP_SAME (1)
3273 /* TARGET_MEM_REFs require equal extra operands. */
3274 && (TREE_CODE (arg0) != TARGET_MEM_REF
3275 || (OP_SAME_WITH_NULL (2)
3276 && OP_SAME_WITH_NULL (3)
3277 && OP_SAME_WITH_NULL (4))));
3279 case ARRAY_REF:
3280 case ARRAY_RANGE_REF:
3281 if (!OP_SAME (0))
3282 return false;
3283 flags &= ~OEP_ADDRESS_OF;
3284 /* First compare the array index by value if it is constant, as we
3285 may have different types but the same value here. */
3286 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3287 TREE_OPERAND (arg1, 1))
3288 || OP_SAME (1))
3289 && OP_SAME_WITH_NULL (2)
3290 && OP_SAME_WITH_NULL (3)
3291 /* Compare low bound and element size as with OEP_ADDRESS_OF
3292 we have to account for the offset of the ref. */
3293 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3294 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3295 || (operand_equal_p (array_ref_low_bound
3296 (CONST_CAST_TREE (arg0)),
3297 array_ref_low_bound
3298 (CONST_CAST_TREE (arg1)), flags)
3299 && operand_equal_p (array_ref_element_size
3300 (CONST_CAST_TREE (arg0)),
3301 array_ref_element_size
3302 (CONST_CAST_TREE (arg1)),
3303 flags))));
3305 case COMPONENT_REF:
3306 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3307 may be NULL when we're called to compare MEM_EXPRs. */
3308 if (!OP_SAME_WITH_NULL (0))
3309 return false;
3311 bool compare_address = flags & OEP_ADDRESS_OF;
3313 /* Most of the time we only need to compare FIELD_DECLs for equality.
3314 However, when determining the address, look into the actual offsets.
3315 These may match for unions and unshared record types. */
3316 flags &= ~OEP_ADDRESS_OF;
3317 if (!OP_SAME (1))
3319 if (compare_address)
3321 if (TREE_OPERAND (arg0, 2)
3322 || TREE_OPERAND (arg1, 2))
3323 return OP_SAME_WITH_NULL (2);
3324 tree field0 = TREE_OPERAND (arg0, 1);
3325 tree field1 = TREE_OPERAND (arg1, 1);
3327 if (!operand_equal_p (DECL_FIELD_OFFSET (field0),
3328 DECL_FIELD_OFFSET (field1), flags)
3329 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3330 DECL_FIELD_BIT_OFFSET (field1),
3331 flags))
3332 return false;
3334 else
3335 return false;
3338 return OP_SAME_WITH_NULL (2);
3340 case BIT_FIELD_REF:
3341 if (!OP_SAME (0))
3342 return false;
3343 flags &= ~OEP_ADDRESS_OF;
3344 return OP_SAME (1) && OP_SAME (2);
3346 default:
3347 return false;
3350 case tcc_expression:
3351 switch (TREE_CODE (arg0))
3353 case ADDR_EXPR:
3354 /* Be sure we pass the right ADDRESS_OF flag. */
3355 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3356 return operand_equal_p (TREE_OPERAND (arg0, 0),
3357 TREE_OPERAND (arg1, 0),
3358 flags | OEP_ADDRESS_OF);
3360 case TRUTH_NOT_EXPR:
3361 return OP_SAME (0);
3363 case TRUTH_ANDIF_EXPR:
3364 case TRUTH_ORIF_EXPR:
3365 return OP_SAME (0) && OP_SAME (1);
3367 case WIDEN_MULT_PLUS_EXPR:
3368 case WIDEN_MULT_MINUS_EXPR:
3369 if (!OP_SAME (2))
3370 return false;
3371 /* The multiplication operands are commutative. */
3372 /* FALLTHRU */
3374 case TRUTH_AND_EXPR:
3375 case TRUTH_OR_EXPR:
3376 case TRUTH_XOR_EXPR:
3377 if (OP_SAME (0) && OP_SAME (1))
3378 return true;
3380 /* Otherwise take into account this is a commutative operation. */
3381 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3382 TREE_OPERAND (arg1, 1), flags)
3383 && operand_equal_p (TREE_OPERAND (arg0, 1),
3384 TREE_OPERAND (arg1, 0), flags));
3386 case COND_EXPR:
3387 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3388 return false;
3389 flags &= ~OEP_ADDRESS_OF;
3390 return OP_SAME (0);
3392 case BIT_INSERT_EXPR:
3393 /* BIT_INSERT_EXPR has an implicit operand, the type precision
3394 of op1; we need to check that it is the same for both. */
3395 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3396 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3397 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3398 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3399 return false;
3400 /* FALLTHRU */
3402 case VEC_COND_EXPR:
3403 case DOT_PROD_EXPR:
3404 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3406 case MODIFY_EXPR:
3407 case INIT_EXPR:
3408 case COMPOUND_EXPR:
3409 case PREDECREMENT_EXPR:
3410 case PREINCREMENT_EXPR:
3411 case POSTDECREMENT_EXPR:
3412 case POSTINCREMENT_EXPR:
3413 if (flags & OEP_LEXICOGRAPHIC)
3414 return OP_SAME (0) && OP_SAME (1);
3415 return false;
3417 case CLEANUP_POINT_EXPR:
3418 case EXPR_STMT:
3419 case SAVE_EXPR:
3420 if (flags & OEP_LEXICOGRAPHIC)
3421 return OP_SAME (0);
3422 return false;
3424 case OBJ_TYPE_REF:
3425 /* Virtual table reference. */
3426 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3427 OBJ_TYPE_REF_EXPR (arg1), flags))
3428 return false;
3429 flags &= ~OEP_ADDRESS_OF;
3430 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3431 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3432 return false;
3433 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3434 OBJ_TYPE_REF_OBJECT (arg1), flags))
3435 return false;
3436 if (virtual_method_call_p (arg0))
3438 if (!virtual_method_call_p (arg1))
3439 return false;
3440 return types_same_for_odr (obj_type_ref_class (arg0),
3441 obj_type_ref_class (arg1));
3443 return false;
3445 default:
3446 return false;
3449 case tcc_vl_exp:
3450 switch (TREE_CODE (arg0))
3452 case CALL_EXPR:
3453 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3454 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3455 /* If the two CALL_EXPRs are not both internal or both normal
3456 function calls, then they are not equal. */
3457 return false;
3458 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3460 /* If the CALL_EXPRs call different internal functions, then they
3461 are not equal. */
3462 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3463 return false;
3465 else
3467 /* If the CALL_EXPRs call different functions, then they are not
3468 equal. */
3469 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3470 flags))
3471 return false;
3474 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3476 unsigned int cef = call_expr_flags (arg0);
3477 if (flags & OEP_PURE_SAME)
3478 cef &= ECF_CONST | ECF_PURE;
3479 else
3480 cef &= ECF_CONST;
3481 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3482 return false;
3485 /* Now see if all the arguments are the same. */
3487 const_call_expr_arg_iterator iter0, iter1;
3488 const_tree a0, a1;
3489 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3490 a1 = first_const_call_expr_arg (arg1, &iter1);
3491 a0 && a1;
3492 a0 = next_const_call_expr_arg (&iter0),
3493 a1 = next_const_call_expr_arg (&iter1))
3494 if (! operand_equal_p (a0, a1, flags))
3495 return false;
3497 /* If we get here and both argument lists are exhausted
3498 then the CALL_EXPRs are equal. */
3499 return ! (a0 || a1);
3501 default:
3502 return false;
3505 case tcc_declaration:
3506 /* Consider __builtin_sqrt equal to sqrt. */
3507 return (TREE_CODE (arg0) == FUNCTION_DECL
3508 && fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3509 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3510 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3511 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3513 case tcc_exceptional:
3514 if (TREE_CODE (arg0) == CONSTRUCTOR)
3516 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3517 return false;
3519 /* In GIMPLE constructors are used only to build vectors from
3520 elements. Individual elements in the constructor must be
3521 indexed in increasing order and form an initial sequence.
3523 We make no effort to compare constructors in GENERIC.
3524 (see sem_variable::equals in ipa-icf which can do so for
3525 constants). */
3526 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3527 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3528 return false;
3530 /* Be sure that the vectors constructed have the same representation.
3531 We have only tested that element precision and modes match.
3532 Vectors may be BLKmode, so also check that the number of
3533 parts match. */
3534 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3535 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3536 return false;
3538 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3539 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3540 unsigned int len = vec_safe_length (v0);
3542 if (len != vec_safe_length (v1))
3543 return false;
3545 for (unsigned int i = 0; i < len; i++)
3547 constructor_elt *c0 = &(*v0)[i];
3548 constructor_elt *c1 = &(*v1)[i];
3550 if (!operand_equal_p (c0->value, c1->value, flags)
3551 /* In GIMPLE the indexes can be either NULL or matching i.
3552 Double check this so we won't get false
3553 positives for GENERIC. */
3554 || (c0->index
3555 && (TREE_CODE (c0->index) != INTEGER_CST
3556 || compare_tree_int (c0->index, i)))
3557 || (c1->index
3558 && (TREE_CODE (c1->index) != INTEGER_CST
3559 || compare_tree_int (c1->index, i))))
3560 return false;
3562 return true;
3564 else if (TREE_CODE (arg0) == STATEMENT_LIST
3565 && (flags & OEP_LEXICOGRAPHIC))
3567 /* Compare the STATEMENT_LISTs. */
3568 tree_stmt_iterator tsi1, tsi2;
3569 tree body1 = CONST_CAST_TREE (arg0);
3570 tree body2 = CONST_CAST_TREE (arg1);
3571 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3572 tsi_next (&tsi1), tsi_next (&tsi2))
3574 /* The lists don't have the same number of statements. */
3575 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3576 return false;
3577 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3578 return true;
3579 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3580 flags & (OEP_LEXICOGRAPHIC
3581 | OEP_NO_HASH_CHECK)))
3582 return false;
3585 return false;
3587 case tcc_statement:
3588 switch (TREE_CODE (arg0))
3590 case RETURN_EXPR:
3591 if (flags & OEP_LEXICOGRAPHIC)
3592 return OP_SAME_WITH_NULL (0);
3593 return false;
3594 case DEBUG_BEGIN_STMT:
3595 if (flags & OEP_LEXICOGRAPHIC)
3596 return true;
3597 return false;
3598 default:
3599 return false;
3602 default:
3603 return false;
3606 #undef OP_SAME
3607 #undef OP_SAME_WITH_NULL
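/* Illustrative examples (editorial sketch, not part of the original
   source): two distinct INTEGER_CST nodes of value 42 satisfy
   operand_equal_p under the default flags, under OEP_ONLY_CONST and
   under OEP_BITWISE alike, while a + b and b + a compare equal only
   under the default flags via the commutativity handling above, since
   OEP_ONLY_CONST bails out before the tcc_binary case is reached.  */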
3610 /* Generate a hash value for an expression. This can be used iteratively
3611 by passing a previous result as the HSTATE argument. */
3613 void
3614 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3615 unsigned int flags)
3617 int i;
3618 enum tree_code code;
3619 enum tree_code_class tclass;
3621 if (t == NULL_TREE || t == error_mark_node)
3623 hstate.merge_hash (0);
3624 return;
3627 STRIP_ANY_LOCATION_WRAPPER (t);
3629 if (!(flags & OEP_ADDRESS_OF))
3630 STRIP_NOPS (t);
3632 code = TREE_CODE (t);
3634 switch (code)
3636 /* Alas, constants aren't shared, so we can't rely on pointer
3637 identity. */
3638 case VOID_CST:
3639 hstate.merge_hash (0);
3640 return;
3641 case INTEGER_CST:
3642 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3643 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3644 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3645 return;
3646 case REAL_CST:
3648 unsigned int val2;
3649 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3650 val2 = rvc_zero;
3651 else
3652 val2 = real_hash (TREE_REAL_CST_PTR (t));
3653 hstate.merge_hash (val2);
3654 return;
3656 case FIXED_CST:
3658 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3659 hstate.merge_hash (val2);
3660 return;
3662 case STRING_CST:
3663 hstate.add ((const void *) TREE_STRING_POINTER (t),
3664 TREE_STRING_LENGTH (t));
3665 return;
3666 case COMPLEX_CST:
3667 hash_operand (TREE_REALPART (t), hstate, flags);
3668 hash_operand (TREE_IMAGPART (t), hstate, flags);
3669 return;
3670 case VECTOR_CST:
3672 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3673 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3674 unsigned int count = vector_cst_encoded_nelts (t);
3675 for (unsigned int i = 0; i < count; ++i)
3676 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3677 return;
3679 case SSA_NAME:
3680 /* We can just compare by pointer. */
3681 hstate.add_hwi (SSA_NAME_VERSION (t));
3682 return;
3683 case PLACEHOLDER_EXPR:
3684 /* The node itself doesn't matter. */
3685 return;
3686 case BLOCK:
3687 case OMP_CLAUSE:
3688 /* Ignore. */
3689 return;
3690 case TREE_LIST:
3691 /* A list of expressions, for a CALL_EXPR or as the elements of a
3692 VECTOR_CST. */
3693 for (; t; t = TREE_CHAIN (t))
3694 hash_operand (TREE_VALUE (t), hstate, flags);
3695 return;
3696 case CONSTRUCTOR:
3698 unsigned HOST_WIDE_INT idx;
3699 tree field, value;
3700 flags &= ~OEP_ADDRESS_OF;
3701 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3702 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3704 /* In GIMPLE the indexes can be either NULL or matching i. */
3705 if (field == NULL_TREE)
3706 field = bitsize_int (idx);
3707 hash_operand (field, hstate, flags);
3708 hash_operand (value, hstate, flags);
3710 return;
3712 case STATEMENT_LIST:
3714 tree_stmt_iterator i;
3715 for (i = tsi_start (CONST_CAST_TREE (t));
3716 !tsi_end_p (i); tsi_next (&i))
3717 hash_operand (tsi_stmt (i), hstate, flags);
3718 return;
3720 case TREE_VEC:
3721 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3722 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3723 return;
3724 case IDENTIFIER_NODE:
3725 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3726 return;
3727 case FUNCTION_DECL:
3728 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3729 Otherwise nodes that compare equal according to operand_equal_p might
3730 get different hash codes. However, don't do this for machine specific
3731 or front end builtins, since the function code is overloaded in those
3732 cases. */
3733 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3734 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3736 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3737 code = TREE_CODE (t);
3739 /* FALL THROUGH */
3740 default:
3741 if (POLY_INT_CST_P (t))
3743 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3744 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3745 return;
3747 tclass = TREE_CODE_CLASS (code);
3749 if (tclass == tcc_declaration)
3751 /* DECLs have a unique ID. */
3752 hstate.add_hwi (DECL_UID (t));
3754 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3756 /* For comparisons that can be swapped, use the lower
3757 tree code. */
3758 enum tree_code ccode = swap_tree_comparison (code);
3759 if (code < ccode)
3760 ccode = code;
3761 hstate.add_object (ccode);
3762 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3763 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3765 else if (CONVERT_EXPR_CODE_P (code))
3767 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3768 operand_equal_p. */
3769 enum tree_code ccode = NOP_EXPR;
3770 hstate.add_object (ccode);
3772 /* Don't hash the type, that can lead to having nodes which
3773 compare equal according to operand_equal_p, but which
3774 have different hash codes. Make sure to include signedness
3775 in the hash computation. */
3776 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3777 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3779 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3780 else if (code == MEM_REF
3781 && (flags & OEP_ADDRESS_OF) != 0
3782 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3783 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3784 && integer_zerop (TREE_OPERAND (t, 1)))
3785 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3786 hstate, flags);
3787 /* Don't ICE on FE specific trees, or their arguments etc.
3788 during operand_equal_p hash verification. */
3789 else if (!IS_EXPR_CODE_CLASS (tclass))
3790 gcc_assert (flags & OEP_HASH_CHECK);
3791 else
3793 unsigned int sflags = flags;
3795 hstate.add_object (code);
3797 switch (code)
3799 case ADDR_EXPR:
3800 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3801 flags |= OEP_ADDRESS_OF;
3802 sflags = flags;
3803 break;
3805 case INDIRECT_REF:
3806 case MEM_REF:
3807 case TARGET_MEM_REF:
3808 flags &= ~OEP_ADDRESS_OF;
3809 sflags = flags;
3810 break;
3812 case COMPONENT_REF:
3813 if (sflags & OEP_ADDRESS_OF)
3815 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3816 if (TREE_OPERAND (t, 2))
3817 hash_operand (TREE_OPERAND (t, 2), hstate,
3818 flags & ~OEP_ADDRESS_OF);
3819 else
3821 tree field = TREE_OPERAND (t, 1);
3822 hash_operand (DECL_FIELD_OFFSET (field),
3823 hstate, flags & ~OEP_ADDRESS_OF);
3824 hash_operand (DECL_FIELD_BIT_OFFSET (field),
3825 hstate, flags & ~OEP_ADDRESS_OF);
3827 return;
3829 break;
3830 case ARRAY_REF:
3831 case ARRAY_RANGE_REF:
3832 case BIT_FIELD_REF:
3833 sflags &= ~OEP_ADDRESS_OF;
3834 break;
3836 case COND_EXPR:
3837 flags &= ~OEP_ADDRESS_OF;
3838 break;
3840 case WIDEN_MULT_PLUS_EXPR:
3841 case WIDEN_MULT_MINUS_EXPR:
3843 /* The multiplication operands are commutative. */
3844 inchash::hash one, two;
3845 hash_operand (TREE_OPERAND (t, 0), one, flags);
3846 hash_operand (TREE_OPERAND (t, 1), two, flags);
3847 hstate.add_commutative (one, two);
3848 hash_operand (TREE_OPERAND (t, 2), two, flags);
3849 return;
3852 case CALL_EXPR:
3853 if (CALL_EXPR_FN (t) == NULL_TREE)
3854 hstate.add_int (CALL_EXPR_IFN (t));
3855 break;
3857 case TARGET_EXPR:
3858 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3859 Usually different TARGET_EXPRs should just use
3860 different temporaries in their slots. */
3861 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3862 return;
3864 case OBJ_TYPE_REF:
3865 /* Virtual table reference. */
3866 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3867 flags &= ~OEP_ADDRESS_OF;
3868 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3869 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3870 if (!virtual_method_call_p (t))
3871 return;
3872 if (tree c = obj_type_ref_class (t))
3874 c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
3875 /* We compute mangled names only when free_lang_data is run.
3876 In that case we can hash precisely. */
3877 if (TREE_CODE (c) == TYPE_DECL
3878 && DECL_ASSEMBLER_NAME_SET_P (c))
3879 hstate.add_object
3880 (IDENTIFIER_HASH_VALUE
3881 (DECL_ASSEMBLER_NAME (c)));
3883 return;
3884 default:
3885 break;
3888 /* Don't hash the type; that can lead to having nodes which
3889 compare equal according to operand_equal_p, but which
3890 have different hash codes. */
3891 if (code == NON_LVALUE_EXPR)
3893 /* Make sure to include signedness in the hash computation. */
3894 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3895 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3898 else if (commutative_tree_code (code))
3900 /* It's a commutative expression. We want to hash it the same
3901 however it appears. We do this by first hashing both operands
3902 and then rehashing based on the order of their independent
3903 hashes. */
3904 inchash::hash one, two;
3905 hash_operand (TREE_OPERAND (t, 0), one, flags);
3906 hash_operand (TREE_OPERAND (t, 1), two, flags);
3907 hstate.add_commutative (one, two);
3909 else
3910 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
3911 hash_operand (TREE_OPERAND (t, i), hstate,
3912 i == 0 ? flags : sflags);
3914 return;
3918 bool
3919 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
3920 unsigned int flags, bool *ret)
3922 /* When checking, verify at the outermost operand_equal_p call that
3923 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
3924 hash value. */
3925 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
3927 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
3929 if (arg0 != arg1)
3931 inchash::hash hstate0 (0), hstate1 (0);
3932 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
3933 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
3934 hashval_t h0 = hstate0.end ();
3935 hashval_t h1 = hstate1.end ();
3936 gcc_assert (h0 == h1);
3938 *ret = true;
3940 else
3941 *ret = false;
3943 return true;
3946 return false;
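/* Illustrative sketch (not part of this file): an operand_equal_p entry
   point can use the helper above to return early once the checking-mode
   hash comparison has already decided the answer:

     bool ret;
     if (verify_hash_value (arg0, arg1, flags, &ret))
       return ret;
     ... full structural comparison ...  */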
3950 static operand_compare default_compare_instance;
3952 /* Convenience wrapper around the operand_compare class, because usually we do
3953 not need to play with the valueizer. */
3955 bool
3956 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3958 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
3961 namespace inchash
3964 /* Generate a hash value for an expression. This can be used iteratively
3965 by passing a previous result as the HSTATE argument.
3967 This function is intended to produce the same hash for expressions which
3968 would compare equal using operand_equal_p. */
3969 void
3970 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
3972 default_compare_instance.hash_operand (t, hstate, flags);
3977 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3978 with a different signedness or a narrower precision. */
3980 static bool
3981 operand_equal_for_comparison_p (tree arg0, tree arg1)
3983 if (operand_equal_p (arg0, arg1, 0))
3984 return true;
3986 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3987 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3988 return false;
3990 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3991 and see if the inner values are the same. This removes any
3992 signedness comparison, which doesn't matter here. */
3993 tree op0 = arg0;
3994 tree op1 = arg1;
3995 STRIP_NOPS (op0);
3996 STRIP_NOPS (op1);
3997 if (operand_equal_p (op0, op1, 0))
3998 return true;
4000 /* Discard a single widening conversion from ARG1 and see if the inner
4001 value is the same as ARG0. */
4002 if (CONVERT_EXPR_P (arg1)
4003 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4004 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4005 < TYPE_PRECISION (TREE_TYPE (arg1))
4006 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4007 return true;
4009 return false;
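/* Illustrative example: for `int i', the trees for `i' and
   `(unsigned int) i' satisfy this predicate, since the conversion only
   changes signedness; `(long) i' is also accepted, as a single widening
   conversion of the same value.  */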
4012 /* See if ARG is an expression that is either a comparison or is performing
4013 arithmetic on comparisons. The comparisons must only be comparing
4014 two different values, which will be stored in *CVAL1 and *CVAL2; if
4015 they are nonzero it means that some operands have already been found.
4016 No variables may be used anywhere else in the expression except in the
4017 comparisons.
4019 If this is true, return true. Otherwise, return false. */
4021 static bool
4022 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4024 enum tree_code code = TREE_CODE (arg);
4025 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4027 /* We can handle some of the tcc_expression cases here. */
4028 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4029 tclass = tcc_unary;
4030 else if (tclass == tcc_expression
4031 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4032 || code == COMPOUND_EXPR))
4033 tclass = tcc_binary;
4035 switch (tclass)
4037 case tcc_unary:
4038 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4040 case tcc_binary:
4041 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4042 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4044 case tcc_constant:
4045 return true;
4047 case tcc_expression:
4048 if (code == COND_EXPR)
4049 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4050 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4051 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4052 return false;
4054 case tcc_comparison:
4055 /* First see if we can handle the first operand, then the second. For
4056 the second operand, we know *CVAL1 can't be zero. It must be that
4057 one side of the comparison is each of the values; test for the
4058 case where this isn't true by failing if the two operands
4059 are the same. */
4061 if (operand_equal_p (TREE_OPERAND (arg, 0),
4062 TREE_OPERAND (arg, 1), 0))
4063 return false;
4065 if (*cval1 == 0)
4066 *cval1 = TREE_OPERAND (arg, 0);
4067 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4069 else if (*cval2 == 0)
4070 *cval2 = TREE_OPERAND (arg, 0);
4071 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4073 else
4074 return false;
4076 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4078 else if (*cval2 == 0)
4079 *cval2 = TREE_OPERAND (arg, 1);
4080 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4082 else
4083 return false;
4085 return true;
4087 default:
4088 return false;
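/* Illustrative example: for `a < b && a == b', *CVAL1 ends up as `a' and
   *CVAL2 as `b'.  By contrast `a < b && c == d' fails, because more than
   two distinct values appear in the comparisons.  */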
4092 /* ARG is a tree that is known to contain just arithmetic operations and
4093 comparisons. Evaluate the operations in the tree substituting NEW0 for
4094 any occurrence of OLD0 as an operand of a comparison and likewise for
4095 NEW1 and OLD1. */
4097 static tree
4098 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4099 tree old1, tree new1)
4101 tree type = TREE_TYPE (arg);
4102 enum tree_code code = TREE_CODE (arg);
4103 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4105 /* We can handle some of the tcc_expression cases here. */
4106 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4107 tclass = tcc_unary;
4108 else if (tclass == tcc_expression
4109 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4110 tclass = tcc_binary;
4112 switch (tclass)
4114 case tcc_unary:
4115 return fold_build1_loc (loc, code, type,
4116 eval_subst (loc, TREE_OPERAND (arg, 0),
4117 old0, new0, old1, new1));
4119 case tcc_binary:
4120 return fold_build2_loc (loc, code, type,
4121 eval_subst (loc, TREE_OPERAND (arg, 0),
4122 old0, new0, old1, new1),
4123 eval_subst (loc, TREE_OPERAND (arg, 1),
4124 old0, new0, old1, new1));
4126 case tcc_expression:
4127 switch (code)
4129 case SAVE_EXPR:
4130 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4131 old1, new1);
4133 case COMPOUND_EXPR:
4134 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4135 old1, new1);
4137 case COND_EXPR:
4138 return fold_build3_loc (loc, code, type,
4139 eval_subst (loc, TREE_OPERAND (arg, 0),
4140 old0, new0, old1, new1),
4141 eval_subst (loc, TREE_OPERAND (arg, 1),
4142 old0, new0, old1, new1),
4143 eval_subst (loc, TREE_OPERAND (arg, 2),
4144 old0, new0, old1, new1));
4145 default:
4146 break;
4148 /* Fall through - ??? */
4150 case tcc_comparison:
4152 tree arg0 = TREE_OPERAND (arg, 0);
4153 tree arg1 = TREE_OPERAND (arg, 1);
4155 /* We need to check both for exact equality and tree equality. The
4156 former will be true if the operand has a side-effect. In that
4157 case, we know the operand occurred exactly once. */
4159 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4160 arg0 = new0;
4161 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4162 arg0 = new1;
4164 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4165 arg1 = new0;
4166 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4167 arg1 = new1;
4169 return fold_build2_loc (loc, code, type, arg0, arg1);
4172 default:
4173 return arg;
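/* Worked example (illustrative): with ARG = `(a < b) || (b == a)',
   OLD0/NEW0 = a/0 and OLD1/NEW1 = b/1, the comparisons are rewritten to
   `(0 < 1) || (1 == 0)', which the fold_build* calls above reduce to a
   constant.  */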
4177 /* Return a tree for the case when the result of an expression is RESULT
4178 converted to TYPE and OMITTED was previously an operand of the expression
4179 but is now not needed (e.g., we folded OMITTED * 0).
4181 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4182 the conversion of RESULT to TYPE. */
4184 tree
4185 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4187 tree t = fold_convert_loc (loc, type, result);
4189 /* If the resulting operand is an empty statement, just return the omitted
4190 statement cast to void. */
4191 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4192 return build1_loc (loc, NOP_EXPR, void_type_node,
4193 fold_ignored_result (omitted));
4195 if (TREE_SIDE_EFFECTS (omitted))
4196 return build2_loc (loc, COMPOUND_EXPR, type,
4197 fold_ignored_result (omitted), t);
4199 return non_lvalue_loc (loc, t);
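/* E.g. (illustrative) after folding `f () * 0' to 0, where `f' is some
   function with side effects, passing the call as OMITTED yields the
   tree `f (), 0', so the call is still evaluated.  */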
4202 /* Return a tree for the case when the result of an expression is RESULT
4203 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4204 of the expression but are now not needed.
4206 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4207 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4208 evaluated before OMITTED2. Otherwise, if neither has side effects,
4209 just do the conversion of RESULT to TYPE. */
4211 tree
4212 omit_two_operands_loc (location_t loc, tree type, tree result,
4213 tree omitted1, tree omitted2)
4215 tree t = fold_convert_loc (loc, type, result);
4217 if (TREE_SIDE_EFFECTS (omitted2))
4218 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4219 if (TREE_SIDE_EFFECTS (omitted1))
4220 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4222 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4226 /* Return a simplified tree node for the truth-negation of ARG. This
4227 never alters ARG itself. We assume that ARG is an operation that
4228 returns a truth value (0 or 1).
4230 FIXME: one would think we would fold the result, but it causes
4231 problems with the dominator optimizer. */
4233 static tree
4234 fold_truth_not_expr (location_t loc, tree arg)
4236 tree type = TREE_TYPE (arg);
4237 enum tree_code code = TREE_CODE (arg);
4238 location_t loc1, loc2;
4240 /* If this is a comparison, we can simply invert it, except for
4241 floating-point non-equality comparisons, in which case we just
4242 enclose a TRUTH_NOT_EXPR around what we have. */
4244 if (TREE_CODE_CLASS (code) == tcc_comparison)
4246 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4247 if (FLOAT_TYPE_P (op_type)
4248 && flag_trapping_math
4249 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4250 && code != NE_EXPR && code != EQ_EXPR)
4251 return NULL_TREE;
4253 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4254 if (code == ERROR_MARK)
4255 return NULL_TREE;
4257 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4258 TREE_OPERAND (arg, 1));
4259 if (TREE_NO_WARNING (arg))
4260 TREE_NO_WARNING (ret) = 1;
4261 return ret;
4264 switch (code)
4266 case INTEGER_CST:
4267 return constant_boolean_node (integer_zerop (arg), type);
4269 case TRUTH_AND_EXPR:
4270 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4271 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4272 return build2_loc (loc, TRUTH_OR_EXPR, type,
4273 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4274 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4276 case TRUTH_OR_EXPR:
4277 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4278 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4279 return build2_loc (loc, TRUTH_AND_EXPR, type,
4280 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4281 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4283 case TRUTH_XOR_EXPR:
4284 /* Here we can invert either operand. We invert the first operand
4285 unless the second operand is a TRUTH_NOT_EXPR in which case our
4286 result is the XOR of the first operand with the inside of the
4287 negation of the second operand. */
4289 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4290 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4291 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4292 else
4293 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4294 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4295 TREE_OPERAND (arg, 1));
4297 case TRUTH_ANDIF_EXPR:
4298 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4299 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4300 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4301 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4302 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4304 case TRUTH_ORIF_EXPR:
4305 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4306 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4307 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4308 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4309 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4311 case TRUTH_NOT_EXPR:
4312 return TREE_OPERAND (arg, 0);
4314 case COND_EXPR:
4316 tree arg1 = TREE_OPERAND (arg, 1);
4317 tree arg2 = TREE_OPERAND (arg, 2);
4319 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4320 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4322 /* A COND_EXPR may have a throw as one operand, which
4323 then has void type. Just leave void operands
4324 as they are. */
4325 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4326 VOID_TYPE_P (TREE_TYPE (arg1))
4327 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4328 VOID_TYPE_P (TREE_TYPE (arg2))
4329 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4332 case COMPOUND_EXPR:
4333 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4334 return build2_loc (loc, COMPOUND_EXPR, type,
4335 TREE_OPERAND (arg, 0),
4336 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4338 case NON_LVALUE_EXPR:
4339 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4340 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4342 CASE_CONVERT:
4343 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4344 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4346 /* fall through */
4348 case FLOAT_EXPR:
4349 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4350 return build1_loc (loc, TREE_CODE (arg), type,
4351 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4353 case BIT_AND_EXPR:
4354 if (!integer_onep (TREE_OPERAND (arg, 1)))
4355 return NULL_TREE;
4356 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4358 case SAVE_EXPR:
4359 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4361 case CLEANUP_POINT_EXPR:
4362 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4363 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4364 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4366 default:
4367 return NULL_TREE;
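/* Example (illustrative): negating `a && b' takes the TRUTH_AND_EXPR
   case above and produces `!a || !b', inverting each operand
   recursively; the comparison case turns `a < b' directly into
   `a >= b' when that inversion is safe for the operand type.  */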
4371 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4372 assume that ARG is an operation that returns a truth value (0 or 1
4373 for scalars, 0 or -1 for vectors). Return the folded expression if
4374 folding is successful. Otherwise, return NULL_TREE. */
4376 static tree
4377 fold_invert_truthvalue (location_t loc, tree arg)
4379 tree type = TREE_TYPE (arg);
4380 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4381 ? BIT_NOT_EXPR
4382 : TRUTH_NOT_EXPR,
4383 type, arg);
4386 /* Return a simplified tree node for the truth-negation of ARG. This
4387 never alters ARG itself. We assume that ARG is an operation that
4388 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4390 tree
4391 invert_truthvalue_loc (location_t loc, tree arg)
4393 if (TREE_CODE (arg) == ERROR_MARK)
4394 return arg;
4396 tree type = TREE_TYPE (arg);
4397 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4398 ? BIT_NOT_EXPR
4399 : TRUTH_NOT_EXPR,
4400 type, arg);
4403 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4404 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4405 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4406 is the original memory reference used to preserve the alias set of
4407 the access. */
4409 static tree
4410 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4411 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4412 int unsignedp, int reversep)
4414 tree result, bftype;
4416 /* Attempt not to lose the access path if possible. */
4417 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4419 tree ninner = TREE_OPERAND (orig_inner, 0);
4420 machine_mode nmode;
4421 poly_int64 nbitsize, nbitpos;
4422 tree noffset;
4423 int nunsignedp, nreversep, nvolatilep = 0;
4424 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4425 &noffset, &nmode, &nunsignedp,
4426 &nreversep, &nvolatilep);
4427 if (base == inner
4428 && noffset == NULL_TREE
4429 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4430 && !reversep
4431 && !nreversep
4432 && !nvolatilep)
4434 inner = ninner;
4435 bitpos -= nbitpos;
4439 alias_set_type iset = get_alias_set (orig_inner);
4440 if (iset == 0 && get_alias_set (inner) != iset)
4441 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4442 build_fold_addr_expr (inner),
4443 build_int_cst (ptr_type_node, 0));
4445 if (known_eq (bitpos, 0) && !reversep)
4447 tree size = TYPE_SIZE (TREE_TYPE (inner));
4448 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4449 || POINTER_TYPE_P (TREE_TYPE (inner)))
4450 && tree_fits_shwi_p (size)
4451 && tree_to_shwi (size) == bitsize)
4452 return fold_convert_loc (loc, type, inner);
4455 bftype = type;
4456 if (TYPE_PRECISION (bftype) != bitsize
4457 || TYPE_UNSIGNED (bftype) == !unsignedp)
4458 bftype = build_nonstandard_integer_type (bitsize, 0);
4460 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4461 bitsize_int (bitsize), bitsize_int (bitpos));
4462 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4464 if (bftype != type)
4465 result = fold_convert_loc (loc, type, result);
4467 return result;
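/* E.g. (illustrative) a reference to 8 bits at bit position 16 of a
   32-bit word W, unsigned and in native storage order, comes out as
   BIT_FIELD_REF <W, 8, 16> with an 8-bit unsigned bit-field type, and
   is then converted to TYPE if that differs.  */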
4470 /* Optimize a bit-field compare.
4472 There are two cases: the first is a compare against a constant and the
4473 second is a comparison of two items where the fields are at the same
4474 bit position relative to the start of a chunk (byte, halfword, word)
4475 large enough to contain it. In these cases we can avoid the shift
4476 implicit in bitfield extractions.
4478 For constants, we emit a compare of the shifted constant with the
4479 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4480 compared. For two fields at the same position, we do the ANDs with the
4481 similar mask and compare the result of the ANDs.
4483 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4484 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4485 are the left and right operands of the comparison, respectively.
4487 If the optimization described above can be done, we return the resulting
4488 tree. Otherwise we return zero. */
4490 static tree
4491 optimize_bit_field_compare (location_t loc, enum tree_code code,
4492 tree compare_type, tree lhs, tree rhs)
4494 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4495 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4496 tree type = TREE_TYPE (lhs);
4497 tree unsigned_type;
4498 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4499 machine_mode lmode, rmode;
4500 scalar_int_mode nmode;
4501 int lunsignedp, runsignedp;
4502 int lreversep, rreversep;
4503 int lvolatilep = 0, rvolatilep = 0;
4504 tree linner, rinner = NULL_TREE;
4505 tree mask;
4506 tree offset;
4508 /* Get all the information about the extractions being done. If the bit size
4509 is the same as the size of the underlying object, we aren't doing an
4510 extraction at all and so can do nothing. We also don't want to
4511 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4512 then will no longer be able to replace it. */
4513 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4514 &lunsignedp, &lreversep, &lvolatilep);
4515 if (linner == lhs
4516 || !known_size_p (plbitsize)
4517 || !plbitsize.is_constant (&lbitsize)
4518 || !plbitpos.is_constant (&lbitpos)
4519 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4520 || offset != 0
4521 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4522 || lvolatilep)
4523 return 0;
4525 if (const_p)
4526 rreversep = lreversep;
4527 else
4529 /* If this is not a constant, we can only do something if bit positions,
4530 sizes, signedness and storage order are the same. */
4531 rinner
4532 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4533 &runsignedp, &rreversep, &rvolatilep);
4535 if (rinner == rhs
4536 || maybe_ne (lbitpos, rbitpos)
4537 || maybe_ne (lbitsize, rbitsize)
4538 || lunsignedp != runsignedp
4539 || lreversep != rreversep
4540 || offset != 0
4541 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4542 || rvolatilep)
4543 return 0;
4546 /* Honor the C++ memory model and mimic what RTL expansion does. */
4547 poly_uint64 bitstart = 0;
4548 poly_uint64 bitend = 0;
4549 if (TREE_CODE (lhs) == COMPONENT_REF)
4551 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4552 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4553 return 0;
4556 /* See if we can find a mode to refer to this field. We should be able to,
4557 but fail if we can't. */
4558 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4559 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4560 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4561 TYPE_ALIGN (TREE_TYPE (rinner))),
4562 BITS_PER_WORD, false, &nmode))
4563 return 0;
4565 /* Set signed and unsigned types of the precision of this mode for the
4566 shifts below. */
4567 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4569 /* Compute the bit position and size for the new reference and our offset
4570 within it. If the new reference is the same size as the original, we
4571 won't optimize anything, so return zero. */
4572 nbitsize = GET_MODE_BITSIZE (nmode);
4573 nbitpos = lbitpos & ~ (nbitsize - 1);
4574 lbitpos -= nbitpos;
4575 if (nbitsize == lbitsize)
4576 return 0;
4578 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4579 lbitpos = nbitsize - lbitsize - lbitpos;
4581 /* Make the mask to be used against the extracted field. */
4582 mask = build_int_cst_type (unsigned_type, -1);
4583 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4584 mask = const_binop (RSHIFT_EXPR, mask,
4585 size_int (nbitsize - lbitsize - lbitpos));
4587 if (! const_p)
4589 if (nbitpos < 0)
4590 return 0;
4592 /* If not comparing with constant, just rework the comparison
4593 and return. */
4594 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4595 nbitsize, nbitpos, 1, lreversep);
4596 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4597 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4598 nbitsize, nbitpos, 1, rreversep);
4599 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4600 return fold_build2_loc (loc, code, compare_type, t1, t2);
4603 /* Otherwise, we are handling the constant case. See if the constant is too
4604 big for the field. Warn and return a tree for 0 (false) if so. We do
4605 this not only for its own sake, but to avoid having to test for this
4606 error case below. If we didn't, we might generate wrong code.
4608 For unsigned fields, the constant shifted right by the field length should
4609 be all zero. For signed fields, the high-order bits should agree with
4610 the sign bit. */
4612 if (lunsignedp)
4614 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4616 warning (0, "comparison is always %d due to width of bit-field",
4617 code == NE_EXPR);
4618 return constant_boolean_node (code == NE_EXPR, compare_type);
4621 else
4623 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4624 if (tem != 0 && tem != -1)
4626 warning (0, "comparison is always %d due to width of bit-field",
4627 code == NE_EXPR);
4628 return constant_boolean_node (code == NE_EXPR, compare_type);
4632 if (nbitpos < 0)
4633 return 0;
4635 /* Single-bit compares should always be against zero. */
4636 if (lbitsize == 1 && ! integer_zerop (rhs))
4638 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4639 rhs = build_int_cst (type, 0);
4642 /* Make a new bitfield reference, shift the constant over the
4643 appropriate number of bits and mask it with the computed mask
4644 (in case this was a signed field). If we changed it, make a new one. */
4645 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4646 nbitsize, nbitpos, 1, lreversep);
4648 rhs = const_binop (BIT_AND_EXPR,
4649 const_binop (LSHIFT_EXPR,
4650 fold_convert_loc (loc, unsigned_type, rhs),
4651 size_int (lbitpos)),
4652 mask);
4654 lhs = build2_loc (loc, code, compare_type,
4655 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4656 return lhs;
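/* Source-level sketch of the constant case (the layout is hypothetical
   and assumes a little-endian target):

     struct S { unsigned a : 9; unsigned b : 7; } s;
     ... s.b == 5 ...

   is rewritten along the lines of

     (W & 0xfe00) == ((5 << 9) & 0xfe00)

   where W is the 16-bit word containing both fields, so the shift
   implicit in extracting `s.b' is avoided.  */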
4659 /* Subroutine for fold_truth_andor_1: decode a field reference.
4661 If EXP is a comparison reference, we return the innermost reference.
4663 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4664 set to the starting bit number.
4666 If the innermost field can be completely contained in a mode-sized
4667 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4669 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4670 otherwise it is not changed.
4672 *PUNSIGNEDP is set to the signedness of the field.
4674 *PREVERSEP is set to the storage order of the field.
4676 *PMASK is set to the mask used. This is either contained in a
4677 BIT_AND_EXPR or derived from the width of the field.
4679 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4681 Return 0 if this is not a component reference or is one that we can't
4682 do anything with. */
4684 static tree
4685 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4686 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4687 int *punsignedp, int *preversep, int *pvolatilep,
4688 tree *pmask, tree *pand_mask)
4690 tree exp = *exp_;
4691 tree outer_type = 0;
4692 tree and_mask = 0;
4693 tree mask, inner, offset;
4694 tree unsigned_type;
4695 unsigned int precision;
4697 /* All the optimizations using this function assume integer fields.
4698 There are problems with FP fields since the type_for_size call
4699 below can fail for, e.g., XFmode. */
4700 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4701 return NULL_TREE;
4703 /* We are interested in the bare arrangement of bits, so strip everything
4704 that doesn't affect the machine mode. However, record the type of the
4705 outermost expression if it may matter below. */
4706 if (CONVERT_EXPR_P (exp)
4707 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4708 outer_type = TREE_TYPE (exp);
4709 STRIP_NOPS (exp);
4711 if (TREE_CODE (exp) == BIT_AND_EXPR)
4713 and_mask = TREE_OPERAND (exp, 1);
4714 exp = TREE_OPERAND (exp, 0);
4715 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4716 if (TREE_CODE (and_mask) != INTEGER_CST)
4717 return NULL_TREE;
4720 poly_int64 poly_bitsize, poly_bitpos;
4721 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4722 pmode, punsignedp, preversep, pvolatilep);
4723 if ((inner == exp && and_mask == 0)
4724 || !poly_bitsize.is_constant (pbitsize)
4725 || !poly_bitpos.is_constant (pbitpos)
4726 || *pbitsize < 0
4727 || offset != 0
4728 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4729 /* Reject out-of-bound accesses (PR79731). */
4730 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4731 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4732 *pbitpos + *pbitsize) < 0))
4733 return NULL_TREE;
4735 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4736 if (unsigned_type == NULL_TREE)
4737 return NULL_TREE;
4739 *exp_ = exp;
4741 /* If the number of bits in the reference is the same as the bitsize of
4742 the outer type, then the outer type gives the signedness. Otherwise
4743 (in case of a small bitfield) the signedness is unchanged. */
4744 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4745 *punsignedp = TYPE_UNSIGNED (outer_type);
4747 /* Compute the mask to access the bitfield. */
4748 precision = TYPE_PRECISION (unsigned_type);
4750 mask = build_int_cst_type (unsigned_type, -1);
4752 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4753 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4755 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4756 if (and_mask != 0)
4757 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4758 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4760 *pmask = mask;
4761 *pand_mask = and_mask;
4762 return inner;
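/* E.g. (illustrative) for EXP = `s.f & 3' with
     struct S { unsigned f : 8; } s;
   this returns the innermost reference for `s' with *PBITSIZE == 8,
   *PAND_MASK == 3 and *PMASK == 3 (the AND of 3 with the field mask
   0xff).  */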
4765 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4766 bit positions and the type of MASK is signed. */
4768 static bool
4769 all_ones_mask_p (const_tree mask, unsigned int size)
4771 tree type = TREE_TYPE (mask);
4772 unsigned int precision = TYPE_PRECISION (type);
4774 /* If this function returns true when the type of the mask is
4775 UNSIGNED, then there will be errors. In particular see
4776 gcc.c-torture/execute/990326-1.c. There does not appear to be
4777 any documentation paper trail as to why this is so. But the pre
4778 wide-int worked with that restriction and it has been preserved
4779 here. */
4780 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4781 return false;
4783 return wi::mask (size, false, precision) == wi::to_wide (mask);
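/* E.g. a MASK of 0xff in a 32-bit signed type satisfies this for
   SIZE == 8; the same value in an unsigned type does not, per the
   caveat above.  */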
4786 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4787 represents the sign bit of EXP's type. If EXP represents a sign
4788 or zero extension, also test VAL against the unextended type.
4789 The return value is the (sub)expression whose sign bit is VAL,
4790 or NULL_TREE otherwise. */
4792 tree
4793 sign_bit_p (tree exp, const_tree val)
4795 int width;
4796 tree t;
4798 /* Tree EXP must have an integral type. */
4799 t = TREE_TYPE (exp);
4800 if (! INTEGRAL_TYPE_P (t))
4801 return NULL_TREE;
4803 /* Tree VAL must be an integer constant. */
4804 if (TREE_CODE (val) != INTEGER_CST
4805 || TREE_OVERFLOW (val))
4806 return NULL_TREE;
4808 width = TYPE_PRECISION (t);
4809 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4810 return exp;
4812 /* Handle extension from a narrower type. */
4813 if (TREE_CODE (exp) == NOP_EXPR
4814 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4815 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4817 return NULL_TREE;
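/* E.g. for a 32-bit int EXP, VAL == 0x80000000 satisfies this; for
   EXP == (int) c with `signed char c', VAL == 0x80 is also accepted
   through the narrowing recursion above.  */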
4820 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4821 to be evaluated unconditionally. */
4823 static bool
4824 simple_operand_p (const_tree exp)
4826 /* Strip any conversions that don't change the machine mode. */
4827 STRIP_NOPS (exp);
4829 return (CONSTANT_CLASS_P (exp)
4830 || TREE_CODE (exp) == SSA_NAME
4831 || (DECL_P (exp)
4832 && ! TREE_ADDRESSABLE (exp)
4833 && ! TREE_THIS_VOLATILE (exp)
4834 && ! DECL_NONLOCAL (exp)
4835 /* Don't regard global variables as simple. They may be
4836 allocated in ways unknown to the compiler (shared memory,
4837 #pragma weak, etc). */
4838 && ! TREE_PUBLIC (exp)
4839 && ! DECL_EXTERNAL (exp)
4840 /* Weakrefs are not safe to be read, since they can be NULL.
4841 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4842 have DECL_WEAK flag set. */
4843 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4844 /* Loading a static variable is unduly expensive, but global
4845 registers aren't expensive. */
4846 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4849 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4850 to be evaluated unconditionally.
4851 In addition to simple_operand_p, we assume that comparisons, conversions,
4852 and logic-not operations are simple, if their operands are simple, too. */
4854 static bool
4855 simple_operand_p_2 (tree exp)
4857 enum tree_code code;
4859 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4860 return false;
4862 while (CONVERT_EXPR_P (exp))
4863 exp = TREE_OPERAND (exp, 0);
4865 code = TREE_CODE (exp);
4867 if (TREE_CODE_CLASS (code) == tcc_comparison)
4868 return (simple_operand_p (TREE_OPERAND (exp, 0))
4869 && simple_operand_p (TREE_OPERAND (exp, 1)));
4871 if (code == TRUTH_NOT_EXPR)
4872 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4874 return simple_operand_p (exp);
4878 /* The following functions are subroutines to fold_range_test and allow it to
4879 try to change a logical combination of comparisons into a range test.
4881 For example, both
4882 X == 2 || X == 3 || X == 4 || X == 5
4883 and
4884 X >= 2 && X <= 5
4885 are converted to
4886 (unsigned) (X - 2) <= 3
4888 We describe each set of comparisons as being either inside or outside
4889 a range, using a variable named like IN_P, and then describe the
4890 range with a lower and upper bound. If one of the bounds is omitted,
4891 it represents either the highest or lowest value of the type.
4893 In the comments below, we represent a range by two numbers in brackets
4894 preceded by a "+" to designate being inside that range, or a "-" to
4895 designate being outside that range, so the condition can be inverted by
4896 flipping the prefix. An omitted bound is represented by a "-". For
4897 example, "- [-, 10]" means being outside the range starting at the lowest
4898 possible value and ending at 10, in other words, being greater than 10.
4899 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4900 always false.
4902 We set up things so that the missing bounds are handled in a consistent
4903 manner so neither a missing bound nor "true" and "false" need to be
4904 handled using a special case. */
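/* Worked example of the conversion above: X >= 2 && X <= 5 is
   "+ [2, 5]".  Rebasing at the low bound gives X - 2 in [0, 3], and
   doing the subtraction in an unsigned type makes values of X below 2
   wrap to very large numbers, so the single test
   (unsigned) (X - 2) <= 3 is exact.  */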
4906 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4907 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4908 and UPPER1_P are nonzero if the respective argument is an upper bound
4909 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4910 must be specified for a comparison. ARG1 will be converted to ARG0's
4911 type if both are specified. */
4913 static tree
4914 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4915 tree arg1, int upper1_p)
4917 tree tem;
4918 int result;
4919 int sgn0, sgn1;
4921 /* If neither arg represents infinity, do the normal operation.
4922 Else, if not a comparison, return infinity. Else handle the special
4923 comparison rules. Note that most of the cases below won't occur, but
4924 are handled for consistency. */
4926 if (arg0 != 0 && arg1 != 0)
4928 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4929 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4930 STRIP_NOPS (tem);
4931 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4934 if (TREE_CODE_CLASS (code) != tcc_comparison)
4935 return 0;
4937 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4938 for neither. In real maths, we cannot assume open ended ranges are
4939 the same. But, this is computer arithmetic, where numbers are finite.
4940 We can therefore make the transformation of any unbounded range with
4941 the value Z, Z being greater than any representable number. This permits
4942 us to treat unbounded ranges as equal. */
4943 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4944 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4945 switch (code)
4947 case EQ_EXPR:
4948 result = sgn0 == sgn1;
4949 break;
4950 case NE_EXPR:
4951 result = sgn0 != sgn1;
4952 break;
4953 case LT_EXPR:
4954 result = sgn0 < sgn1;
4955 break;
4956 case LE_EXPR:
4957 result = sgn0 <= sgn1;
4958 break;
4959 case GT_EXPR:
4960 result = sgn0 > sgn1;
4961 break;
4962 case GE_EXPR:
4963 result = sgn0 >= sgn1;
4964 break;
4965 default:
4966 gcc_unreachable ();
4969 return constant_boolean_node (result, type);
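/* E.g. (illustrative) range_binop (LE_EXPR, boolean_type_node,
   NULL_TREE, 0, high, 1) with a nonzero HIGH compares a missing lower
   bound (SGN0 == -1) against a finite value (SGN1 == 0) and therefore
   yields true.  */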
4972 /* Helper routine for make_range. Perform one step for it, return
4973 the new expression if the loop should continue, or NULL_TREE if it should
4974 stop. */
4976 tree
4977 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4978 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4979 bool *strict_overflow_p)
4981 tree arg0_type = TREE_TYPE (arg0);
4982 tree n_low, n_high, low = *p_low, high = *p_high;
4983 int in_p = *p_in_p, n_in_p;
4985 switch (code)
4987 case TRUTH_NOT_EXPR:
4988 /* We can only do something if the range is testing for zero. */
4989 if (low == NULL_TREE || high == NULL_TREE
4990 || ! integer_zerop (low) || ! integer_zerop (high))
4991 return NULL_TREE;
4992 *p_in_p = ! in_p;
4993 return arg0;
4995 case EQ_EXPR: case NE_EXPR:
4996 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4997 /* We can only do something if the range is testing for zero
4998 and if the second operand is an integer constant. Note that
4999 saying something is "in" the range we make is done by
5000 complementing IN_P, since it is set in the initial case of
5001 being not equal to zero; "out" is leaving it alone. */
5002 if (low == NULL_TREE || high == NULL_TREE
5003 || ! integer_zerop (low) || ! integer_zerop (high)
5004 || TREE_CODE (arg1) != INTEGER_CST)
5005 return NULL_TREE;
5007 switch (code)
5009 case NE_EXPR: /* - [c, c] */
5010 low = high = arg1;
5011 break;
5012 case EQ_EXPR: /* + [c, c] */
5013 in_p = ! in_p, low = high = arg1;
5014 break;
5015 case GT_EXPR: /* - [-, c] */
5016 low = 0, high = arg1;
5017 break;
5018 case GE_EXPR: /* + [c, -] */
5019 in_p = ! in_p, low = arg1, high = 0;
5020 break;
5021 case LT_EXPR: /* - [c, -] */
5022 low = arg1, high = 0;
5023 break;
5024 case LE_EXPR: /* + [-, c] */
5025 in_p = ! in_p, low = 0, high = arg1;
5026 break;
5027 default:
5028 gcc_unreachable ();
5031 /* If this is an unsigned comparison, we also know that EXP is
5032 greater than or equal to zero. We base the range tests we make
5033 on that fact, so we record it here so we can parse existing
5034 range tests. We test arg0_type since often the return type
5035 of, e.g. EQ_EXPR, is boolean. */
5036 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
5038 if (! merge_ranges (&n_in_p, &n_low, &n_high,
5039 in_p, low, high, 1,
5040 build_int_cst (arg0_type, 0),
5041 NULL_TREE))
5042 return NULL_TREE;
5044 in_p = n_in_p, low = n_low, high = n_high;
5046 /* If the high bound is missing, but we have a nonzero low
5047 bound, reverse the range so it goes from zero to the low bound
5048 minus 1. */
5049 if (high == 0 && low && ! integer_zerop (low))
5051 in_p = ! in_p;
5052 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5053 build_int_cst (TREE_TYPE (low), 1), 0);
5054 low = build_int_cst (arg0_type, 0);
5058 *p_low = low;
5059 *p_high = high;
5060 *p_in_p = in_p;
5061 return arg0;
5063 case NEGATE_EXPR:
5064 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5065 low and high are non-NULL, then normalize will DTRT. */
5066 if (!TYPE_UNSIGNED (arg0_type)
5067 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5069 if (low == NULL_TREE)
5070 low = TYPE_MIN_VALUE (arg0_type);
5071 if (high == NULL_TREE)
5072 high = TYPE_MAX_VALUE (arg0_type);
5075 /* (-x) IN [a,b] -> x in [-b, -a] */
5076 n_low = range_binop (MINUS_EXPR, exp_type,
5077 build_int_cst (exp_type, 0),
5078 0, high, 1);
5079 n_high = range_binop (MINUS_EXPR, exp_type,
5080 build_int_cst (exp_type, 0),
5081 0, low, 0);
5082 if (n_high != 0 && TREE_OVERFLOW (n_high))
5083 return NULL_TREE;
5084 goto normalize;
5086 case BIT_NOT_EXPR:
5087 /* ~ X -> -X - 1 */
5088 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5089 build_int_cst (exp_type, 1));
5091 case PLUS_EXPR:
5092 case MINUS_EXPR:
5093 if (TREE_CODE (arg1) != INTEGER_CST)
5094 return NULL_TREE;
5096 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5097 move a constant to the other side. */
5098 if (!TYPE_UNSIGNED (arg0_type)
5099 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5100 return NULL_TREE;
5102 /* If EXP is signed, any overflow in the computation is undefined,
5103 so we don't worry about it so long as our computations on
5104 the bounds don't overflow. For unsigned, overflow is defined
5105 and this is exactly the right thing. */
5106 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5107 arg0_type, low, 0, arg1, 0);
5108 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5109 arg0_type, high, 1, arg1, 0);
5110 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5111 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5112 return NULL_TREE;
5114 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5115 *strict_overflow_p = true;
5117 normalize:
5118 /* Check for an unsigned range which has wrapped around the maximum
5119 value thus making n_high < n_low, and normalize it. */
5120 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5122 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5123 build_int_cst (TREE_TYPE (n_high), 1), 0);
5124 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5125 build_int_cst (TREE_TYPE (n_low), 1), 0);
5127 /* If the range is of the form +/- [ x+1, x ], we won't
5128 be able to normalize it. But then, it represents the
5129 whole range or the empty set, so make it
5130 +/- [ -, - ]. */
5131 if (tree_int_cst_equal (n_low, low)
5132 && tree_int_cst_equal (n_high, high))
5133 low = high = 0;
5134 else
5135 in_p = ! in_p;
5137 else
5138 low = n_low, high = n_high;
5140 *p_low = low;
5141 *p_high = high;
5142 *p_in_p = in_p;
5143 return arg0;
5145 CASE_CONVERT:
5146 case NON_LVALUE_EXPR:
5147 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5148 return NULL_TREE;
5150 if (! INTEGRAL_TYPE_P (arg0_type)
5151 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5152 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5153 return NULL_TREE;
5155 n_low = low, n_high = high;
5157 if (n_low != 0)
5158 n_low = fold_convert_loc (loc, arg0_type, n_low);
5160 if (n_high != 0)
5161 n_high = fold_convert_loc (loc, arg0_type, n_high);
5163 /* If we're converting arg0 from an unsigned type to exp's
5164 signed type, we will be doing the comparison as unsigned.
5165 The tests above have already verified that LOW and HIGH
5166 are both positive.
5168 So we have to ensure that we will handle large unsigned
5169 values the same way that the current signed bounds treat
5170 negative values. */
5172 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5174 tree high_positive;
5175 tree equiv_type;
5176 /* For fixed-point modes, we need to pass the saturating flag
5177 as the 2nd parameter. */
5178 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5179 equiv_type
5180 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5181 TYPE_SATURATING (arg0_type));
5182 else
5183 equiv_type
5184 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5186 /* A range without an upper bound is, naturally, unbounded.
5187 Since convert would have cropped a very large value, use
5188 the max value for the destination type. */
5189 high_positive
5190 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5191 : TYPE_MAX_VALUE (arg0_type);
5193 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5194 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5195 fold_convert_loc (loc, arg0_type,
5196 high_positive),
5197 build_int_cst (arg0_type, 1));
5199 /* If the low bound is specified, "and" the range with the
5200 range for which the original unsigned value will be
5201 positive. */
5202 if (low != 0)
5204 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5205 1, fold_convert_loc (loc, arg0_type,
5206 integer_zero_node),
5207 high_positive))
5208 return NULL_TREE;
5210 in_p = (n_in_p == in_p);
5212 else
5214 /* Otherwise, "or" the range with the range of the input
5215 that will be interpreted as negative. */
5216 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5217 1, fold_convert_loc (loc, arg0_type,
5218 integer_zero_node),
5219 high_positive))
5220 return NULL_TREE;
5222 in_p = (in_p != n_in_p);
5226 *p_low = n_low;
5227 *p_high = n_high;
5228 *p_in_p = in_p;
5229 return arg0;
5231 default:
5232 return NULL_TREE;
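/* E.g. (illustrative) a BIT_NOT_EXPR step rewrites a range test on `~x'
   into one on `-x - 1', which the NEGATE_EXPR and PLUS_EXPR/MINUS_EXPR
   cases can then fold into the bounds on later steps.  */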
5236 /* Given EXP, a logical expression, set the range it is testing into
5237 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5238 actually being tested. *PLOW and *PHIGH will be made of the same
5239 type as the returned expression. If EXP is not a comparison, we
5240 will most likely not be returning a useful value and range. Set
5241 *STRICT_OVERFLOW_P to true if the return value is only valid
5242 because signed overflow is undefined; otherwise, do not change
5243 *STRICT_OVERFLOW_P. */
5245 tree
5246 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5247 bool *strict_overflow_p)
5249 enum tree_code code;
5250 tree arg0, arg1 = NULL_TREE;
5251 tree exp_type, nexp;
5252 int in_p;
5253 tree low, high;
5254 location_t loc = EXPR_LOCATION (exp);
5256 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5257 and see if we can refine the range. Some of the cases below may not
5258 happen, but it doesn't seem worth worrying about this. We "continue"
5259 the outer loop when we've changed something; otherwise we "break"
5260 the switch, which will "break" the while. */
5262 in_p = 0;
5263 low = high = build_int_cst (TREE_TYPE (exp), 0);
5265 while (1)
5267 code = TREE_CODE (exp);
5268 exp_type = TREE_TYPE (exp);
5269 arg0 = NULL_TREE;
5271 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5273 if (TREE_OPERAND_LENGTH (exp) > 0)
5274 arg0 = TREE_OPERAND (exp, 0);
5275 if (TREE_CODE_CLASS (code) == tcc_binary
5276 || TREE_CODE_CLASS (code) == tcc_comparison
5277 || (TREE_CODE_CLASS (code) == tcc_expression
5278 && TREE_OPERAND_LENGTH (exp) > 1))
5279 arg1 = TREE_OPERAND (exp, 1);
5281 if (arg0 == NULL_TREE)
5282 break;
5284 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5285 &high, &in_p, strict_overflow_p);
5286 if (nexp == NULL_TREE)
5287 break;
5288 exp = nexp;
5291 /* If EXP is a constant, we can evaluate whether this is true or false. */
5292 if (TREE_CODE (exp) == INTEGER_CST)
5294 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5295 exp, 0, low, 0))
5296 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5297 exp, 1, high, 1)));
5298 low = high = 0;
5299 exp = 0;
5302 *pin_p = in_p, *plow = low, *phigh = high;
5303 return exp;
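/* E.g. (illustrative) for EXP = `x > 10' this returns `x' with
   *PIN_P == 0, *PLOW == NULL_TREE and *PHIGH == 10, i.e. the range
   "- [-, 10]": x > 10 holds exactly when x lies outside [-, 10].  */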
5306 /* Returns TRUE if the [LOW, HIGH] range check can be optimized to
5307 a bitwise check, i.e. when
5308 LOW == 0xXX...X00...0
5309 HIGH == 0xXX...X11...1
5310 Return the corresponding mask in MASK and the stem in VALUE. */
5312 static bool
5313 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5314 tree *value)
5316 if (TREE_CODE (low) != INTEGER_CST
5317 || TREE_CODE (high) != INTEGER_CST)
5318 return false;
5320 unsigned prec = TYPE_PRECISION (type);
5321 wide_int lo = wi::to_wide (low, prec);
5322 wide_int hi = wi::to_wide (high, prec);
5324 wide_int end_mask = lo ^ hi;
5325 if ((end_mask & (end_mask + 1)) != 0
5326 || (lo & end_mask) != 0)
5327 return false;
5329 wide_int stem_mask = ~end_mask;
5330 wide_int stem = lo & stem_mask;
5331 if (stem != (hi & stem_mask))
5332 return false;
5334 *mask = wide_int_to_tree (type, stem_mask);
5335 *value = wide_int_to_tree (type, stem);
5337 return true;
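/* Worked example: LOW == 0x20 and HIGH == 0x3f give end_mask == 0x1f,
   which is contiguous and disjoint from LOW, so *MASK == ~0x1f and
   *VALUE == 0x20; the range check then becomes (x & ~0x1f) == 0x20.  */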
5340 /* Helper routine for build_range_check and match.pd. Return the type to
5341 perform the check or NULL if it shouldn't be optimized. */
5343 tree
5344 range_check_type (tree etype)
5346 /* First make sure that arithmetic in this type is valid, then make sure
5347 that it wraps around. */
5348 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5349 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5351 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5353 tree utype, minv, maxv;
5355 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5356 for the type in question, as we rely on this here. */
5357 utype = unsigned_type_for (etype);
5358 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5359 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5360 build_int_cst (TREE_TYPE (maxv), 1), 1);
5361 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5363 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5364 minv, 1, maxv, 1)))
5365 etype = utype;
5366 else
5367 return NULL_TREE;
5369 else if (POINTER_TYPE_P (etype))
5370 etype = unsigned_type_for (etype);
5371 return etype;
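/* E.g. for a signed 32-bit int this returns the corresponding unsigned
   type, since (unsigned) INT_MAX + 1 wraps around to (unsigned) INT_MIN;
   a pointer type is likewise mapped to an unsigned type of the same
   precision.  */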
5374 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5375 type, TYPE, return an expression to test if EXP is in (or out of, depending
5376 on IN_P) the range. Return 0 if the test couldn't be created. */
5378 tree
5379 build_range_check (location_t loc, tree type, tree exp, int in_p,
5380 tree low, tree high)
5382 tree etype = TREE_TYPE (exp), mask, value;
5384 /* Disable this optimization for function pointer expressions
5385 on targets that require function pointer canonicalization. */
5386 if (targetm.have_canonicalize_funcptr_for_compare ()
5387 && POINTER_TYPE_P (etype)
5388 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5389 return NULL_TREE;
5391 if (! in_p)
5393 value = build_range_check (loc, type, exp, 1, low, high);
5394 if (value != 0)
5395 return invert_truthvalue_loc (loc, value);
5397 return 0;
5400 if (low == 0 && high == 0)
5401 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5403 if (low == 0)
5404 return fold_build2_loc (loc, LE_EXPR, type, exp,
5405 fold_convert_loc (loc, etype, high));
5407 if (high == 0)
5408 return fold_build2_loc (loc, GE_EXPR, type, exp,
5409 fold_convert_loc (loc, etype, low));
5411 if (operand_equal_p (low, high, 0))
5412 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5413 fold_convert_loc (loc, etype, low));
5415 if (TREE_CODE (exp) == BIT_AND_EXPR
5416 && maskable_range_p (low, high, etype, &mask, &value))
5417 return fold_build2_loc (loc, EQ_EXPR, type,
5418 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5419 exp, mask),
5420 value);
5422 if (integer_zerop (low))
5424 if (! TYPE_UNSIGNED (etype))
5426 etype = unsigned_type_for (etype);
5427 high = fold_convert_loc (loc, etype, high);
5428 exp = fold_convert_loc (loc, etype, exp);
5430 return build_range_check (loc, type, exp, 1, 0, high);
5433 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5434 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5436 int prec = TYPE_PRECISION (etype);
5438 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5440 if (TYPE_UNSIGNED (etype))
5442 tree signed_etype = signed_type_for (etype);
5443 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5444 etype
5445 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5446 else
5447 etype = signed_etype;
5448 exp = fold_convert_loc (loc, etype, exp);
5450 return fold_build2_loc (loc, GT_EXPR, type, exp,
5451 build_int_cst (etype, 0));
5455 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5456 This requires wrap-around arithmetic for the type of the expression. */
5457 etype = range_check_type (etype);
5458 if (etype == NULL_TREE)
5459 return NULL_TREE;
5461 high = fold_convert_loc (loc, etype, high);
5462 low = fold_convert_loc (loc, etype, low);
5463 exp = fold_convert_loc (loc, etype, exp);
5465 value = const_binop (MINUS_EXPR, high, low);
5467 if (value != 0 && !TREE_OVERFLOW (value))
5468 return build_range_check (loc, type,
5469 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5470 1, build_int_cst (etype, 0), value);
5472 return 0;
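/* Illustrative use: for an integer tree X,

     build_range_check (loc, boolean_type_node, x, 1,
			build_int_cst (TREE_TYPE (x), 2),
			build_int_cst (TREE_TYPE (x), 5))

   goes through the final rewrite above and yields a tree for
   (unsigned) (x - 2) <= 3.  */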
5475 /* Return the predecessor of VAL in its type, handling the infinite case. */
5477 static tree
5478 range_predecessor (tree val)
5480 tree type = TREE_TYPE (val);
5482 if (INTEGRAL_TYPE_P (type)
5483 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5484 return 0;
5485 else
5486 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5487 build_int_cst (TREE_TYPE (val), 1), 0);
5490 /* Return the successor of VAL in its type, handling the infinite case. */
5492 static tree
5493 range_successor (tree val)
5495 tree type = TREE_TYPE (val);
5497 if (INTEGRAL_TYPE_P (type)
5498 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5499 return 0;
5500 else
5501 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5502 build_int_cst (TREE_TYPE (val), 1), 0);
5505 /* Given two ranges, see if we can merge them into one. Return 1 if we
5506 can, 0 if we can't. Set the output range into the specified parameters. */
5508 bool
5509 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5510 tree high0, int in1_p, tree low1, tree high1)
5512 int no_overlap;
5513 int subset;
5514 int temp;
5515 tree tem;
5516 int in_p;
5517 tree low, high;
5518 int lowequal = ((low0 == 0 && low1 == 0)
5519 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5520 low0, 0, low1, 0)));
5521 int highequal = ((high0 == 0 && high1 == 0)
5522 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5523 high0, 1, high1, 1)));
5525 /* Make range 0 be the range that starts first, or ends last if they
5526 start at the same value. Swap them if that isn't the case. */
5527 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5528 low0, 0, low1, 0))
5529 || (lowequal
5530 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5531 high1, 1, high0, 1))))
5533 temp = in0_p, in0_p = in1_p, in1_p = temp;
5534 tem = low0, low0 = low1, low1 = tem;
5535 tem = high0, high0 = high1, high1 = tem;
5538 /* If the second range is != high1 where high1 is the type maximum of
5539 the type, try first merging with < high1 range. */
5540 if (low1
5541 && high1
5542 && TREE_CODE (low1) == INTEGER_CST
5543 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5544 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5545 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5546 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5547 && operand_equal_p (low1, high1, 0))
5549 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5550 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5551 !in1_p, NULL_TREE, range_predecessor (low1)))
5552 return true;
5553 /* Similarly for the second range != low1 where low1 is the type minimum
5554 of the type, try first merging with > low1 range. */
5555 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5556 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5557 !in1_p, range_successor (low1), NULL_TREE))
5558 return true;
5561 /* Now flag two cases, whether the ranges are disjoint or whether the
5562 second range is totally subsumed in the first. Note that the tests
5563 below are simplified by the ones above. */
5564 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5565 high0, 1, low1, 0));
5566 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5567 high1, 1, high0, 1));
5569 /* We now have four cases, depending on whether we are including or
5570 excluding the two ranges. */
5571 if (in0_p && in1_p)
5573 /* If they don't overlap, the result is false. If the second range
5574 is a subset it is the result. Otherwise, the range is from the start
5575 of the second to the end of the first. */
5576 if (no_overlap)
5577 in_p = 0, low = high = 0;
5578 else if (subset)
5579 in_p = 1, low = low1, high = high1;
5580 else
5581 in_p = 1, low = low1, high = high0;
5584 else if (in0_p && ! in1_p)
5586 /* If they don't overlap, the result is the first range. If they are
5587 equal, the result is false. If the second range is a subset of the
5588 first, and the ranges begin at the same place, we go from just after
5589 the end of the second range to the end of the first. If the second
5590 range is not a subset of the first, or if it is a subset and both
5591 ranges end at the same place, the range starts at the start of the
5592 first range and ends just before the second range.
5593 Otherwise, we can't describe this as a single range. */
5594 if (no_overlap)
5595 in_p = 1, low = low0, high = high0;
5596 else if (lowequal && highequal)
5597 in_p = 0, low = high = 0;
5598 else if (subset && lowequal)
5600 low = range_successor (high1);
5601 high = high0;
5602 in_p = 1;
5603 if (low == 0)
5605 /* We are in the weird situation where high0 > high1 but
5606 high1 has no successor. Punt. */
5607 return 0;
5610 else if (! subset || highequal)
5612 low = low0;
5613 high = range_predecessor (low1);
5614 in_p = 1;
5615 if (high == 0)
5617 /* low0 < low1 but low1 has no predecessor. Punt. */
5618 return 0;
5621 else
5622 return 0;
5625 else if (! in0_p && in1_p)
5627 /* If they don't overlap, the result is the second range. If the second
5628 is a subset of the first, the result is false. Otherwise,
5629 the range starts just after the first range and ends at the
5630 end of the second. */
5631 if (no_overlap)
5632 in_p = 1, low = low1, high = high1;
5633 else if (subset || highequal)
5634 in_p = 0, low = high = 0;
5635 else
5637 low = range_successor (high0);
5638 high = high1;
5639 in_p = 1;
5640 if (low == 0)
5642 /* high1 > high0 but high0 has no successor. Punt. */
5643 return 0;
5648 else
5650 /* The case where we are excluding both ranges. Here the complex case
5651 is if they don't overlap. In that case, the only time we have a
5652 range is if they are adjacent. If the second is a subset of the
5653 first, the result is the first. Otherwise, the range to exclude
5654 starts at the beginning of the first range and ends at the end of the
5655 second. */
5656 if (no_overlap)
5658 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5659 range_successor (high0),
5660 1, low1, 0)))
5661 in_p = 0, low = low0, high = high1;
5662 else
5664 /* Canonicalize - [min, x] into - [-, x]. */
5665 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5666 switch (TREE_CODE (TREE_TYPE (low0)))
5668 case ENUMERAL_TYPE:
5669 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5670 GET_MODE_BITSIZE
5671 (TYPE_MODE (TREE_TYPE (low0)))))
5672 break;
5673 /* FALLTHROUGH */
5674 case INTEGER_TYPE:
5675 if (tree_int_cst_equal (low0,
5676 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5677 low0 = 0;
5678 break;
5679 case POINTER_TYPE:
5680 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5681 && integer_zerop (low0))
5682 low0 = 0;
5683 break;
5684 default:
5685 break;
5688 /* Canonicalize - [x, max] into - [x, -]. */
5689 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5690 switch (TREE_CODE (TREE_TYPE (high1)))
5692 case ENUMERAL_TYPE:
5693 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5694 GET_MODE_BITSIZE
5695 (TYPE_MODE (TREE_TYPE (high1)))))
5696 break;
5697 /* FALLTHROUGH */
5698 case INTEGER_TYPE:
5699 if (tree_int_cst_equal (high1,
5700 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5701 high1 = 0;
5702 break;
5703 case POINTER_TYPE:
5704 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5705 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5706 high1, 1,
5707 build_int_cst (TREE_TYPE (high1), 1),
5708 1)))
5709 high1 = 0;
5710 break;
5711 default:
5712 break;
5715 /* The ranges might also be adjacent between the maximum and
5716 minimum values of the given type. For
5717 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5718 return + [x + 1, y - 1]. */
5719 if (low0 == 0 && high1 == 0)
5721 low = range_successor (high0);
5722 high = range_predecessor (low1);
5723 if (low == 0 || high == 0)
5724 return 0;
5726 in_p = 1;
5728 else
5729 return 0;
5732 else if (subset)
5733 in_p = 0, low = low0, high = high0;
5734 else
5735 in_p = 0, low = low0, high = high1;
5738 *pin_p = in_p, *plow = low, *phigh = high;
5739 return 1;
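/* Illustrative sketch, not part of the original file: the in0_p && in1_p
   branch of merge_ranges above, specialized to plain ints with both ends
   finite. After the same initial swap, disjoint ranges yield "always
   false", a subset yields the second range, and otherwise the result runs
   from the second range's start to the first range's end; e.g. merging
   +[0,9] with +[5,20] gives +[5,9]. */

static int
merge_ranges_demo (int *plow, int *phigh, int low0, int high0,
                   int low1, int high1)
{
  if (low1 < low0)
    {
      /* Make range 0 the one that starts first, as the code above does. */
      int t;
      t = low0, low0 = low1, low1 = t;
      t = high0, high0 = high1, high1 = t;
    }
  if (high0 < low1)
    return 0;                               /* no_overlap: always false. */
  *plow = low1;
  *phigh = high1 <= high0 ? high1 : high0;  /* subset or straddle. */
  return 1;
}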
5743 /* Subroutine of fold, looking inside expressions of the form
5744 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5745 of the COND_EXPR. This function is being used also to optimize
5746 A op B ? C : A, by reversing the comparison first.
5748 Return a folded expression whose code is not a COND_EXPR
5749 anymore, or NULL_TREE if no folding opportunity is found. */
5751 static tree
5752 fold_cond_expr_with_comparison (location_t loc, tree type,
5753 tree arg0, tree arg1, tree arg2)
5755 enum tree_code comp_code = TREE_CODE (arg0);
5756 tree arg00 = TREE_OPERAND (arg0, 0);
5757 tree arg01 = TREE_OPERAND (arg0, 1);
5758 tree arg1_type = TREE_TYPE (arg1);
5759 tree tem;
5761 STRIP_NOPS (arg1);
5762 STRIP_NOPS (arg2);
5764 /* If we have A op 0 ? A : -A, consider applying the following
5765 transformations:
5767 A == 0? A : -A same as -A
5768 A != 0? A : -A same as A
5769 A >= 0? A : -A same as abs (A)
5770 A > 0? A : -A same as abs (A)
5771 A <= 0? A : -A same as -abs (A)
5772 A < 0? A : -A same as -abs (A)
5774 None of these transformations work for modes with signed
5775 zeros. If A is +/-0, the first two transformations will
5776 change the sign of the result (from +0 to -0, or vice
5777 versa). The last four will fix the sign of the result,
5778 even though the original expressions could be positive or
5779 negative, depending on the sign of A.
5781 Note that all these transformations are correct if A is
5782 NaN, since the two alternatives (A and -A) are also NaNs. */
5783 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5784 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5785 ? real_zerop (arg01)
5786 : integer_zerop (arg01))
5787 && ((TREE_CODE (arg2) == NEGATE_EXPR
5788 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5789 /* In the case that A is of the form X-Y, '-A' (arg2) may
5790 have already been folded to Y-X; check for that. */
5791 || (TREE_CODE (arg1) == MINUS_EXPR
5792 && TREE_CODE (arg2) == MINUS_EXPR
5793 && operand_equal_p (TREE_OPERAND (arg1, 0),
5794 TREE_OPERAND (arg2, 1), 0)
5795 && operand_equal_p (TREE_OPERAND (arg1, 1),
5796 TREE_OPERAND (arg2, 0), 0))))
5797 switch (comp_code)
5799 case EQ_EXPR:
5800 case UNEQ_EXPR:
5801 tem = fold_convert_loc (loc, arg1_type, arg1);
5802 return fold_convert_loc (loc, type, negate_expr (tem));
5803 case NE_EXPR:
5804 case LTGT_EXPR:
5805 return fold_convert_loc (loc, type, arg1);
5806 case UNGE_EXPR:
5807 case UNGT_EXPR:
5808 if (flag_trapping_math)
5809 break;
5810 /* Fall through. */
5811 case GE_EXPR:
5812 case GT_EXPR:
5813 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5814 break;
5815 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5816 return fold_convert_loc (loc, type, tem);
5817 case UNLE_EXPR:
5818 case UNLT_EXPR:
5819 if (flag_trapping_math)
5820 break;
5821 /* FALLTHRU */
5822 case LE_EXPR:
5823 case LT_EXPR:
5824 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5825 break;
5826 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5827 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
5829 /* A <= 0 ? A : -A is valid for A == INT_MIN, but -abs(INT_MIN)
5830 is not: it invokes UB both in abs and in the negation of its result.
5831 So, use ABSU_EXPR instead. */
5832 tree utype = unsigned_type_for (TREE_TYPE (arg1));
5833 tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
5834 tem = negate_expr (tem);
5835 return fold_convert_loc (loc, type, tem);
5837 else
5839 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5840 return negate_expr (fold_convert_loc (loc, type, tem));
5842 default:
5843 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5844 break;
5847 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5848 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5849 both transformations are correct when A is NaN: A != 0
5850 is then true, and A == 0 is false. */
5852 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5853 && integer_zerop (arg01) && integer_zerop (arg2))
5855 if (comp_code == NE_EXPR)
5856 return fold_convert_loc (loc, type, arg1);
5857 else if (comp_code == EQ_EXPR)
5858 return build_zero_cst (type);
5861 /* Try some transformations of A op B ? A : B.
5863 A == B? A : B same as B
5864 A != B? A : B same as A
5865 A >= B? A : B same as max (A, B)
5866 A > B? A : B same as max (B, A)
5867 A <= B? A : B same as min (A, B)
5868 A < B? A : B same as min (B, A)
5870 As above, these transformations don't work in the presence
5871 of signed zeros. For example, if A and B are zeros of
5872 opposite sign, the first two transformations will change
5873 the sign of the result. In the last four, the original
5874 expressions give different results for (A=+0, B=-0) and
5875 (A=-0, B=+0), but the transformed expressions do not.
5877 The first two transformations are correct if either A or B
5878 is a NaN. In the first transformation, the condition will
5879 be false, and B will indeed be chosen. In the case of the
5880 second transformation, the condition A != B will be true,
5881 and A will be chosen.
5883 The conversions to max() and min() are not correct if B is
5884 a number and A is not. The conditions in the original
5885 expressions will be false, so all four give B. The min()
5886 and max() versions would give a NaN instead. */
5887 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5888 && operand_equal_for_comparison_p (arg01, arg2)
5889 /* Avoid these transformations if the COND_EXPR may be used
5890 as an lvalue in the C++ front-end. PR c++/19199. */
5891 && (in_gimple_form
5892 || VECTOR_TYPE_P (type)
5893 || (! lang_GNU_CXX ()
5894 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5895 || ! maybe_lvalue_p (arg1)
5896 || ! maybe_lvalue_p (arg2)))
5898 tree comp_op0 = arg00;
5899 tree comp_op1 = arg01;
5900 tree comp_type = TREE_TYPE (comp_op0);
5902 switch (comp_code)
5904 case EQ_EXPR:
5905 return fold_convert_loc (loc, type, arg2);
5906 case NE_EXPR:
5907 return fold_convert_loc (loc, type, arg1);
5908 case LE_EXPR:
5909 case LT_EXPR:
5910 case UNLE_EXPR:
5911 case UNLT_EXPR:
5912 /* In C++ a ?: expression can be an lvalue, so put the
5913 operand which will be used if they are equal first
5914 so that we can convert this back to the
5915 corresponding COND_EXPR. */
5916 if (!HONOR_NANS (arg1))
5918 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5919 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5920 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5921 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5922 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5923 comp_op1, comp_op0);
5924 return fold_convert_loc (loc, type, tem);
5926 break;
5927 case GE_EXPR:
5928 case GT_EXPR:
5929 case UNGE_EXPR:
5930 case UNGT_EXPR:
5931 if (!HONOR_NANS (arg1))
5933 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5934 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5935 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5936 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5937 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5938 comp_op1, comp_op0);
5939 return fold_convert_loc (loc, type, tem);
5941 break;
5942 case UNEQ_EXPR:
5943 if (!HONOR_NANS (arg1))
5944 return fold_convert_loc (loc, type, arg2);
5945 break;
5946 case LTGT_EXPR:
5947 if (!HONOR_NANS (arg1))
5948 return fold_convert_loc (loc, type, arg1);
5949 break;
5950 default:
5951 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5952 break;
5956 return NULL_TREE;
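/* Illustrative sketch, not part of the original file: the A op 0 ? A : -A
   and A op B ? A : B transformations above, written out for plain ints,
   where signed zeros and NaNs cannot occur. For floating types the folder
   must additionally check HONOR_SIGNED_ZEROS and HONOR_NANS, as done
   above. */

static int
abs_via_cond_demo (int x)
{
  return x > 0 ? x : -x;        /* folds to ABS_EXPR <x> */
}

static int
min_via_cond_demo (int a, int b)
{
  return a < b ? a : b;         /* folds to MIN_EXPR <a, b> */
}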
5961 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5962 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5963 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5964 false) >= 2)
5965 #endif
5967 /* EXP is some logical combination of boolean tests. See if we can
5968 merge it into some range test. Return the new tree if so. */
5970 static tree
5971 fold_range_test (location_t loc, enum tree_code code, tree type,
5972 tree op0, tree op1)
5974 int or_op = (code == TRUTH_ORIF_EXPR
5975 || code == TRUTH_OR_EXPR);
5976 int in0_p, in1_p, in_p;
5977 tree low0, low1, low, high0, high1, high;
5978 bool strict_overflow_p = false;
5979 tree tem, lhs, rhs;
5980 const char * const warnmsg = G_("assuming signed overflow does not occur "
5981 "when simplifying range test");
5983 if (!INTEGRAL_TYPE_P (type))
5984 return 0;
5986 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5987 /* If op0 is known true or false and this is a short-circuiting
5988 operation we must not merge with op1 since that makes side-effects
5989 unconditional. So special-case this. */
5990 if (!lhs
5991 && ((code == TRUTH_ORIF_EXPR && in0_p)
5992 || (code == TRUTH_ANDIF_EXPR && !in0_p)))
5993 return op0;
5994 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5996 /* If this is an OR operation, invert both sides; we will invert
5997 again at the end. */
5998 if (or_op)
5999 in0_p = ! in0_p, in1_p = ! in1_p;
6001 /* If both expressions are the same, if we can merge the ranges, and we
6002 can build the range test, return it or it inverted. If one of the
6003 ranges is always true or always false, consider it to be the same
6004 expression as the other. */
6005 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
6006 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
6007 in1_p, low1, high1)
6008 && (tem = (build_range_check (loc, type,
6009 lhs != 0 ? lhs
6010 : rhs != 0 ? rhs : integer_zero_node,
6011 in_p, low, high))) != 0)
6013 if (strict_overflow_p)
6014 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
6015 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
6018 /* On machines where the branch cost is expensive, if this is a
6019 short-circuited branch and the underlying object on both sides
6020 is the same, make a non-short-circuit operation. */
6021 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
6022 if (param_logical_op_non_short_circuit != -1)
6023 logical_op_non_short_circuit
6024 = param_logical_op_non_short_circuit;
6025 if (logical_op_non_short_circuit
6026 && !flag_sanitize_coverage
6027 && lhs != 0 && rhs != 0
6028 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6029 && operand_equal_p (lhs, rhs, 0))
6031 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
6032 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
6033 which cases we can't do this. */
6034 if (simple_operand_p (lhs))
6035 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6036 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6037 type, op0, op1);
6039 else if (!lang_hooks.decls.global_bindings_p ()
6040 && !CONTAINS_PLACEHOLDER_P (lhs))
6042 tree common = save_expr (lhs);
6044 if ((lhs = build_range_check (loc, type, common,
6045 or_op ? ! in0_p : in0_p,
6046 low0, high0)) != 0
6047 && (rhs = build_range_check (loc, type, common,
6048 or_op ? ! in1_p : in1_p,
6049 low1, high1)) != 0)
6051 if (strict_overflow_p)
6052 fold_overflow_warning (warnmsg,
6053 WARN_STRICT_OVERFLOW_COMPARISON);
6054 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
6055 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
6056 type, lhs, rhs);
6061 return 0;
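/* Illustrative sketch, not part of the original file: the payoff of the
   merge above. "ch >= '0' && ch <= '9'" collapses to the single range
   +['0','9'], and build_range_check tests such a range with one unsigned
   comparison after biasing by the lower bound. */

static int
is_digit_demo (int ch)
{
  /* Equivalent to ch >= '0' && ch <= '9', with a single branch. */
  return (unsigned int) ch - '0' <= 9u;
}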
6064 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6065 bit value. Arrange things so the extra bits will be set to zero if and
6066 only if C is sign-extended to its full width. If MASK is nonzero,
6067 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6069 static tree
6070 unextend (tree c, int p, int unsignedp, tree mask)
6072 tree type = TREE_TYPE (c);
6073 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6074 tree temp;
6076 if (p == modesize || unsignedp)
6077 return c;
6079 /* We work by getting just the sign bit into the low-order bit, then
6080 into the high-order bit, then sign-extend. We then XOR that value
6081 with C. */
6082 temp = build_int_cst (TREE_TYPE (c),
6083 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6085 /* We must use a signed type in order to get an arithmetic right shift.
6086 However, we must also avoid introducing accidental overflows, so that
6087 a subsequent call to integer_zerop will work. Hence we must
6088 do the type conversion here. At this point, the constant is either
6089 zero or one, and the conversion to a signed type can never overflow.
6090 We could get an overflow if this conversion is done anywhere else. */
6091 if (TYPE_UNSIGNED (type))
6092 temp = fold_convert (signed_type_for (type), temp);
6094 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6095 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6096 if (mask != 0)
6097 temp = const_binop (BIT_AND_EXPR, temp,
6098 fold_convert (TREE_TYPE (c), mask));
6099 /* If necessary, convert the type back to match the type of C. */
6100 if (TYPE_UNSIGNED (type))
6101 temp = fold_convert (type, temp);
6103 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
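/* Illustrative sketch, not part of the original file: unextend for an
   8-bit constant holding a P == 4 bit field. The code above smears the
   field's sign bit over the upper bits with an arithmetic right shift and
   XORs it into C; the demo builds the same mask directly. The result's
   upper bits are zero exactly when C was sign-extended from P bits. */

static unsigned char
unextend_demo (unsigned char c)
{
  const int p = 4;
  int sign = (c >> (p - 1)) & 1;             /* sign bit of the field */
  int mask = sign ? (0xff << p) & 0xff : 0;  /* bits P..7 set iff SIGN */
  return (unsigned char) (c ^ mask);
}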
6106 /* For an expression that has the form
6107 (A && B) || ~B
6108 or
6109 (A || B) && ~B,
6110 we can drop one of the inner expressions and simplify to
6111 A || ~B
6112 or
6113 A && ~B
6114 LOC is the location of the resulting expression. OP is the inner
6115 logical operation; the left-hand side in the examples above, while CMPOP
6116 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6117 removing a condition that guards another, as in
6118 (A != NULL && A->...) || A == NULL
6119 which we must not transform. If RHS_ONLY is true, only eliminate the
6120 right-most operand of the inner logical operation. */
6122 static tree
6123 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6124 bool rhs_only)
6126 tree type = TREE_TYPE (cmpop);
6127 enum tree_code code = TREE_CODE (cmpop);
6128 enum tree_code truthop_code = TREE_CODE (op);
6129 tree lhs = TREE_OPERAND (op, 0);
6130 tree rhs = TREE_OPERAND (op, 1);
6131 tree orig_lhs = lhs, orig_rhs = rhs;
6132 enum tree_code rhs_code = TREE_CODE (rhs);
6133 enum tree_code lhs_code = TREE_CODE (lhs);
6134 enum tree_code inv_code;
6136 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6137 return NULL_TREE;
6139 if (TREE_CODE_CLASS (code) != tcc_comparison)
6140 return NULL_TREE;
6142 if (rhs_code == truthop_code)
6144 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6145 if (newrhs != NULL_TREE)
6147 rhs = newrhs;
6148 rhs_code = TREE_CODE (rhs);
6151 if (lhs_code == truthop_code && !rhs_only)
6153 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6154 if (newlhs != NULL_TREE)
6156 lhs = newlhs;
6157 lhs_code = TREE_CODE (lhs);
6161 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6162 if (inv_code == rhs_code
6163 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6164 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6165 return lhs;
6166 if (!rhs_only && inv_code == lhs_code
6167 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6168 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6169 return rhs;
6170 if (rhs != orig_rhs || lhs != orig_lhs)
6171 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6172 lhs, rhs);
6173 return NULL_TREE;
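/* Illustrative sketch, not part of the original file: the simplification
   above on concrete operands. In "(a < b && x) || a >= b" the inner
   comparison is the inverse of the outer one, so it can be dropped,
   giving "x || a >= b"; as checked above, no operand may have side
   effects. */

static int
drop_opposite_arm_demo (int a, int b, int x)
{
  /* return (a < b && x != 0) || a >= b;    before */
  return x != 0 || a >= b;                /*  after */
}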
6176 /* Find ways of folding logical expressions of LHS and RHS:
6177 Try to merge two comparisons to the same innermost item.
6178 Look for range tests like "ch >= '0' && ch <= '9'".
6179 Look for combinations of simple terms on machines with expensive branches
6180 and evaluate the RHS unconditionally.
6182 For example, if we have p->a == 2 && p->b == 4 and we can make an
6183 object large enough to span both A and B, we can do this with a comparison
6184 against the object ANDed with the a mask.
6186 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6187 operations to do this with one comparison.
6189 We check for both normal comparisons and the BIT_AND_EXPRs made by this
6190 function and the one above.
6192 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6193 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6195 TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
6196 two operands.
6198 We return the simplified tree or 0 if no optimization is possible. */
6200 static tree
6201 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6202 tree lhs, tree rhs)
6204 /* If this is the "or" of two comparisons, we can do something if
6205 the comparisons are NE_EXPR. If this is the "and", we can do something
6206 if the comparisons are EQ_EXPR. I.e.,
6207 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6209 WANTED_CODE is this operation code. For single bit fields, we can
6210 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6211 comparison for one-bit fields. */
6213 enum tree_code wanted_code;
6214 enum tree_code lcode, rcode;
6215 tree ll_arg, lr_arg, rl_arg, rr_arg;
6216 tree ll_inner, lr_inner, rl_inner, rr_inner;
6217 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6218 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6219 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6220 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6221 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6222 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6223 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6224 scalar_int_mode lnmode, rnmode;
6225 tree ll_mask, lr_mask, rl_mask, rr_mask;
6226 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6227 tree l_const, r_const;
6228 tree lntype, rntype, result;
6229 HOST_WIDE_INT first_bit, end_bit;
6230 int volatilep;
6232 /* Start by getting the comparison codes. Fail if anything is volatile.
6233 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6234 it were surrounded with a NE_EXPR. */
6236 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6237 return 0;
6239 lcode = TREE_CODE (lhs);
6240 rcode = TREE_CODE (rhs);
6242 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6244 lhs = build2 (NE_EXPR, truth_type, lhs,
6245 build_int_cst (TREE_TYPE (lhs), 0));
6246 lcode = NE_EXPR;
6249 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6251 rhs = build2 (NE_EXPR, truth_type, rhs,
6252 build_int_cst (TREE_TYPE (rhs), 0));
6253 rcode = NE_EXPR;
6256 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6257 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6258 return 0;
6260 ll_arg = TREE_OPERAND (lhs, 0);
6261 lr_arg = TREE_OPERAND (lhs, 1);
6262 rl_arg = TREE_OPERAND (rhs, 0);
6263 rr_arg = TREE_OPERAND (rhs, 1);
6265 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6266 if (simple_operand_p (ll_arg)
6267 && simple_operand_p (lr_arg))
6269 if (operand_equal_p (ll_arg, rl_arg, 0)
6270 && operand_equal_p (lr_arg, rr_arg, 0))
6272 result = combine_comparisons (loc, code, lcode, rcode,
6273 truth_type, ll_arg, lr_arg);
6274 if (result)
6275 return result;
6277 else if (operand_equal_p (ll_arg, rr_arg, 0)
6278 && operand_equal_p (lr_arg, rl_arg, 0))
6280 result = combine_comparisons (loc, code, lcode,
6281 swap_tree_comparison (rcode),
6282 truth_type, ll_arg, lr_arg);
6283 if (result)
6284 return result;
6288 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6289 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6291 /* If the RHS can be evaluated unconditionally and its operands are
6292 simple, it wins to evaluate the RHS unconditionally on machines
6293 with expensive branches. In this case, this isn't a comparison
6294 that can be merged. */
6296 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6297 false) >= 2
6298 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6299 && simple_operand_p (rl_arg)
6300 && simple_operand_p (rr_arg))
6302 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6303 if (code == TRUTH_OR_EXPR
6304 && lcode == NE_EXPR && integer_zerop (lr_arg)
6305 && rcode == NE_EXPR && integer_zerop (rr_arg)
6306 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6307 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6308 return build2_loc (loc, NE_EXPR, truth_type,
6309 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6310 ll_arg, rl_arg),
6311 build_int_cst (TREE_TYPE (ll_arg), 0));
6313 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6314 if (code == TRUTH_AND_EXPR
6315 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6316 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6317 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6318 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6319 return build2_loc (loc, EQ_EXPR, truth_type,
6320 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6321 ll_arg, rl_arg),
6322 build_int_cst (TREE_TYPE (ll_arg), 0));
6325 /* See if the comparisons can be merged. Then get all the parameters for
6326 each side. */
6328 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6329 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6330 return 0;
6332 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6333 volatilep = 0;
6334 ll_inner = decode_field_reference (loc, &ll_arg,
6335 &ll_bitsize, &ll_bitpos, &ll_mode,
6336 &ll_unsignedp, &ll_reversep, &volatilep,
6337 &ll_mask, &ll_and_mask);
6338 lr_inner = decode_field_reference (loc, &lr_arg,
6339 &lr_bitsize, &lr_bitpos, &lr_mode,
6340 &lr_unsignedp, &lr_reversep, &volatilep,
6341 &lr_mask, &lr_and_mask);
6342 rl_inner = decode_field_reference (loc, &rl_arg,
6343 &rl_bitsize, &rl_bitpos, &rl_mode,
6344 &rl_unsignedp, &rl_reversep, &volatilep,
6345 &rl_mask, &rl_and_mask);
6346 rr_inner = decode_field_reference (loc, &rr_arg,
6347 &rr_bitsize, &rr_bitpos, &rr_mode,
6348 &rr_unsignedp, &rr_reversep, &volatilep,
6349 &rr_mask, &rr_and_mask);
6351 /* It must be true that the inner operation on the lhs of each
6352 comparison must be the same if we are to be able to do anything.
6353 Then see if we have constants. If not, the same must be true for
6354 the rhs's. */
6355 if (volatilep
6356 || ll_reversep != rl_reversep
6357 || ll_inner == 0 || rl_inner == 0
6358 || ! operand_equal_p (ll_inner, rl_inner, 0))
6359 return 0;
6361 if (TREE_CODE (lr_arg) == INTEGER_CST
6362 && TREE_CODE (rr_arg) == INTEGER_CST)
6364 l_const = lr_arg, r_const = rr_arg;
6365 lr_reversep = ll_reversep;
6367 else if (lr_reversep != rr_reversep
6368 || lr_inner == 0 || rr_inner == 0
6369 || ! operand_equal_p (lr_inner, rr_inner, 0))
6370 return 0;
6371 else
6372 l_const = r_const = 0;
6374 /* If either comparison code is not correct for our logical operation,
6375 fail. However, we can convert a one-bit comparison against zero into
6376 the opposite comparison against that bit being set in the field. */
6378 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6379 if (lcode != wanted_code)
6381 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6383 /* Make the left operand unsigned, since we are only interested
6384 in the value of one bit. Otherwise we are doing the wrong
6385 thing below. */
6386 ll_unsignedp = 1;
6387 l_const = ll_mask;
6389 else
6390 return 0;
6393 /* This is analogous to the code for l_const above. */
6394 if (rcode != wanted_code)
6396 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6398 rl_unsignedp = 1;
6399 r_const = rl_mask;
6401 else
6402 return 0;
6405 /* See if we can find a mode that contains both fields being compared on
6406 the left. If we can't, fail. Otherwise, update all constants and masks
6407 to be relative to a field of that size. */
6408 first_bit = MIN (ll_bitpos, rl_bitpos);
6409 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6410 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6411 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6412 volatilep, &lnmode))
6413 return 0;
6415 lnbitsize = GET_MODE_BITSIZE (lnmode);
6416 lnbitpos = first_bit & ~ (lnbitsize - 1);
6417 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6418 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6420 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6422 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6423 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6426 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6427 size_int (xll_bitpos));
6428 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6429 size_int (xrl_bitpos));
6431 if (l_const)
6433 l_const = fold_convert_loc (loc, lntype, l_const);
6434 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6435 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6436 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6437 fold_build1_loc (loc, BIT_NOT_EXPR,
6438 lntype, ll_mask))))
6440 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6442 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6445 if (r_const)
6447 r_const = fold_convert_loc (loc, lntype, r_const);
6448 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6449 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6450 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6451 fold_build1_loc (loc, BIT_NOT_EXPR,
6452 lntype, rl_mask))))
6454 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6456 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6460 /* If the right sides are not constant, do the same for them. Also,
6461 disallow this optimization if a size, signedness or storage order
6462 mismatch occurs between the left and right sides. */
6463 if (l_const == 0)
6465 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6466 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6467 || ll_reversep != lr_reversep
6468 /* Make sure the two fields on the right
6469 correspond to the left without being swapped. */
6470 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6471 return 0;
6473 first_bit = MIN (lr_bitpos, rr_bitpos);
6474 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6475 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6476 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6477 volatilep, &rnmode))
6478 return 0;
6480 rnbitsize = GET_MODE_BITSIZE (rnmode);
6481 rnbitpos = first_bit & ~ (rnbitsize - 1);
6482 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6483 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6485 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6487 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6488 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6491 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6492 rntype, lr_mask),
6493 size_int (xlr_bitpos));
6494 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6495 rntype, rr_mask),
6496 size_int (xrr_bitpos));
6498 /* Make a mask that corresponds to both fields being compared.
6499 Do this for both items being compared. If the operands are the
6500 same size and the bits being compared are in the same position
6501 then we can do this by masking both and comparing the masked
6502 results. */
6503 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6504 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6505 if (lnbitsize == rnbitsize
6506 && xll_bitpos == xlr_bitpos
6507 && lnbitpos >= 0
6508 && rnbitpos >= 0)
6510 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6511 lntype, lnbitsize, lnbitpos,
6512 ll_unsignedp || rl_unsignedp, ll_reversep);
6513 if (! all_ones_mask_p (ll_mask, lnbitsize))
6514 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6516 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6517 rntype, rnbitsize, rnbitpos,
6518 lr_unsignedp || rr_unsignedp, lr_reversep);
6519 if (! all_ones_mask_p (lr_mask, rnbitsize))
6520 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6522 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6525 /* There is still another way we can do something: If both pairs of
6526 fields being compared are adjacent, we may be able to make a wider
6527 field containing them both.
6529 Note that we still must mask the lhs/rhs expressions. Furthermore,
6530 the mask must be shifted to account for the shift done by
6531 make_bit_field_ref. */
6532 if (((ll_bitsize + ll_bitpos == rl_bitpos
6533 && lr_bitsize + lr_bitpos == rr_bitpos)
6534 || (ll_bitpos == rl_bitpos + rl_bitsize
6535 && lr_bitpos == rr_bitpos + rr_bitsize))
6536 && ll_bitpos >= 0
6537 && rl_bitpos >= 0
6538 && lr_bitpos >= 0
6539 && rr_bitpos >= 0)
6541 tree type;
6543 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6544 ll_bitsize + rl_bitsize,
6545 MIN (ll_bitpos, rl_bitpos),
6546 ll_unsignedp, ll_reversep);
6547 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6548 lr_bitsize + rr_bitsize,
6549 MIN (lr_bitpos, rr_bitpos),
6550 lr_unsignedp, lr_reversep);
6552 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6553 size_int (MIN (xll_bitpos, xrl_bitpos)));
6554 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6555 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6557 /* Convert to the smaller type before masking out unwanted bits. */
6558 type = lntype;
6559 if (lntype != rntype)
6561 if (lnbitsize > rnbitsize)
6563 lhs = fold_convert_loc (loc, rntype, lhs);
6564 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6565 type = rntype;
6567 else if (lnbitsize < rnbitsize)
6569 rhs = fold_convert_loc (loc, lntype, rhs);
6570 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6571 type = lntype;
6575 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6576 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6578 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6579 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6581 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6584 return 0;
6587 /* Handle the case of comparisons with constants. If there is something in
6588 common between the masks, those bits of the constants must be the same.
6589 If not, the condition is always false. Test for this to avoid generating
6590 incorrect code below. */
6591 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6592 if (! integer_zerop (result)
6593 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6594 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6596 if (wanted_code == NE_EXPR)
6598 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6599 return constant_boolean_node (true, truth_type);
6601 else
6603 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6604 return constant_boolean_node (false, truth_type);
6608 if (lnbitpos < 0)
6609 return 0;
6611 /* Construct the expression we will return. First get the component
6612 reference we will make. Unless the mask is all ones across the width of
6613 that field, perform the mask operation. Then compare with the
6614 merged constant. */
6615 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6616 lntype, lnbitsize, lnbitpos,
6617 ll_unsignedp || rl_unsignedp, ll_reversep);
6619 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6620 if (! all_ones_mask_p (ll_mask, lnbitsize))
6621 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6623 return build2_loc (loc, wanted_code, truth_type, result,
6624 const_binop (BIT_IOR_EXPR, l_const, r_const));
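/* Illustrative sketch, not part of the original file: the effect of the
   merge above for two adjacent byte-sized fields already loaded into one
   16-bit word, with the low byte holding A and the high byte holding B
   (little-endian layout assumed). "a == 2 && b == 4" becomes a single
   comparison of the wider word; with partial masks, a BIT_AND_EXPR is
   applied first, as built above. */

static int
merged_field_compare_demo (unsigned short word)
{
  /* return (word & 0xff) == 2 && (word >> 8) == 4;    before */
  return word == ((4 << 8) | 2);                     /*  after */
}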
6627 /* T is an integer expression that is being multiplied by, divided by,
6628 or reduced modulo a constant C (CODE says which operation and what kind
6629 of division or modulus). See if we can eliminate that operation by folding it with
6630 other operations already in T. WIDE_TYPE, if non-null, is a type that
6631 should be used for the computation if wider than our type.
6633 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6634 (X * 2) + (Y * 4). We must, however, be assured that either the original
6635 expression would not overflow or that overflow is undefined for the type
6636 in the language in question.
6638 If we return a non-null expression, it is an equivalent form of the
6639 original computation, but need not be in the original type.
6641 We set *STRICT_OVERFLOW_P to true if the return value depends on
6642 signed overflow being undefined. Otherwise we do not change
6643 *STRICT_OVERFLOW_P. */
6645 static tree
6646 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6647 bool *strict_overflow_p)
6649 /* To avoid exponential search depth, refuse to allow recursion past
6650 three levels. Beyond that (1) it's highly unlikely that we'll find
6651 something interesting and (2) we've probably processed it before
6652 when we built the inner expression. */
6654 static int depth;
6655 tree ret;
6657 if (depth > 3)
6658 return NULL;
6660 depth++;
6661 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6662 depth--;
6664 return ret;
6667 static tree
6668 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6669 bool *strict_overflow_p)
6671 tree type = TREE_TYPE (t);
6672 enum tree_code tcode = TREE_CODE (t);
6673 tree ctype = (wide_type != 0
6674 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6675 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6676 ? wide_type : type);
6677 tree t1, t2;
6678 int same_p = tcode == code;
6679 tree op0 = NULL_TREE, op1 = NULL_TREE;
6680 bool sub_strict_overflow_p;
6682 /* Don't deal with constants of zero here; they confuse the code below. */
6683 if (integer_zerop (c))
6684 return NULL_TREE;
6686 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6687 op0 = TREE_OPERAND (t, 0);
6689 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6690 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6692 /* Note that we need not handle conditional operations here since fold
6693 already handles those cases. So just do arithmetic here. */
6694 switch (tcode)
6696 case INTEGER_CST:
6697 /* For a constant, we can always simplify if we are a multiply
6698 or (for divide and modulus) if it is a multiple of our constant. */
6699 if (code == MULT_EXPR
6700 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6701 TYPE_SIGN (type)))
6703 tree tem = const_binop (code, fold_convert (ctype, t),
6704 fold_convert (ctype, c));
6705 /* If the multiplication overflowed, we lost information on it.
6706 See PR68142 and PR69845. */
6707 if (TREE_OVERFLOW (tem))
6708 return NULL_TREE;
6709 return tem;
6711 break;
6713 CASE_CONVERT: case NON_LVALUE_EXPR:
6714 /* If op0 is an expression ... */
6715 if ((COMPARISON_CLASS_P (op0)
6716 || UNARY_CLASS_P (op0)
6717 || BINARY_CLASS_P (op0)
6718 || VL_EXP_CLASS_P (op0)
6719 || EXPRESSION_CLASS_P (op0))
6720 /* ... and has wrapping overflow, and its type is smaller
6721 than ctype, then we cannot pass through as widening. */
6722 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6723 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6724 && (TYPE_PRECISION (ctype)
6725 > TYPE_PRECISION (TREE_TYPE (op0))))
6726 /* ... or this is a truncation (t is narrower than op0),
6727 then we cannot pass through this narrowing. */
6728 || (TYPE_PRECISION (type)
6729 < TYPE_PRECISION (TREE_TYPE (op0)))
6730 /* ... or signedness changes for division or modulus,
6731 then we cannot pass through this conversion. */
6732 || (code != MULT_EXPR
6733 && (TYPE_UNSIGNED (ctype)
6734 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6735 /* ... or has undefined overflow while the converted to
6736 type has not, we cannot do the operation in the inner type
6737 as that would introduce undefined overflow. */
6738 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6739 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6740 && !TYPE_OVERFLOW_UNDEFINED (type))))
6741 break;
6743 /* Pass the constant down and see if we can make a simplification. If
6744 we can, replace this expression with the inner simplification for
6745 possible later conversion to our or some other type. */
6746 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6747 && TREE_CODE (t2) == INTEGER_CST
6748 && !TREE_OVERFLOW (t2)
6749 && (t1 = extract_muldiv (op0, t2, code,
6750 code == MULT_EXPR ? ctype : NULL_TREE,
6751 strict_overflow_p)) != 0)
6752 return t1;
6753 break;
6755 case ABS_EXPR:
6756 /* If widening the type changes it from signed to unsigned, then we
6757 must avoid building ABS_EXPR itself as unsigned. */
6758 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6760 tree cstype = (*signed_type_for) (ctype);
6761 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6762 != 0)
6764 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6765 return fold_convert (ctype, t1);
6767 break;
6769 /* If the constant is negative, we cannot simplify this. */
6770 if (tree_int_cst_sgn (c) == -1)
6771 break;
6772 /* FALLTHROUGH */
6773 case NEGATE_EXPR:
6774 /* For division and modulus, type can't be unsigned, as e.g.
6775 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6776 For signed types, even with wrapping overflow, this is fine. */
6777 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6778 break;
6779 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6780 != 0)
6781 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6782 break;
6784 case MIN_EXPR: case MAX_EXPR:
6785 /* If widening the type changes the signedness, then we can't perform
6786 this optimization as that changes the result. */
6787 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6788 break;
6790 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6791 sub_strict_overflow_p = false;
6792 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6793 &sub_strict_overflow_p)) != 0
6794 && (t2 = extract_muldiv (op1, c, code, wide_type,
6795 &sub_strict_overflow_p)) != 0)
6797 if (tree_int_cst_sgn (c) < 0)
6798 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6799 if (sub_strict_overflow_p)
6800 *strict_overflow_p = true;
6801 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6802 fold_convert (ctype, t2));
6804 break;
6806 case LSHIFT_EXPR: case RSHIFT_EXPR:
6807 /* If the second operand is constant, this is a multiplication
6808 or floor division by a power of two, so we can treat it that
6809 way unless the multiplier or divisor overflows. Signed
6810 left-shift overflow is implementation-defined rather than
6811 undefined in C90, so do not convert signed left shift into
6812 multiplication. */
6813 if (TREE_CODE (op1) == INTEGER_CST
6814 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6815 /* const_binop may not detect overflow correctly,
6816 so check for it explicitly here. */
6817 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6818 wi::to_wide (op1))
6819 && (t1 = fold_convert (ctype,
6820 const_binop (LSHIFT_EXPR, size_one_node,
6821 op1))) != 0
6822 && !TREE_OVERFLOW (t1))
6823 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6824 ? MULT_EXPR : FLOOR_DIV_EXPR,
6825 ctype,
6826 fold_convert (ctype, op0),
6827 t1),
6828 c, code, wide_type, strict_overflow_p);
6829 break;
6831 case PLUS_EXPR: case MINUS_EXPR:
6832 /* See if we can eliminate the operation on both sides. If we can, we
6833 can return a new PLUS or MINUS. If we can't, the only remaining
6834 cases where we can do anything are if the second operand is a
6835 constant. */
6836 sub_strict_overflow_p = false;
6837 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6838 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6839 if (t1 != 0 && t2 != 0
6840 && TYPE_OVERFLOW_WRAPS (ctype)
6841 && (code == MULT_EXPR
6842 /* If not multiplication, we can only do this if both operands
6843 are divisible by c. */
6844 || (multiple_of_p (ctype, op0, c)
6845 && multiple_of_p (ctype, op1, c))))
6847 if (sub_strict_overflow_p)
6848 *strict_overflow_p = true;
6849 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6850 fold_convert (ctype, t2));
6853 /* If this was a subtraction, negate OP1 and set it to be an addition.
6854 This simplifies the logic below. */
6855 if (tcode == MINUS_EXPR)
6857 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6858 /* If OP1 was not easily negatable, the constant may be OP0. */
6859 if (TREE_CODE (op0) == INTEGER_CST)
6861 std::swap (op0, op1);
6862 std::swap (t1, t2);
6866 if (TREE_CODE (op1) != INTEGER_CST)
6867 break;
6869 /* If either OP1 or C is negative, this optimization is not safe for
6870 some of the division and remainder types, while for others we need
6871 to change the code. */
6872 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6874 if (code == CEIL_DIV_EXPR)
6875 code = FLOOR_DIV_EXPR;
6876 else if (code == FLOOR_DIV_EXPR)
6877 code = CEIL_DIV_EXPR;
6878 else if (code != MULT_EXPR
6879 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6880 break;
6883 /* If it's a multiply or a division/modulus operation of a multiple
6884 of our constant, do the operation and verify it doesn't overflow. */
6885 if (code == MULT_EXPR
6886 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6887 TYPE_SIGN (type)))
6889 op1 = const_binop (code, fold_convert (ctype, op1),
6890 fold_convert (ctype, c));
6891 /* We allow the constant to overflow with wrapping semantics. */
6892 if (op1 == 0
6893 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6894 break;
6896 else
6897 break;
6899 /* If we have an unsigned type, we cannot widen the operation since it
6900 will change the result if the original computation overflowed. */
6901 if (TYPE_UNSIGNED (ctype) && ctype != type)
6902 break;
6904 /* The last case is if we are a multiply. In that case, we can
6905 apply the distributive law to commute the multiply and addition
6906 if the multiplication of the constants doesn't overflow
6907 and overflow is defined. With undefined overflow
6908 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6909 But fold_plusminus_mult_expr would factor back any power-of-two
6910 value so do not distribute in the first place in this case. */
6911 if (code == MULT_EXPR
6912 && TYPE_OVERFLOW_WRAPS (ctype)
6913 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6914 return fold_build2 (tcode, ctype,
6915 fold_build2 (code, ctype,
6916 fold_convert (ctype, op0),
6917 fold_convert (ctype, c)),
6918 op1);
6920 break;
6922 case MULT_EXPR:
6923 /* We have a special case here if we are doing something like
6924 (C * 8) % 4 since we know that's zero. */
6925 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6926 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6927 /* If the multiplication can overflow we cannot optimize this. */
6928 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6929 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6930 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6931 TYPE_SIGN (type)))
6933 *strict_overflow_p = true;
6934 return omit_one_operand (type, integer_zero_node, op0);
6937 /* ... fall through ... */
6939 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6940 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6941 /* If we can extract our operation from the LHS, do so and return a
6942 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6943 do something only if the second operand is a constant. */
6944 if (same_p
6945 && TYPE_OVERFLOW_WRAPS (ctype)
6946 && (t1 = extract_muldiv (op0, c, code, wide_type,
6947 strict_overflow_p)) != 0)
6948 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6949 fold_convert (ctype, op1));
6950 else if (tcode == MULT_EXPR && code == MULT_EXPR
6951 && TYPE_OVERFLOW_WRAPS (ctype)
6952 && (t1 = extract_muldiv (op1, c, code, wide_type,
6953 strict_overflow_p)) != 0)
6954 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6955 fold_convert (ctype, t1));
6956 else if (TREE_CODE (op1) != INTEGER_CST)
6957 return 0;
6959 /* If these are the same operation types, we can associate them
6960 assuming no overflow. */
6961 if (tcode == code)
6963 bool overflow_p = false;
6964 wi::overflow_type overflow_mul;
6965 signop sign = TYPE_SIGN (ctype);
6966 unsigned prec = TYPE_PRECISION (ctype);
6967 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6968 wi::to_wide (c, prec),
6969 sign, &overflow_mul);
6970 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6971 if (overflow_mul
6972 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6973 overflow_p = true;
6974 if (!overflow_p)
6975 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6976 wide_int_to_tree (ctype, mul));
6979 /* If these operations "cancel" each other, we have the main
6980 optimizations of this pass, which occur when either constant is a
6981 multiple of the other, in which case we replace this with an
6982 operation of either CODE or TCODE.
6984 If we have an unsigned type, we cannot do this since it will change
6985 the result if the original computation overflowed. */
6986 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6987 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6988 || (tcode == MULT_EXPR
6989 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6990 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6991 && code != MULT_EXPR)))
6993 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6994 TYPE_SIGN (type)))
6996 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6997 *strict_overflow_p = true;
6998 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6999 fold_convert (ctype,
7000 const_binop (TRUNC_DIV_EXPR,
7001 op1, c)));
7003 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
7004 TYPE_SIGN (type)))
7006 if (TYPE_OVERFLOW_UNDEFINED (ctype))
7007 *strict_overflow_p = true;
7008 return fold_build2 (code, ctype, fold_convert (ctype, op0),
7009 fold_convert (ctype,
7010 const_binop (TRUNC_DIV_EXPR,
7011 c, op1)));
7014 break;
7016 default:
7017 break;
7020 return 0;
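/* Illustrative sketch, not part of the original file: the head-comment
   example of extract_muldiv on plain ints. (X * 8) + (Y * 16) divided by
   4 cancels into (X * 2) + (Y * 4); this is only valid because signed
   overflow is undefined (or the original computation is known not to
   overflow), which is what *STRICT_OVERFLOW_P records above. */

static int
extract_muldiv_demo (int x, int y)
{
  /* return (x * 8 + y * 16) / 4;    before */
  return x * 2 + y * 4;             /*  after */
}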
7023 /* Return a node which has the indicated constant VALUE (either 0 or
7024 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7025 and is of the indicated TYPE. */
7027 tree
7028 constant_boolean_node (bool value, tree type)
7030 if (type == integer_type_node)
7031 return value ? integer_one_node : integer_zero_node;
7032 else if (type == boolean_type_node)
7033 return value ? boolean_true_node : boolean_false_node;
7034 else if (TREE_CODE (type) == VECTOR_TYPE)
7035 return build_vector_from_val (type,
7036 build_int_cst (TREE_TYPE (type),
7037 value ? -1 : 0));
7038 else
7039 return fold_convert (type, value ? integer_one_node : integer_zero_node);
7043 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7044 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
7045 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7046 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
7047 COND is the first argument to CODE; otherwise (as in the example
7048 given here), it is the second argument. TYPE is the type of the
7049 original expression. Return NULL_TREE if no simplification is
7050 possible. */
7052 static tree
7053 fold_binary_op_with_conditional_arg (location_t loc,
7054 enum tree_code code,
7055 tree type, tree op0, tree op1,
7056 tree cond, tree arg, int cond_first_p)
7058 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7059 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7060 tree test, true_value, false_value;
7061 tree lhs = NULL_TREE;
7062 tree rhs = NULL_TREE;
7063 enum tree_code cond_code = COND_EXPR;
7065 /* Do not move possibly trapping operations into the conditional as this
7066 pessimizes code and causes gimplification issues when applied late. */
7067 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7068 ANY_INTEGRAL_TYPE_P (type)
7069 && TYPE_OVERFLOW_TRAPS (type), op1))
7070 return NULL_TREE;
7072 if (TREE_CODE (cond) == COND_EXPR
7073 || TREE_CODE (cond) == VEC_COND_EXPR)
7075 test = TREE_OPERAND (cond, 0);
7076 true_value = TREE_OPERAND (cond, 1);
7077 false_value = TREE_OPERAND (cond, 2);
7078 /* If this operand throws an exception, then it does not make
7079 sense to try to perform a logical or arithmetic operation
7080 involving it. */
7081 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7082 lhs = true_value;
7083 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7084 rhs = false_value;
7086 else if (!(TREE_CODE (type) != VECTOR_TYPE
7087 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
7089 tree testtype = TREE_TYPE (cond);
7090 test = cond;
7091 true_value = constant_boolean_node (true, testtype);
7092 false_value = constant_boolean_node (false, testtype);
7094 else
7095 /* Detect the case of mixing vector and scalar types - bail out. */
7096 return NULL_TREE;
7098 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7099 cond_code = VEC_COND_EXPR;
7101 /* This transformation is only worthwhile if we don't have to wrap ARG
7102 in a SAVE_EXPR and the operation can be simplified without recursing
7103 on at least one of the branches once it's pushed inside the COND_EXPR. */
7104 if (!TREE_CONSTANT (arg)
7105 && (TREE_SIDE_EFFECTS (arg)
7106 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7107 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7108 return NULL_TREE;
7110 arg = fold_convert_loc (loc, arg_type, arg);
7111 if (lhs == 0)
7113 true_value = fold_convert_loc (loc, cond_type, true_value);
7114 if (cond_first_p)
7115 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7116 else
7117 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7119 if (rhs == 0)
7121 false_value = fold_convert_loc (loc, cond_type, false_value);
7122 if (cond_first_p)
7123 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7124 else
7125 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7128 /* Check that we have simplified at least one of the branches. */
7129 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7130 return NULL_TREE;
7132 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
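/* Illustrative sketch, not part of the original file: the head-comment
   transformation on plain ints. Distributing the operation into the arms
   is only worthwhile when at least one arm then simplifies, which the
   TREE_CONSTANT checks above enforce. */

static int
cond_arg_demo (int a, int b)
{
  /* return a + (b ? 1 : 0);    before */
  return b ? a + 1 : a;       /*  after: the a + 0 arm folded to a */
}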
7136 /* Subroutine of fold() that checks for the addition of +/- 0.0.
7138 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
7139 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
7140 ADDEND is the same as X.
7142 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7143 and finite. The problematic cases are when X is zero, and its mode
7144 has signed zeros. In the case of rounding towards -infinity,
7145 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7146 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7148 bool
7149 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
7151 if (!real_zerop (addend))
7152 return false;
7154 /* Don't allow the fold with -fsignaling-nans. */
7155 if (HONOR_SNANS (type))
7156 return false;
7158 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7159 if (!HONOR_SIGNED_ZEROS (type))
7160 return true;
7162 /* There is no case that is safe for all rounding modes. */
7163 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7164 return false;
7166 /* In a vector or complex, we would need to check the sign of all zeros. */
7167 if (TREE_CODE (addend) == VECTOR_CST)
7168 addend = uniform_vector_p (addend);
7169 if (!addend || TREE_CODE (addend) != REAL_CST)
7170 return false;
7172 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7173 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
7174 negate = !negate;
7176 /* The mode has signed zeros, and we have to honor their sign.
7177 In this situation, there is only one case we can return true for.
7178 X - 0 is the same as X with default rounding. */
7179 return negate;
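/* Illustrative sketch, not part of the original file: why only X - 0.0
   survives when signed zeros are honored. With default rounding,
   -0.0 + 0.0 evaluates to +0.0, so "x + 0.0" changes the sign of a
   negative zero, whereas "x - 0.0" returns x unchanged for every x,
   NaNs included. */

static double
sub_zero_demo (double x)
{
  return x - 0.0;       /* same as x under default rounding */
}

static double
add_zero_demo (double x)
{
  return x + 0.0;       /* NOT the same as x: yields +0.0 for x == -0.0 */
}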
7182 /* Subroutine of match.pd that optimizes comparisons of a division by
7183 a nonzero integer constant against an integer constant, i.e.
7184 X/C1 op C2.
7186 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7187 GE_EXPR or LE_EXPR. C1 and C2 must each be an INTEGER_CST. */
7189 enum tree_code
7190 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7191 tree *hi, bool *neg_overflow)
7193 tree prod, tmp, type = TREE_TYPE (c1);
7194 signop sign = TYPE_SIGN (type);
7195 wi::overflow_type overflow;
7197 /* We have to do this the hard way to detect unsigned overflow.
7198 prod = int_const_binop (MULT_EXPR, c1, c2); */
7199 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7200 prod = force_fit_type (type, val, -1, overflow);
7201 *neg_overflow = false;
7203 if (sign == UNSIGNED)
7205 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7206 *lo = prod;
7208 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7209 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7210 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7212 else if (tree_int_cst_sgn (c1) >= 0)
7214 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7215 switch (tree_int_cst_sgn (c2))
7217 case -1:
7218 *neg_overflow = true;
7219 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7220 *hi = prod;
7221 break;
7223 case 0:
7224 *lo = fold_negate_const (tmp, type);
7225 *hi = tmp;
7226 break;
7228 case 1:
7229 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7230 *lo = prod;
7231 break;
7233 default:
7234 gcc_unreachable ();
7237 else
7239 /* A negative divisor reverses the relational operators. */
7240 code = swap_tree_comparison (code);
7242 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7243 switch (tree_int_cst_sgn (c2))
7245 case -1:
7246 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7247 *lo = prod;
7248 break;
7250 case 0:
7251 *hi = fold_negate_const (tmp, type);
7252 *lo = tmp;
7253 break;
7255 case 1:
7256 *neg_overflow = true;
7257 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7258 *hi = prod;
7259 break;
7261 default:
7262 gcc_unreachable ();
7266 if (code != EQ_EXPR && code != NE_EXPR)
7267 return code;
7269 if (TREE_OVERFLOW (*lo)
7270 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7271 *lo = NULL_TREE;
7272 if (TREE_OVERFLOW (*hi)
7273 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7274 *hi = NULL_TREE;
7276 return code;
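/* A worked example, not from the GCC sources: folding X / 3 == 5 for
   signed X takes the tree_int_cst_sgn (c1) >= 0 branch with prod = 15
   and tmp = 2, so *LO = 15 and *HI = 17; truncating division maps
   exactly [15, 17] to 5, and the comparison becomes
   15 <= X && X <= 17.  For X / 3 == -5 the sgn (c2) == -1 case sets
   *NEG_OVERFLOW and produces the range [-17, -15].  */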
7280 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7281 equality/inequality test, then return a simplified form of the test
7282 using a sign test. Otherwise return NULL. RESULT_TYPE is the desired
7283 result type. */
7285 static tree
7286 fold_single_bit_test_into_sign_test (location_t loc,
7287 enum tree_code code, tree arg0, tree arg1,
7288 tree result_type)
7290 /* If this is testing a single bit, we can optimize the test. */
7291 if ((code == NE_EXPR || code == EQ_EXPR)
7292 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7293 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7295 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7296 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7297 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7299 if (arg00 != NULL_TREE
7300 /* This is only a win if casting to a signed type is cheap,
7301 i.e. when arg00's type is not a partial mode. */
7302 && type_has_mode_precision_p (TREE_TYPE (arg00)))
7304 tree stype = signed_type_for (TREE_TYPE (arg00));
7305 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7306 result_type,
7307 fold_convert_loc (loc, stype, arg00),
7308 build_int_cst (stype, 0));
7312 return NULL_TREE;
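/* For instance (an illustrative example, not from the GCC sources):
   with 32-bit int, sign_bit_p recognizes 0x80000000 as the sign bit of
   A, so (x & 0x80000000) != 0 folds to (int) x < 0 and
   (x & 0x80000000) == 0 folds to (int) x >= 0, i.e. roughly

   static int msb_set (unsigned int x) { return (int) x < 0; }

   on targets where the conversion to int wraps, as GCC defines it.  */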
7315 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7316 equality/inequality test, then return a simplified form of
7317 the test using shifts and logical operations. Otherwise return
7318 NULL. RESULT_TYPE is the desired result type. */
7320 tree
7321 fold_single_bit_test (location_t loc, enum tree_code code,
7322 tree arg0, tree arg1, tree result_type)
7324 /* If this is testing a single bit, we can optimize the test. */
7325 if ((code == NE_EXPR || code == EQ_EXPR)
7326 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7327 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7329 tree inner = TREE_OPERAND (arg0, 0);
7330 tree type = TREE_TYPE (arg0);
7331 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7332 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7333 int ops_unsigned;
7334 tree signed_type, unsigned_type, intermediate_type;
7335 tree tem, one;
7337 /* First, see if we can fold the single bit test into a sign-bit
7338 test. */
7339 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7340 result_type);
7341 if (tem)
7342 return tem;
7344 /* Otherwise we have (A & C) != 0 where C is a single bit,
7345 convert that into ((A >> C2) & 1), where C2 = log2(C).
7346 Similarly for (A & C) == 0. */
7348 /* If INNER is a right shift by a constant and the shift count plus
7349 BITNUM stays within the type's precision, adjust BITNUM and INNER. */
7350 if (TREE_CODE (inner) == RSHIFT_EXPR
7351 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7352 && bitnum < TYPE_PRECISION (type)
7353 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7354 TYPE_PRECISION (type) - bitnum))
7356 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7357 inner = TREE_OPERAND (inner, 0);
7360 /* If we are going to be able to omit the AND below, we must do our
7361 operations as unsigned. If we must use the AND, we have a choice.
7362 Normally unsigned is faster, but for some machines signed is. */
7363 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7364 && !flag_syntax_only) ? 0 : 1;
7366 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7367 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7368 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7369 inner = fold_convert_loc (loc, intermediate_type, inner);
7371 if (bitnum != 0)
7372 inner = build2 (RSHIFT_EXPR, intermediate_type,
7373 inner, size_int (bitnum));
7375 one = build_int_cst (intermediate_type, 1);
7377 if (code == EQ_EXPR)
7378 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7380 /* Put the AND last so it can combine with more things. */
7381 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7383 /* Make sure to return the proper type. */
7384 inner = fold_convert_loc (loc, result_type, inner);
7386 return inner;
7388 return NULL_TREE;
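/* An illustrative sketch, not from the GCC sources, of the shift form
   built above for C = 8 (so C2 = 3):

   static unsigned int bit3_ne (unsigned int a) { return (a >> 3) & 1; }        // (a & 8) != 0
   static unsigned int bit3_eq (unsigned int a) { return ((a >> 3) ^ 1) & 1; }  // (a & 8) == 0

   The XOR with one implements the EQ_EXPR case, and the AND is emitted
   last so it can combine with surrounding code.  */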
7391 /* Test whether it is preferable to swap two operands, ARG0 and
7392 ARG1, for example because ARG0 is an integer constant and ARG1
7393 isn't. */
7395 bool
7396 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7398 if (CONSTANT_CLASS_P (arg1))
7399 return 0;
7400 if (CONSTANT_CLASS_P (arg0))
7401 return 1;
7403 STRIP_NOPS (arg0);
7404 STRIP_NOPS (arg1);
7406 if (TREE_CONSTANT (arg1))
7407 return 0;
7408 if (TREE_CONSTANT (arg0))
7409 return 1;
7411 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7412 for commutative and comparison operators. Ensuring a canonical
7413 form allows the optimizers to find additional redundancies without
7414 having to explicitly check for both orderings. */
7415 if (TREE_CODE (arg0) == SSA_NAME
7416 && TREE_CODE (arg1) == SSA_NAME
7417 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7418 return 1;
7420 /* Put SSA_NAMEs last. */
7421 if (TREE_CODE (arg1) == SSA_NAME)
7422 return 0;
7423 if (TREE_CODE (arg0) == SSA_NAME)
7424 return 1;
7426 /* Put variables last. */
7427 if (DECL_P (arg1))
7428 return 0;
7429 if (DECL_P (arg0))
7430 return 1;
7432 return 0;
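/* For example (illustrative, not from the GCC sources): for 5 + x this
   predicate returns true, so commutative folding canonicalizes the tree
   to x + 5; likewise _7 + _3 becomes _3 + _7, ordering SSA names by
   version so redundancies are found without checking both orders.  */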
7436 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7437 means A >= Y && A != MAX, but in this case we know that
7438 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7440 static tree
7441 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7443 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7445 if (TREE_CODE (bound) == LT_EXPR)
7446 a = TREE_OPERAND (bound, 0);
7447 else if (TREE_CODE (bound) == GT_EXPR)
7448 a = TREE_OPERAND (bound, 1);
7449 else
7450 return NULL_TREE;
7452 typea = TREE_TYPE (a);
7453 if (!INTEGRAL_TYPE_P (typea)
7454 && !POINTER_TYPE_P (typea))
7455 return NULL_TREE;
7457 if (TREE_CODE (ineq) == LT_EXPR)
7459 a1 = TREE_OPERAND (ineq, 1);
7460 y = TREE_OPERAND (ineq, 0);
7462 else if (TREE_CODE (ineq) == GT_EXPR)
7464 a1 = TREE_OPERAND (ineq, 0);
7465 y = TREE_OPERAND (ineq, 1);
7467 else
7468 return NULL_TREE;
7470 if (TREE_TYPE (a1) != typea)
7471 return NULL_TREE;
7473 if (POINTER_TYPE_P (typea))
7475 /* Convert the pointers to integers before taking the difference. */
7476 tree ta = fold_convert_loc (loc, ssizetype, a);
7477 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7478 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7480 else
7481 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7483 if (!diff || !integer_onep (diff))
7484 return NULL_TREE;
7486 return fold_build2_loc (loc, GE_EXPR, type, a, y);
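/* A concrete instance, not from the GCC sources: for a < n && a + 1 > y,
   BOUND supplies A = a and INEQ supplies A1 = a + 1 and Y = y; DIFF
   folds to 1, so the result is a < n && a >= y.  This is valid because
   a < n rules out a == MAX, the only value for which a + 1 wraps.  */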
7489 /* Fold a sum or difference of at least one multiplication.
7490 Returns the folded tree or NULL if no simplification could be made. */
7492 static tree
7493 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7494 tree arg0, tree arg1)
7496 tree arg00, arg01, arg10, arg11;
7497 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7499 /* (A * C) +- (B * C) -> (A+-B) * C.
7500 (A * C) +- A -> A * (C+-1).
7501 We are most concerned about the case where C is a constant,
7502 but other combinations show up during loop reduction. Since
7503 it is not difficult, try all four possibilities. */
7505 if (TREE_CODE (arg0) == MULT_EXPR)
7507 arg00 = TREE_OPERAND (arg0, 0);
7508 arg01 = TREE_OPERAND (arg0, 1);
7510 else if (TREE_CODE (arg0) == INTEGER_CST)
7512 arg00 = build_one_cst (type);
7513 arg01 = arg0;
7515 else
7517 /* We cannot generate constant 1 for fract. */
7518 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7519 return NULL_TREE;
7520 arg00 = arg0;
7521 arg01 = build_one_cst (type);
7523 if (TREE_CODE (arg1) == MULT_EXPR)
7525 arg10 = TREE_OPERAND (arg1, 0);
7526 arg11 = TREE_OPERAND (arg1, 1);
7528 else if (TREE_CODE (arg1) == INTEGER_CST)
7530 arg10 = build_one_cst (type);
7531 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7532 the purpose of this canonicalization. */
7533 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7534 && negate_expr_p (arg1)
7535 && code == PLUS_EXPR)
7537 arg11 = negate_expr (arg1);
7538 code = MINUS_EXPR;
7540 else
7541 arg11 = arg1;
7543 else
7545 /* We cannot generate constant 1 for fract. */
7546 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7547 return NULL_TREE;
7548 arg10 = arg1;
7549 arg11 = build_one_cst (type);
7551 same = NULL_TREE;
7553 /* Prefer factoring a common non-constant. */
7554 if (operand_equal_p (arg00, arg10, 0))
7555 same = arg00, alt0 = arg01, alt1 = arg11;
7556 else if (operand_equal_p (arg01, arg11, 0))
7557 same = arg01, alt0 = arg00, alt1 = arg10;
7558 else if (operand_equal_p (arg00, arg11, 0))
7559 same = arg00, alt0 = arg01, alt1 = arg10;
7560 else if (operand_equal_p (arg01, arg10, 0))
7561 same = arg01, alt0 = arg00, alt1 = arg11;
7563 /* No identical multiplicands; see if we can find a common
7564 power-of-two factor in non-power-of-two multiplies. This
7565 can help in multi-dimensional array access. */
7566 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7568 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7569 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7570 HOST_WIDE_INT tmp;
7571 bool swap = false;
7572 tree maybe_same;
7574 /* Move min of absolute values to int11. */
7575 if (absu_hwi (int01) < absu_hwi (int11))
7577 tmp = int01, int01 = int11, int11 = tmp;
7578 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7579 maybe_same = arg01;
7580 swap = true;
7582 else
7583 maybe_same = arg11;
7585 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7586 if (factor > 1
7587 && pow2p_hwi (factor)
7588 && (int01 & (factor - 1)) == 0
7589 /* The remainder should not be a constant, otherwise we
7590 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7591 increased the number of multiplications necessary. */
7592 && TREE_CODE (arg10) != INTEGER_CST)
7594 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7595 build_int_cst (TREE_TYPE (arg00),
7596 int01 / int11));
7597 alt1 = arg10;
7598 same = maybe_same;
7599 if (swap)
7600 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7604 if (!same)
7605 return NULL_TREE;
7607 if (! ANY_INTEGRAL_TYPE_P (type)
7608 || TYPE_OVERFLOW_WRAPS (type)
7609 /* We are neither factoring zero nor minus one. */
7610 || TREE_CODE (same) == INTEGER_CST)
7611 return fold_build2_loc (loc, MULT_EXPR, type,
7612 fold_build2_loc (loc, code, type,
7613 fold_convert_loc (loc, type, alt0),
7614 fold_convert_loc (loc, type, alt1)),
7615 fold_convert_loc (loc, type, same));
7617 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7618 same may be minus one and thus the multiplication may overflow. Perform
7619 the sum operation in an unsigned type. */
7620 tree utype = unsigned_type_for (type);
7621 tree tem = fold_build2_loc (loc, code, utype,
7622 fold_convert_loc (loc, utype, alt0),
7623 fold_convert_loc (loc, utype, alt1));
7624 /* If the sum evaluated to a constant that is not -INF, the multiplication
7625 cannot overflow. */
7626 if (TREE_CODE (tem) == INTEGER_CST
7627 && (wi::to_wide (tem)
7628 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7629 return fold_build2_loc (loc, MULT_EXPR, type,
7630 fold_convert (type, tem), same);
7632 /* Do not resort to unsigned multiplication because
7633 we lose the no-overflow property of the expression. */
7634 return NULL_TREE;
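/* Worked examples, not from the GCC sources: a * 4 + b * 4 factors to
   (a + b) * 4; a * 4 + a becomes a * (4 + 1) through the implicit
   a * 1; and i * 4 + j * 2 uses the common power-of-two factor to give
   (i * 2 + j) * 2, while i * 4 + 2 is deliberately left alone so the
   number of multiplications does not grow.  */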
7637 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7638 specified by EXPR into the buffer PTR of length LEN bytes.
7639 Return the number of bytes placed in the buffer, or zero
7640 upon failure. */
7642 static int
7643 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7645 tree type = TREE_TYPE (expr);
7646 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7647 int byte, offset, word, words;
7648 unsigned char value;
7650 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7651 return 0;
7652 if (off == -1)
7653 off = 0;
7655 if (ptr == NULL)
7656 /* Dry run. */
7657 return MIN (len, total_bytes - off);
7659 words = total_bytes / UNITS_PER_WORD;
7661 for (byte = 0; byte < total_bytes; byte++)
7663 int bitpos = byte * BITS_PER_UNIT;
7664 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7665 number of bytes. */
7666 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7668 if (total_bytes > UNITS_PER_WORD)
7670 word = byte / UNITS_PER_WORD;
7671 if (WORDS_BIG_ENDIAN)
7672 word = (words - 1) - word;
7673 offset = word * UNITS_PER_WORD;
7674 if (BYTES_BIG_ENDIAN)
7675 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7676 else
7677 offset += byte % UNITS_PER_WORD;
7679 else
7680 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7681 if (offset >= off && offset - off < len)
7682 ptr[offset - off] = value;
7684 return MIN (len, total_bytes - off);
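/* An illustrative sketch, not from the GCC sources: on a little-endian
   target with 8-bit units and no word reordering, the loop above
   reduces to plain byte extraction, so the 32-bit constant 0x01020304
   is stored as { 0x04, 0x03, 0x02, 0x01 }:

   static void
   encode_u32_le (unsigned int val, unsigned char buf[4])
   {
     for (int byte = 0; byte < 4; byte++)
       buf[byte] = (val >> (byte * 8)) & 0xff;   // least significant byte first
   }
*/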
7688 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7689 specified by EXPR into the buffer PTR of length LEN bytes.
7690 Return the number of bytes placed in the buffer, or zero
7691 upon failure. */
7693 static int
7694 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7696 tree type = TREE_TYPE (expr);
7697 scalar_mode mode = SCALAR_TYPE_MODE (type);
7698 int total_bytes = GET_MODE_SIZE (mode);
7699 FIXED_VALUE_TYPE value;
7700 tree i_value, i_type;
7702 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7703 return 0;
7705 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7707 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7708 return 0;
7710 value = TREE_FIXED_CST (expr);
7711 i_value = double_int_to_tree (i_type, value.data);
7713 return native_encode_int (i_value, ptr, len, off);
7717 /* Subroutine of native_encode_expr. Encode the REAL_CST
7718 specified by EXPR into the buffer PTR of length LEN bytes.
7719 Return the number of bytes placed in the buffer, or zero
7720 upon failure. */
7722 static int
7723 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7725 tree type = TREE_TYPE (expr);
7726 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7727 int byte, offset, word, words, bitpos;
7728 unsigned char value;
7730 /* There are always 32 bits in each long, no matter the size of
7731 the host's long. We handle floating point representations with
7732 up to 192 bits. */
7733 long tmp[6];
7735 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7736 return 0;
7737 if (off == -1)
7738 off = 0;
7740 if (ptr == NULL)
7741 /* Dry run. */
7742 return MIN (len, total_bytes - off);
7744 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7746 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7748 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7749 bitpos += BITS_PER_UNIT)
7751 byte = (bitpos / BITS_PER_UNIT) & 3;
7752 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7754 if (UNITS_PER_WORD < 4)
7756 word = byte / UNITS_PER_WORD;
7757 if (WORDS_BIG_ENDIAN)
7758 word = (words - 1) - word;
7759 offset = word * UNITS_PER_WORD;
7760 if (BYTES_BIG_ENDIAN)
7761 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7762 else
7763 offset += byte % UNITS_PER_WORD;
7765 else
7767 offset = byte;
7768 if (BYTES_BIG_ENDIAN)
7770 /* Reverse bytes within each long, or within the entire float
7771 if it's smaller than a long (for HFmode). */
7772 offset = MIN (3, total_bytes - 1) - offset;
7773 gcc_assert (offset >= 0);
7776 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7777 if (offset >= off
7778 && offset - off < len)
7779 ptr[offset - off] = value;
7781 return MIN (len, total_bytes - off);
7784 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7785 specified by EXPR into the buffer PTR of length LEN bytes.
7786 Return the number of bytes placed in the buffer, or zero
7787 upon failure. */
7789 static int
7790 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7792 int rsize, isize;
7793 tree part;
7795 part = TREE_REALPART (expr);
7796 rsize = native_encode_expr (part, ptr, len, off);
7797 if (off == -1 && rsize == 0)
7798 return 0;
7799 part = TREE_IMAGPART (expr);
7800 if (off != -1)
7801 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7802 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7803 len - rsize, off);
7804 if (off == -1 && isize != rsize)
7805 return 0;
7806 return rsize + isize;
7809 /* Like native_encode_vector, but only encode the first COUNT elements.
7810 The other arguments are as for native_encode_vector. */
7812 static int
7813 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7814 int off, unsigned HOST_WIDE_INT count)
7816 tree itype = TREE_TYPE (TREE_TYPE (expr));
7817 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7818 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7820 /* This is the only case in which elements can be smaller than a byte.
7821 Element 0 is always in the lsb of the containing byte. */
7822 unsigned int elt_bits = TYPE_PRECISION (itype);
7823 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7824 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7825 return 0;
7827 if (off == -1)
7828 off = 0;
7830 /* Zero the buffer and then set bits later where necessary. */
7831 int extract_bytes = MIN (len, total_bytes - off);
7832 if (ptr)
7833 memset (ptr, 0, extract_bytes);
7835 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7836 unsigned int first_elt = off * elts_per_byte;
7837 unsigned int extract_elts = extract_bytes * elts_per_byte;
7838 for (unsigned int i = 0; i < extract_elts; ++i)
7840 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7841 if (TREE_CODE (elt) != INTEGER_CST)
7842 return 0;
7844 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7846 unsigned int bit = i * elt_bits;
7847 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7850 return extract_bytes;
7853 int offset = 0;
7854 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7855 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7857 if (off >= size)
7859 off -= size;
7860 continue;
7862 tree elem = VECTOR_CST_ELT (expr, i);
7863 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7864 len - offset, off);
7865 if ((off == -1 && res != size) || res == 0)
7866 return 0;
7867 offset += res;
7868 if (offset >= len)
7869 return (off == -1 && i < count - 1) ? 0 : offset;
7870 if (off != -1)
7871 off = 0;
7873 return offset;
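/* A worked example, not from the GCC sources: for a boolean vector with
   1-bit elements, element 0 lands in the least significant bit of the
   first byte, so the eight lanes { 1, 0, 1, 1, 0, 0, 0, 1 } encode as
   the single byte 0x8d (binary 10001101).  */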
7876 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7877 specified by EXPR into the buffer PTR of length LEN bytes.
7878 Return the number of bytes placed in the buffer, or zero
7879 upon failure. */
7881 static int
7882 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7884 unsigned HOST_WIDE_INT count;
7885 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7886 return 0;
7887 return native_encode_vector_part (expr, ptr, len, off, count);
7891 /* Subroutine of native_encode_expr. Encode the STRING_CST
7892 specified by EXPR into the buffer PTR of length LEN bytes.
7893 Return the number of bytes placed in the buffer, or zero
7894 upon failure. */
7896 static int
7897 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7899 tree type = TREE_TYPE (expr);
7901 /* Wide-char strings are encoded in target byte order, so natively
7902 encoding them is trivial. */
7903 if (BITS_PER_UNIT != CHAR_BIT
7904 || TREE_CODE (type) != ARRAY_TYPE
7905 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7906 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7907 return 0;
7909 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7910 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7911 return 0;
7912 if (off == -1)
7913 off = 0;
7914 len = MIN (total_bytes - off, len);
7915 if (ptr == NULL)
7916 /* Dry run. */;
7917 else
7919 int written = 0;
7920 if (off < TREE_STRING_LENGTH (expr))
7922 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7923 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7925 memset (ptr + written, 0, len - written);
7927 return len;
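/* For example (illustrative, not from the GCC sources): a STRING_CST
   "ab" stored in a char[8] encodes as { 'a', 'b', 0, 0, 0, 0, 0, 0 }:
   the string's own bytes are copied and everything past
   TREE_STRING_LENGTH is cleared by the memset above.  */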
7931 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
7932 FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
7933 the buffer PTR of size LEN bytes. If PTR is NULL, don't actually store
7934 anything, just do a dry run. Fail either if OFF is -1 and LEN isn't
7935 sufficient to encode the entire EXPR, or if OFF is out of bounds.
7936 Otherwise, start at byte offset OFF and encode at most LEN bytes.
7937 Return the number of bytes placed in the buffer, or zero upon failure. */
7939 int
7940 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7942 /* We don't support starting at a negative offset, and -1 is special. */
7943 if (off < -1)
7944 return 0;
7946 switch (TREE_CODE (expr))
7948 case INTEGER_CST:
7949 return native_encode_int (expr, ptr, len, off);
7951 case REAL_CST:
7952 return native_encode_real (expr, ptr, len, off);
7954 case FIXED_CST:
7955 return native_encode_fixed (expr, ptr, len, off);
7957 case COMPLEX_CST:
7958 return native_encode_complex (expr, ptr, len, off);
7960 case VECTOR_CST:
7961 return native_encode_vector (expr, ptr, len, off);
7963 case STRING_CST:
7964 return native_encode_string (expr, ptr, len, off);
7966 default:
7967 return 0;
7971 /* Try to find a type whose byte size is smaller than or equal to LEN bytes
7972 and larger than or equal to FIELDSIZE bytes, with underlying mode
7973 precision/size a multiple of BITS_PER_UNIT. As native_{interpret,encode}_int
7974 work in terms of machine modes, we can't just use build_nonstandard_integer_type. */
7976 tree
7977 find_bitfield_repr_type (int fieldsize, int len)
7979 machine_mode mode;
7980 for (int pass = 0; pass < 2; pass++)
7982 enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
7983 FOR_EACH_MODE_IN_CLASS (mode, mclass)
7984 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
7985 && known_eq (GET_MODE_PRECISION (mode),
7986 GET_MODE_BITSIZE (mode))
7987 && known_le (GET_MODE_SIZE (mode), len))
7989 tree ret = lang_hooks.types.type_for_mode (mode, 1);
7990 if (ret && TYPE_MODE (ret) == mode)
7991 return ret;
7995 for (int i = 0; i < NUM_INT_N_ENTS; i ++)
7996 if (int_n_enabled_p[i]
7997 && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
7998 && int_n_trees[i].unsigned_type)
8000 tree ret = int_n_trees[i].unsigned_type;
8001 mode = TYPE_MODE (ret);
8002 if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8003 && known_eq (GET_MODE_PRECISION (mode),
8004 GET_MODE_BITSIZE (mode))
8005 && known_le (GET_MODE_SIZE (mode), len))
8006 return ret;
8009 return NULL_TREE;
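/* For example (illustrative, not from the GCC sources): a bit-field
   region with FIELDSIZE 3 and LEN >= 4 is typically represented by the
   4-byte unsigned type (SImode on most targets), since no integer mode
   spans exactly three bytes with full precision.  */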
8012 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
8013 NON_LVALUE_EXPRs and nops. If MASK is non-NULL (then PTR has
8014 to be non-NULL and OFF zero), then in addition to filling the
8015 bytes pointed to by PTR with the value, also clear any bits pointed to
8016 by MASK that are known to be initialized, keeping the rest as-is for
8017 e.g. uninitialized padding bits or uninitialized fields. */
8019 int
8020 native_encode_initializer (tree init, unsigned char *ptr, int len,
8021 int off, unsigned char *mask)
8023 int r;
8025 /* We don't support starting at a negative offset, and -1 is special. */
8026 if (off < -1 || init == NULL_TREE)
8027 return 0;
8029 gcc_assert (mask == NULL || (off == 0 && ptr));
8031 STRIP_NOPS (init);
8032 switch (TREE_CODE (init))
8034 case VIEW_CONVERT_EXPR:
8035 case NON_LVALUE_EXPR:
8036 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
8037 mask);
8038 default:
8039 r = native_encode_expr (init, ptr, len, off);
8040 if (mask)
8041 memset (mask, 0, r);
8042 return r;
8043 case CONSTRUCTOR:
8044 tree type = TREE_TYPE (init);
8045 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
8046 if (total_bytes < 0)
8047 return 0;
8048 if ((off == -1 && total_bytes > len) || off >= total_bytes)
8049 return 0;
8050 int o = off == -1 ? 0 : off;
8051 if (TREE_CODE (type) == ARRAY_TYPE)
8053 HOST_WIDE_INT min_index;
8054 unsigned HOST_WIDE_INT cnt;
8055 HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
8056 constructor_elt *ce;
8058 if (TYPE_DOMAIN (type) == NULL_TREE
8059 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
8060 return 0;
8062 fieldsize = int_size_in_bytes (TREE_TYPE (type));
8063 if (fieldsize <= 0)
8064 return 0;
8066 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
8067 if (ptr != NULL)
8068 memset (ptr, '\0', MIN (total_bytes - off, len));
8070 for (cnt = 0; ; cnt++)
8072 tree val = NULL_TREE, index = NULL_TREE;
8073 HOST_WIDE_INT pos = curpos, count = 0;
8074 bool full = false;
8075 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8077 val = ce->value;
8078 index = ce->index;
8080 else if (mask == NULL
8081 || CONSTRUCTOR_NO_CLEARING (init)
8082 || curpos >= total_bytes)
8083 break;
8084 else
8085 pos = total_bytes;
8086 if (index && TREE_CODE (index) == RANGE_EXPR)
8088 if (!tree_fits_shwi_p (TREE_OPERAND (index, 0))
8089 || !tree_fits_shwi_p (TREE_OPERAND (index, 1)))
8090 return 0;
8091 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
8092 * fieldsize;
8093 count = (tree_to_shwi (TREE_OPERAND (index, 1))
8094 - tree_to_shwi (TREE_OPERAND (index, 0)));
8096 else if (index)
8098 if (!tree_fits_shwi_p (index))
8099 return 0;
8100 pos = (tree_to_shwi (index) - min_index) * fieldsize;
8103 if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8105 if (valueinit == -1)
8107 tree zero = build_constructor (TREE_TYPE (type), NULL);
8108 r = native_encode_initializer (zero, ptr + curpos,
8109 fieldsize, 0,
8110 mask + curpos);
8111 ggc_free (zero);
8112 if (!r)
8113 return 0;
8114 valueinit = curpos;
8115 curpos += fieldsize;
8117 while (curpos != pos)
8119 memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8120 memcpy (mask + curpos, mask + valueinit, fieldsize);
8121 curpos += fieldsize;
8125 curpos = pos;
8126 if (val)
8129 if (off == -1
8130 || (curpos >= off
8131 && (curpos + fieldsize
8132 <= (HOST_WIDE_INT) off + len)))
8134 if (full)
8136 if (ptr)
8137 memcpy (ptr + (curpos - o), ptr + (pos - o),
8138 fieldsize);
8139 if (mask)
8140 memcpy (mask + curpos, mask + pos, fieldsize);
8142 else if (!native_encode_initializer (val,
8144 ? ptr + curpos - o
8145 : NULL,
8146 fieldsize,
8147 off == -1 ? -1
8148 : 0,
8149 mask
8150 ? mask + curpos
8151 : NULL))
8152 return 0;
8153 else
8155 full = true;
8156 pos = curpos;
8159 else if (curpos + fieldsize > off
8160 && curpos < (HOST_WIDE_INT) off + len)
8162 /* Partial overlap. */
8163 unsigned char *p = NULL;
8164 int no = 0;
8165 int l;
8166 gcc_assert (mask == NULL);
8167 if (curpos >= off)
8169 if (ptr)
8170 p = ptr + curpos - off;
8171 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8172 fieldsize);
8174 else
8176 p = ptr;
8177 no = off - curpos;
8178 l = len;
8180 if (!native_encode_initializer (val, p, l, no, NULL))
8181 return 0;
8183 curpos += fieldsize;
8185 while (count-- != 0);
8187 return MIN (total_bytes - off, len);
8189 else if (TREE_CODE (type) == RECORD_TYPE
8190 || TREE_CODE (type) == UNION_TYPE)
8192 unsigned HOST_WIDE_INT cnt;
8193 constructor_elt *ce;
8194 tree fld_base = TYPE_FIELDS (type);
8195 tree to_free = NULL_TREE;
8197 gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8198 if (ptr != NULL)
8199 memset (ptr, '\0', MIN (total_bytes - off, len));
8200 for (cnt = 0; ; cnt++)
8202 tree val = NULL_TREE, field = NULL_TREE;
8203 HOST_WIDE_INT pos = 0, fieldsize;
8204 unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8206 if (to_free)
8208 ggc_free (to_free);
8209 to_free = NULL_TREE;
8212 if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8214 val = ce->value;
8215 field = ce->index;
8216 if (field == NULL_TREE)
8217 return 0;
8219 pos = int_byte_position (field);
8220 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8221 continue;
8223 else if (mask == NULL
8224 || CONSTRUCTOR_NO_CLEARING (init))
8225 break;
8226 else
8227 pos = total_bytes;
8229 if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8231 tree fld;
8232 for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8234 if (TREE_CODE (fld) != FIELD_DECL)
8235 continue;
8236 if (fld == field)
8237 break;
8238 if (DECL_PADDING_P (fld))
8239 continue;
8240 if (DECL_SIZE_UNIT (fld) == NULL_TREE
8241 || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8242 return 0;
8243 if (integer_zerop (DECL_SIZE_UNIT (fld)))
8244 continue;
8245 break;
8247 if (fld == NULL_TREE)
8249 if (ce == NULL)
8250 break;
8251 return 0;
8253 fld_base = DECL_CHAIN (fld);
8254 if (fld != field)
8256 cnt--;
8257 field = fld;
8258 val = build_constructor (TREE_TYPE (fld), NULL);
8259 to_free = val;
8263 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8264 && TYPE_DOMAIN (TREE_TYPE (field))
8265 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8266 return 0;
8267 if (DECL_SIZE_UNIT (field) == NULL_TREE
8268 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8269 return 0;
8270 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8271 if (fieldsize == 0)
8272 continue;
8274 if (DECL_BIT_FIELD (field))
8276 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8277 return 0;
8278 fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8279 bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8280 if (bpos % BITS_PER_UNIT)
8281 bpos %= BITS_PER_UNIT;
8282 else
8283 bpos = 0;
8284 fieldsize += bpos;
8285 epos = fieldsize % BITS_PER_UNIT;
8286 fieldsize += BITS_PER_UNIT - 1;
8287 fieldsize /= BITS_PER_UNIT;
8290 if (off != -1 && pos + fieldsize <= off)
8291 continue;
8293 if (val == NULL_TREE)
8294 continue;
8296 if (DECL_BIT_FIELD (field))
8298 /* FIXME: Handle PDP endian. */
8299 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8300 return 0;
8302 if (TREE_CODE (val) != INTEGER_CST)
8303 return 0;
8305 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8306 tree repr_type = NULL_TREE;
8307 HOST_WIDE_INT rpos = 0;
8308 if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8310 rpos = int_byte_position (repr);
8311 repr_type = TREE_TYPE (repr);
8313 else
8315 repr_type = find_bitfield_repr_type (fieldsize, len);
8316 if (repr_type == NULL_TREE)
8317 return 0;
8318 HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8319 gcc_assert (repr_size > 0 && repr_size <= len);
8320 if (pos + repr_size <= len)
8321 rpos = pos;
8322 else
8324 rpos = len - repr_size;
8325 gcc_assert (rpos <= pos);
8329 if (rpos > pos)
8330 return 0;
8331 wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8332 int diff = (TYPE_PRECISION (repr_type)
8333 - TYPE_PRECISION (TREE_TYPE (field)));
8334 HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8335 if (!BYTES_BIG_ENDIAN)
8336 w = wi::lshift (w, bitoff);
8337 else
8338 w = wi::lshift (w, diff - bitoff);
8339 val = wide_int_to_tree (repr_type, w);
8341 unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8342 / BITS_PER_UNIT + 1];
8343 int l = native_encode_int (val, buf, sizeof buf, 0);
8344 if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8345 return 0;
8347 if (ptr == NULL)
8348 continue;
8350 /* If the bitfield does not start at byte boundary, handle
8351 the partial byte at the start. */
8352 if (bpos
8353 && (off == -1 || (pos >= off && len >= 1)))
8355 if (!BYTES_BIG_ENDIAN)
8357 int msk = (1 << bpos) - 1;
8358 buf[pos - rpos] &= ~msk;
8359 buf[pos - rpos] |= ptr[pos - o] & msk;
8360 if (mask)
8362 if (fieldsize > 1 || epos == 0)
8363 mask[pos] &= msk;
8364 else
8365 mask[pos] &= (msk | ~((1 << epos) - 1));
8368 else
8370 int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8371 buf[pos - rpos] &= msk;
8372 buf[pos - rpos] |= ptr[pos - o] & ~msk;
8373 if (mask)
8375 if (fieldsize > 1 || epos == 0)
8376 mask[pos] &= ~msk;
8377 else
8378 mask[pos] &= (~msk
8379 | ((1 << (BITS_PER_UNIT - epos))
8380 - 1));
8384 /* If the bitfield does not end at byte boundary, handle
8385 the partial byte at the end. */
8386 if (epos
8387 && (off == -1
8388 || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8390 if (!BYTES_BIG_ENDIAN)
8392 int msk = (1 << epos) - 1;
8393 buf[pos - rpos + fieldsize - 1] &= msk;
8394 buf[pos - rpos + fieldsize - 1]
8395 |= ptr[pos + fieldsize - 1 - o] & ~msk;
8396 if (mask && (fieldsize > 1 || bpos == 0))
8397 mask[pos + fieldsize - 1] &= ~msk;
8399 else
8401 int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8402 buf[pos - rpos + fieldsize - 1] &= ~msk;
8403 buf[pos - rpos + fieldsize - 1]
8404 |= ptr[pos + fieldsize - 1 - o] & msk;
8405 if (mask && (fieldsize > 1 || bpos == 0))
8406 mask[pos + fieldsize - 1] &= msk;
8409 if (off == -1
8410 || (pos >= off
8411 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8413 memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8414 if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8415 memset (mask + pos + (bpos != 0), 0,
8416 fieldsize - (bpos != 0) - (epos != 0));
8418 else
8420 /* Partial overlap. */
8421 HOST_WIDE_INT fsz = fieldsize;
8422 gcc_assert (mask == NULL);
8423 if (pos < off)
8425 fsz -= (off - pos);
8426 pos = off;
8428 if (pos + fsz > (HOST_WIDE_INT) off + len)
8429 fsz = (HOST_WIDE_INT) off + len - pos;
8430 memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8432 continue;
8435 if (off == -1
8436 || (pos >= off
8437 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8439 if (!native_encode_initializer (val, ptr ? ptr + pos - o
8440 : NULL,
8441 fieldsize,
8442 off == -1 ? -1 : 0,
8443 mask ? mask + pos : NULL))
8444 return 0;
8446 else
8448 /* Partial overlap. */
8449 unsigned char *p = NULL;
8450 int no = 0;
8451 int l;
8452 gcc_assert (mask == NULL);
8453 if (pos >= off)
8455 if (ptr)
8456 p = ptr + pos - off;
8457 l = MIN ((HOST_WIDE_INT) off + len - pos,
8458 fieldsize);
8460 else
8462 p = ptr;
8463 no = off - pos;
8464 l = len;
8466 if (!native_encode_initializer (val, p, l, no, NULL))
8467 return 0;
8470 return MIN (total_bytes - off, len);
8472 return 0;
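/* An illustrative example, not from the GCC sources: encoding

   struct S { char c; int i; } s = { 1, 2 };

   on a typical little-endian target with 4-byte int alignment fills PTR
   with 01 ?? ?? ?? 02 00 00 00.  When MASK is supplied, the mask bits
   covering C and I are cleared, while the three padding bytes after C
   keep their mask bits set, recording that they stay uninitialized.  */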
8477 /* Subroutine of native_interpret_expr. Interpret the contents of
8478 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8479 If the buffer cannot be interpreted, return NULL_TREE. */
8481 static tree
8482 native_interpret_int (tree type, const unsigned char *ptr, int len)
8484 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8486 if (total_bytes > len
8487 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8488 return NULL_TREE;
8490 wide_int result = wi::from_buffer (ptr, total_bytes);
8492 return wide_int_to_tree (type, result);
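/* For example (illustrative, not from the GCC sources): the two bytes
   { 0x04, 0x03 } are interpreted as the 16-bit constant 0x0304 on a
   little-endian target, the inverse of the native_encode_int layout.  */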
8496 /* Subroutine of native_interpret_expr. Interpret the contents of
8497 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8498 If the buffer cannot be interpreted, return NULL_TREE. */
8500 static tree
8501 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8503 scalar_mode mode = SCALAR_TYPE_MODE (type);
8504 int total_bytes = GET_MODE_SIZE (mode);
8505 double_int result;
8506 FIXED_VALUE_TYPE fixed_value;
8508 if (total_bytes > len
8509 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8510 return NULL_TREE;
8512 result = double_int::from_buffer (ptr, total_bytes);
8513 fixed_value = fixed_from_double_int (result, mode);
8515 return build_fixed (type, fixed_value);
8519 /* Subroutine of native_interpret_expr. Interpret the contents of
8520 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8521 If the buffer cannot be interpreted, return NULL_TREE. */
8523 static tree
8524 native_interpret_real (tree type, const unsigned char *ptr, int len)
8526 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8527 int total_bytes = GET_MODE_SIZE (mode);
8528 unsigned char value;
8529 /* There are always 32 bits in each long, no matter the size of
8530 the host's long. We handle floating point representations with
8531 up to 192 bits. */
8532 REAL_VALUE_TYPE r;
8533 long tmp[6];
8535 if (total_bytes > len || total_bytes > 24)
8536 return NULL_TREE;
8537 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8539 memset (tmp, 0, sizeof (tmp));
8540 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8541 bitpos += BITS_PER_UNIT)
8543 /* Both OFFSET and BYTE index within a long;
8544 bitpos indexes the whole float. */
8545 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8546 if (UNITS_PER_WORD < 4)
8548 int word = byte / UNITS_PER_WORD;
8549 if (WORDS_BIG_ENDIAN)
8550 word = (words - 1) - word;
8551 offset = word * UNITS_PER_WORD;
8552 if (BYTES_BIG_ENDIAN)
8553 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8554 else
8555 offset += byte % UNITS_PER_WORD;
8557 else
8559 offset = byte;
8560 if (BYTES_BIG_ENDIAN)
8562 /* Reverse bytes within each long, or within the entire float
8563 if it's smaller than a long (for HFmode). */
8564 offset = MIN (3, total_bytes - 1) - offset;
8565 gcc_assert (offset >= 0);
8568 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8570 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8573 real_from_target (&r, tmp, mode);
8574 tree ret = build_real (type, r);
8575 if (MODE_COMPOSITE_P (mode))
8577 /* For floating point values in composite modes, punt if this folding
8578 doesn't preserve bit representation. As the mode doesn't have fixed
8579 precision while GCC pretends it does, there could be valid values that
8580 GCC can't really represent accurately. See PR95450. */
8581 unsigned char buf[24];
8582 if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
8583 || memcmp (ptr, buf, total_bytes) != 0)
8584 ret = NULL_TREE;
8586 return ret;
8590 /* Subroutine of native_interpret_expr. Interpret the contents of
8591 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8592 If the buffer cannot be interpreted, return NULL_TREE. */
8594 static tree
8595 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8597 tree etype, rpart, ipart;
8598 int size;
8600 etype = TREE_TYPE (type);
8601 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8602 if (size * 2 > len)
8603 return NULL_TREE;
8604 rpart = native_interpret_expr (etype, ptr, size);
8605 if (!rpart)
8606 return NULL_TREE;
8607 ipart = native_interpret_expr (etype, ptr+size, size);
8608 if (!ipart)
8609 return NULL_TREE;
8610 return build_complex (type, rpart, ipart);
8613 /* Read a vector of type TYPE from the target memory image given by BYTES,
8614 which contains LEN bytes. The vector is known to be encodable using
8615 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8617 Return the vector on success, otherwise return null. */
8619 static tree
8620 native_interpret_vector_part (tree type, const unsigned char *bytes,
8621 unsigned int len, unsigned int npatterns,
8622 unsigned int nelts_per_pattern)
8624 tree elt_type = TREE_TYPE (type);
8625 if (VECTOR_BOOLEAN_TYPE_P (type)
8626 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8628 /* This is the only case in which elements can be smaller than a byte.
8629 Element 0 is always in the lsb of the containing byte. */
8630 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8631 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8632 return NULL_TREE;
8634 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8635 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8637 unsigned int bit_index = i * elt_bits;
8638 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8639 unsigned int lsb = bit_index % BITS_PER_UNIT;
8640 builder.quick_push (bytes[byte_index] & (1 << lsb)
8641 ? build_all_ones_cst (elt_type)
8642 : build_zero_cst (elt_type));
8644 return builder.build ();
8647 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8648 if (elt_bytes * npatterns * nelts_per_pattern > len)
8649 return NULL_TREE;
8651 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8652 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8654 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8655 if (!elt)
8656 return NULL_TREE;
8657 builder.quick_push (elt);
8658 bytes += elt_bytes;
8660 return builder.build ();
8663 /* Subroutine of native_interpret_expr. Interpret the contents of
8664 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8665 If the buffer cannot be interpreted, return NULL_TREE. */
8667 static tree
8668 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8670 tree etype;
8671 unsigned int size;
8672 unsigned HOST_WIDE_INT count;
8674 etype = TREE_TYPE (type);
8675 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8676 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8677 || size * count > len)
8678 return NULL_TREE;
8680 return native_interpret_vector_part (type, ptr, len, count, 1);
8684 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8685 the buffer PTR of length LEN as a constant of type TYPE. For
8686 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8687 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8688 return NULL_TREE. */
8690 tree
8691 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8693 switch (TREE_CODE (type))
8695 case INTEGER_TYPE:
8696 case ENUMERAL_TYPE:
8697 case BOOLEAN_TYPE:
8698 case POINTER_TYPE:
8699 case REFERENCE_TYPE:
8700 return native_interpret_int (type, ptr, len);
8702 case REAL_TYPE:
8703 return native_interpret_real (type, ptr, len);
8705 case FIXED_POINT_TYPE:
8706 return native_interpret_fixed (type, ptr, len);
8708 case COMPLEX_TYPE:
8709 return native_interpret_complex (type, ptr, len);
8711 case VECTOR_TYPE:
8712 return native_interpret_vector (type, ptr, len);
8714 default:
8715 return NULL_TREE;
8719 /* Returns true if we can interpret the contents of a native encoding
8720 as TYPE. */
8722 bool
8723 can_native_interpret_type_p (tree type)
8725 switch (TREE_CODE (type))
8727 case INTEGER_TYPE:
8728 case ENUMERAL_TYPE:
8729 case BOOLEAN_TYPE:
8730 case POINTER_TYPE:
8731 case REFERENCE_TYPE:
8732 case FIXED_POINT_TYPE:
8733 case REAL_TYPE:
8734 case COMPLEX_TYPE:
8735 case VECTOR_TYPE:
8736 return true;
8737 default:
8738 return false;
8742 /* Attempt to interpret an aggregate of TYPE from bytes encoded in target
8743 byte order at PTR + OFF with LEN bytes. Does not handle unions. */
8745 tree
8746 native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
8747 int len)
8749 vec<constructor_elt, va_gc> *elts = NULL;
8750 if (TREE_CODE (type) == ARRAY_TYPE)
8752 HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
8753 if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
8754 return NULL_TREE;
8756 HOST_WIDE_INT cnt = 0;
8757 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
8759 if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
8760 return NULL_TREE;
8761 cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
8763 if (eltsz == 0)
8764 cnt = 0;
8765 HOST_WIDE_INT pos = 0;
8766 for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
8768 tree v = NULL_TREE;
8769 if (pos >= len || pos + eltsz > len)
8770 return NULL_TREE;
8771 if (can_native_interpret_type_p (TREE_TYPE (type)))
8773 v = native_interpret_expr (TREE_TYPE (type),
8774 ptr + off + pos, eltsz);
8775 if (v == NULL_TREE)
8776 return NULL_TREE;
8778 else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
8779 || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
8780 v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
8781 eltsz);
8782 if (v == NULL_TREE)
8783 return NULL_TREE;
8784 CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
8786 return build_constructor (type, elts);
8788 if (TREE_CODE (type) != RECORD_TYPE)
8789 return NULL_TREE;
8790 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8792 if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field))
8793 continue;
8794 tree fld = field;
8795 HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
8796 int diff = 0;
8797 tree v = NULL_TREE;
8798 if (DECL_BIT_FIELD (field))
8800 fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
8801 if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
8803 poly_int64 bitoffset;
8804 poly_uint64 field_offset, fld_offset;
8805 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8806 && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
8807 bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
8808 else
8809 bitoffset = 0;
8810 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8811 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
8812 diff = (TYPE_PRECISION (TREE_TYPE (fld))
8813 - TYPE_PRECISION (TREE_TYPE (field)));
8814 if (!bitoffset.is_constant (&bitoff)
8815 || bitoff < 0
8816 || bitoff > diff)
8817 return NULL_TREE;
8819 else
8821 if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8822 return NULL_TREE;
8823 int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8824 int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8825 bpos %= BITS_PER_UNIT;
8826 fieldsize += bpos;
8827 fieldsize += BITS_PER_UNIT - 1;
8828 fieldsize /= BITS_PER_UNIT;
8829 tree repr_type = find_bitfield_repr_type (fieldsize, len);
8830 if (repr_type == NULL_TREE)
8831 return NULL_TREE;
8832 sz = int_size_in_bytes (repr_type);
8833 if (sz < 0 || sz > len)
8834 return NULL_TREE;
8835 pos = int_byte_position (field);
8836 if (pos < 0 || pos > len || pos + fieldsize > len)
8837 return NULL_TREE;
8838 HOST_WIDE_INT rpos;
8839 if (pos + sz <= len)
8840 rpos = pos;
8841 else
8843 rpos = len - sz;
8844 gcc_assert (rpos <= pos);
8846 bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
8847 pos = rpos;
8848 diff = (TYPE_PRECISION (repr_type)
8849 - TYPE_PRECISION (TREE_TYPE (field)));
8850 v = native_interpret_expr (repr_type, ptr + off + pos, sz);
8851 if (v == NULL_TREE)
8852 return NULL_TREE;
8853 fld = NULL_TREE;
8857 if (fld)
8859 sz = int_size_in_bytes (TREE_TYPE (fld));
8860 if (sz < 0 || sz > len)
8861 return NULL_TREE;
8862 tree byte_pos = byte_position (fld);
8863 if (!tree_fits_shwi_p (byte_pos))
8864 return NULL_TREE;
8865 pos = tree_to_shwi (byte_pos);
8866 if (pos < 0 || pos > len || pos + sz > len)
8867 return NULL_TREE;
8869 if (fld == NULL_TREE)
8870 /* Already handled above. */;
8871 else if (can_native_interpret_type_p (TREE_TYPE (fld)))
8873 v = native_interpret_expr (TREE_TYPE (fld),
8874 ptr + off + pos, sz);
8875 if (v == NULL_TREE)
8876 return NULL_TREE;
8878 else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
8879 || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
8880 v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
8881 if (v == NULL_TREE)
8882 return NULL_TREE;
8883 if (fld != field)
8885 if (TREE_CODE (v) != INTEGER_CST)
8886 return NULL_TREE;
8888 /* FIXME: Figure out how to handle PDP endian bitfields. */
8889 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8890 return NULL_TREE;
8891 if (!BYTES_BIG_ENDIAN)
8892 v = wide_int_to_tree (TREE_TYPE (field),
8893 wi::lrshift (wi::to_wide (v), bitoff));
8894 else
8895 v = wide_int_to_tree (TREE_TYPE (field),
8896 wi::lrshift (wi::to_wide (v),
8897 diff - bitoff));
8899 CONSTRUCTOR_APPEND_ELT (elts, field, v);
8901 return build_constructor (type, elts);
8904 /* Routines for manipulating native_encode_expr encoded data when the encoded
8905 or extracted constant positions and/or sizes aren't byte aligned. */
8907 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
8908 bits between adjacent elements. AMNT should be within
8909 [0, BITS_PER_UNIT).
8910 Example, AMNT = 2:
8911 00011111|11100000 << 2 = 01111111|10000000
8912 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
8914 void
8915 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
8916 unsigned int amnt)
8918 if (amnt == 0)
8919 return;
8921 unsigned char carry_over = 0U;
8922 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
8923 unsigned char clear_mask = (~0U) << amnt;
8925 for (unsigned int i = 0; i < sz; i++)
8927 unsigned prev_carry_over = carry_over;
8928 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
8930 ptr[i] <<= amnt;
8931 if (i != 0)
8933 ptr[i] &= clear_mask;
8934 ptr[i] |= prev_carry_over;
8939 /* Like shift_bytes_in_array_left but for big-endian.
8940 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
8941 bits between adjacent elements. AMNT should be within
8942 [0, BITS_PER_UNIT).
8943 Example, AMNT = 2:
8944 00011111|11100000 >> 2 = 00000111|11111000
8945 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
8947 void
8948 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
8949 unsigned int amnt)
8951 if (amnt == 0)
8952 return;
8954 unsigned char carry_over = 0U;
8955 unsigned char carry_mask = ~(~0U << amnt);
8957 for (unsigned int i = 0; i < sz; i++)
8959 unsigned prev_carry_over = carry_over;
8960 carry_over = ptr[i] & carry_mask;
8962 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
8963 ptr[i] >>= amnt;
8964 ptr[i] |= prev_carry_over;
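/* A standalone sketch, not from the GCC sources, exercising both
   helpers on the two-byte examples from the comments above:

   static void
   demo_shifts (void)
   {
     unsigned char le[2] = { 0xe0, 0x1f };    // PTR[0] = 0xe0, PTR[1] = 0x1f
     shift_bytes_in_array_left (le, 2, 2);    // now { 0x80, 0x7f }

     unsigned char be[2] = { 0x1f, 0xe0 };    // PTR[0] = 0x1f, PTR[1] = 0xe0
     shift_bytes_in_array_right (be, 2, 2);   // now { 0x07, 0xf8 }
   }
*/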
8968 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
8969 directly on the VECTOR_CST encoding, in a way that works for variable-
8970 length vectors. Return the resulting VECTOR_CST on success or null
8971 on failure. */
8973 static tree
8974 fold_view_convert_vector_encoding (tree type, tree expr)
8976 tree expr_type = TREE_TYPE (expr);
8977 poly_uint64 type_bits, expr_bits;
8978 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
8979 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
8980 return NULL_TREE;
8982 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
8983 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
8984 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
8985 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
8987 /* We can only preserve the semantics of a stepped pattern if the new
8988 vector element is an integer of the same size. */
8989 if (VECTOR_CST_STEPPED_P (expr)
8990 && (!INTEGRAL_TYPE_P (TREE_TYPE (type)) || type_elt_bits != expr_elt_bits))
8991 return NULL_TREE;
8993 /* The number of bits needed to encode one element from every pattern
8994 of the original vector. */
8995 unsigned int expr_sequence_bits
8996 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
8998 /* The number of bits needed to encode one element from every pattern
8999 of the result. */
9000 unsigned int type_sequence_bits
9001 = least_common_multiple (expr_sequence_bits, type_elt_bits);
9003 /* Don't try to read more bytes than are available, which can happen
9004 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9005 The general VIEW_CONVERT handling can cope with that case, so there's
9006 no point complicating things here. */
9007 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
9008 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
9009 BITS_PER_UNIT);
9010 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
9011 if (known_gt (buffer_bits, expr_bits))
9012 return NULL_TREE;
9014 /* Get enough bytes of EXPR to form the new encoding. */
9015 auto_vec<unsigned char, 128> buffer (buffer_bytes);
9016 buffer.quick_grow (buffer_bytes);
9017 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
9018 buffer_bits / expr_elt_bits)
9019 != (int) buffer_bytes)
9020 return NULL_TREE;
9022 /* Reencode the bytes as TYPE. */
9023 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
9024 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
9025 type_npatterns, nelts_per_pattern);
9028 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9029 TYPE at compile-time. If we're unable to perform the conversion,
9030 return NULL_TREE. */
9032 static tree
9033 fold_view_convert_expr (tree type, tree expr)
9035 /* We support up to 512-bit values (for V8DFmode). */
9036 unsigned char buffer[64];
9037 int len;
9039 /* Check that the host and target are sane. */
9040 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9041 return NULL_TREE;
9043 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9044 if (tree res = fold_view_convert_vector_encoding (type, expr))
9045 return res;
9047 len = native_encode_expr (expr, buffer, sizeof (buffer));
9048 if (len == 0)
9049 return NULL_TREE;
9051 return native_interpret_expr (type, buffer, len);
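/* For instance (an illustrative example, not from the GCC sources):
   VIEW_CONVERT_EXPR<int>(1.0f) folds to 0x3f800000 by encoding the
   REAL_CST into the buffer and reinterpreting the same four bytes as an
   INTEGER_CST, the compile-time analogue of:

   static int
   float_bits (float f)
   {
     int i;
     __builtin_memcpy (&i, &f, sizeof (i));   // reinterpret the bytes
     return i;                                // 0x3f800000 for 1.0f
   }
*/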
9054 /* Build an expression for the address of T. Folds away INDIRECT_REF
9055 to avoid confusing the gimplify process. */
9057 tree
9058 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
9060 /* The size of the object is not relevant when talking about its address. */
9061 if (TREE_CODE (t) == WITH_SIZE_EXPR)
9062 t = TREE_OPERAND (t, 0);
9064 if (TREE_CODE (t) == INDIRECT_REF)
9066 t = TREE_OPERAND (t, 0);
9068 if (TREE_TYPE (t) != ptrtype)
9069 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
9071 else if (TREE_CODE (t) == MEM_REF
9072 && integer_zerop (TREE_OPERAND (t, 1)))
9074 t = TREE_OPERAND (t, 0);
9076 if (TREE_TYPE (t) != ptrtype)
9077 t = fold_convert_loc (loc, ptrtype, t);
9079 else if (TREE_CODE (t) == MEM_REF
9080 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
9081 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
9082 TREE_OPERAND (t, 0),
9083 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
9084 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
9086 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
9088 if (TREE_TYPE (t) != ptrtype)
9089 t = fold_convert_loc (loc, ptrtype, t);
9091 else
9092 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
9094 return t;
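/* For example (illustrative, not from the GCC sources): taking the
   address of *p folds straight back to p, converted if the pointer
   types differ, and the address of a MEM_REF with constant base folds
   to a POINTER_PLUS_EXPR instead of materializing an ADDR_EXPR.  */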
9097 /* Build an expression for the address of T. */
9099 tree
9100 build_fold_addr_expr_loc (location_t loc, tree t)
9102 tree ptrtype = build_pointer_type (TREE_TYPE (t));
9104 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
9107 /* Fold a unary expression of code CODE and type TYPE with operand
9108 OP0. Return the folded expression if folding is successful.
9109 Otherwise, return NULL_TREE. */
9111 tree
9112 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9114 tree tem;
9115 tree arg0;
9116 enum tree_code_class kind = TREE_CODE_CLASS (code);
9118 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9119 && TREE_CODE_LENGTH (code) == 1);
9121 arg0 = op0;
9122 if (arg0)
9124 if (CONVERT_EXPR_CODE_P (code)
9125 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9127 /* Don't use STRIP_NOPS, because signedness of argument type
9128 matters. */
9129 STRIP_SIGN_NOPS (arg0);
9131 else
9133 /* Strip any conversions that don't change the mode. This
9134 is safe for every expression, except for a comparison
9135 expression because its signedness is derived from its
9136 operands.
9138 Note that this is done as an internal manipulation within
9139 the constant folder, in order to find the simplest
9140 representation of the arguments so that their form can be
9141 studied. In any cases, the appropriate type conversions
9142 should be put back in the tree that will get out of the
9143 constant folder. */
9144 STRIP_NOPS (arg0);
9147 if (CONSTANT_CLASS_P (arg0))
9149 tree tem = const_unop (code, type, arg0);
9150 if (tem)
9152 if (TREE_TYPE (tem) != type)
9153 tem = fold_convert_loc (loc, type, tem);
9154 return tem;
9159 tem = generic_simplify (loc, code, type, op0);
9160 if (tem)
9161 return tem;
9163 if (TREE_CODE_CLASS (code) == tcc_unary)
9165 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9166 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9167 fold_build1_loc (loc, code, type,
9168 fold_convert_loc (loc, TREE_TYPE (op0),
9169 TREE_OPERAND (arg0, 1))));
9170 else if (TREE_CODE (arg0) == COND_EXPR)
9172 tree arg01 = TREE_OPERAND (arg0, 1);
9173 tree arg02 = TREE_OPERAND (arg0, 2);
9174 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9175 arg01 = fold_build1_loc (loc, code, type,
9176 fold_convert_loc (loc,
9177 TREE_TYPE (op0), arg01));
9178 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9179 arg02 = fold_build1_loc (loc, code, type,
9180 fold_convert_loc (loc,
9181 TREE_TYPE (op0), arg02));
9182 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9183 arg01, arg02);
9185 /* If this was a conversion, and all we did was to move into
9186 inside the COND_EXPR, bring it back out. But leave it if
9187 it is a conversion from integer to integer and the
9188 result precision is no wider than a word since such a
9189 conversion is cheap and may be optimized away by combine,
9190 while it couldn't if it were outside the COND_EXPR. Then return
9191 so we don't get into an infinite recursion loop taking the
9192 conversion out and then back in. */
9194 if ((CONVERT_EXPR_CODE_P (code)
9195 || code == NON_LVALUE_EXPR)
9196 && TREE_CODE (tem) == COND_EXPR
9197 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9198 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9199 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
9200 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
9201 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9202 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9203 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9204 && (INTEGRAL_TYPE_P
9205 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9206 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9207 || flag_syntax_only))
9208 tem = build1_loc (loc, code, type,
9209 build3 (COND_EXPR,
9210 TREE_TYPE (TREE_OPERAND
9211 (TREE_OPERAND (tem, 1), 0)),
9212 TREE_OPERAND (tem, 0),
9213 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9214 TREE_OPERAND (TREE_OPERAND (tem, 2),
9215 0)));
9216 return tem;
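/* Illustration (hypothetical C source, not part of the folder):
   a unary operation is pushed into the arms, e.g.

     -(c ? x : y)   becomes   c ? -x : -y

   and for a conversion such as (long)(c ? i : j) the cast is
   distributed and then hoisted back out unless it is a cheap
   integer conversion, per the conditions checked above.  */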
9220 switch (code)
9222 case NON_LVALUE_EXPR:
9223 if (!maybe_lvalue_p (op0))
9224 return fold_convert_loc (loc, type, op0);
9225 return NULL_TREE;
9227 CASE_CONVERT:
9228 case FLOAT_EXPR:
9229 case FIX_TRUNC_EXPR:
9230 if (COMPARISON_CLASS_P (op0))
9232 /* If we have (type) (a CMP b) and type is an integral type, return
9233 new expression involving the new type. Canonicalize
9234 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9235 non-integral type.
9236 Do not fold the result as that would not simplify further; also,
9237 folding again would result in infinite recursion. */
9238 if (TREE_CODE (type) == BOOLEAN_TYPE)
9239 return build2_loc (loc, TREE_CODE (op0), type,
9240 TREE_OPERAND (op0, 0),
9241 TREE_OPERAND (op0, 1));
9242 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9243 && TREE_CODE (type) != VECTOR_TYPE)
9244 return build3_loc (loc, COND_EXPR, type, op0,
9245 constant_boolean_node (true, type),
9246 constant_boolean_node (false, type));
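/* E.g. (illustrative): "(_Bool) (a < b)" is rebuilt directly as
   the comparison with boolean type, while a non-integral target
   such as "(double) (a < b)" becomes the unfolded
   "(a < b) ? 1.0 : 0.0".  */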
9249 /* Handle (T *)&A.B.C for A being of type T and B and C
9250 living at offset zero. This occurs frequently in
9251 C++ upcasting and then accessing the base. */
9252 if (TREE_CODE (op0) == ADDR_EXPR
9253 && POINTER_TYPE_P (type)
9254 && handled_component_p (TREE_OPERAND (op0, 0)))
9256 poly_int64 bitsize, bitpos;
9257 tree offset;
9258 machine_mode mode;
9259 int unsignedp, reversep, volatilep;
9260 tree base
9261 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9262 &offset, &mode, &unsignedp, &reversep,
9263 &volatilep);
9264 /* If the reference was to a (constant) zero offset, we can use
9265 the address of the base if it has the same base type
9266 as the result type and the pointer type is unqualified. */
9267 if (!offset
9268 && known_eq (bitpos, 0)
9269 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9270 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9271 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9272 return fold_convert_loc (loc, type,
9273 build_fold_addr_expr_loc (loc, base));
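/* Typical hit (illustrative declarations): given
   "struct T { struct S s; } a;", the upcast-style address
   "(struct T *) &a.s" folds to "&a", since the component lives
   at bit position zero and the pointed-to main variants match.  */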
9276 if (TREE_CODE (op0) == MODIFY_EXPR
9277 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9278 /* Detect assigning a bitfield. */
9279 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9280 && DECL_BIT_FIELD
9281 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9283 /* Don't leave an assignment inside a conversion
9284 unless assigning a bitfield. */
9285 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9286 /* First do the assignment, then return converted constant. */
9287 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
9288 TREE_NO_WARNING (tem) = 1;
9289 TREE_USED (tem) = 1;
9290 return tem;
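/* As an illustration (hypothetical source): for
   "long l = (long) (i = 5);" the conversion of the assignment
   becomes "(i = 5), (long) 5": the store happens first and the
   converted constant is the value of the whole expression.  */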
9293 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9294 constants (if x has signed type, the sign bit cannot be set
9295 in c). This folds extension into the BIT_AND_EXPR.
9296 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9297 very likely don't have maximal range for their precision and this
9298 transformation effectively doesn't preserve non-maximal ranges. */
9299 if (TREE_CODE (type) == INTEGER_TYPE
9300 && TREE_CODE (op0) == BIT_AND_EXPR
9301 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9303 tree and_expr = op0;
9304 tree and0 = TREE_OPERAND (and_expr, 0);
9305 tree and1 = TREE_OPERAND (and_expr, 1);
9306 int change = 0;
9308 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9309 || (TYPE_PRECISION (type)
9310 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9311 change = 1;
9312 else if (TYPE_PRECISION (TREE_TYPE (and1))
9313 <= HOST_BITS_PER_WIDE_INT
9314 && tree_fits_uhwi_p (and1))
9316 unsigned HOST_WIDE_INT cst;
9318 cst = tree_to_uhwi (and1);
9319 cst &= HOST_WIDE_INT_M1U
9320 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9321 change = (cst == 0);
9322 if (change
9323 && !flag_syntax_only
9324 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9325 == ZERO_EXTEND))
9327 tree uns = unsigned_type_for (TREE_TYPE (and0));
9328 and0 = fold_convert_loc (loc, uns, and0);
9329 and1 = fold_convert_loc (loc, uns, and1);
9332 if (change)
9334 tem = force_fit_type (type, wi::to_widest (and1), 0,
9335 TREE_OVERFLOW (and1));
9336 return fold_build2_loc (loc, BIT_AND_EXPR, type,
9337 fold_convert_loc (loc, type, and0), tem);
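/* Worked instance (illustrative types): with unsigned short US,
   "(unsigned int) (us & 0x7f)" becomes "(unsigned int) us & 0x7f";
   the extension commutes with the AND because the inner type is
   unsigned, or because the mask leaves the sign bit clear.  */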
9341 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9342 cast (T1)X will fold away. We assume that this happens when X itself
9343 is a cast. */
9344 if (POINTER_TYPE_P (type)
9345 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9346 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9348 tree arg00 = TREE_OPERAND (arg0, 0);
9349 tree arg01 = TREE_OPERAND (arg0, 1);
9351 return fold_build_pointer_plus_loc
9352 (loc, fold_convert_loc (loc, type, arg00), arg01);
9355 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9356 of the same precision, and X is an integer type not narrower than
9357 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
9358 if (INTEGRAL_TYPE_P (type)
9359 && TREE_CODE (op0) == BIT_NOT_EXPR
9360 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9361 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9362 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9364 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9365 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9366 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9367 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9368 fold_convert_loc (loc, type, tem));
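/* For instance (illustrative): with X of type unsigned int,
   "(unsigned int) ~(int) x" folds to "~x", T1 and T2 having equal
   precision and the inner cast not being an extension.  */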
9371 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9372 type of X and Y (integer types only). */
9373 if (INTEGRAL_TYPE_P (type)
9374 && TREE_CODE (op0) == MULT_EXPR
9375 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9376 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
9378 /* Be careful not to introduce new overflows. */
9379 tree mult_type;
9380 if (TYPE_OVERFLOW_WRAPS (type))
9381 mult_type = type;
9382 else
9383 mult_type = unsigned_type_for (type);
9385 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9387 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9388 fold_convert_loc (loc, mult_type,
9389 TREE_OPERAND (op0, 0)),
9390 fold_convert_loc (loc, mult_type,
9391 TREE_OPERAND (op0, 1)));
9392 return fold_convert_loc (loc, type, tem);
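/* E.g. (illustrative, 32-bit int / 16-bit short): the narrowing
   "(short) (x * y)" for int X and Y is done as a multiply in
   "unsigned short" (the unsigned copy of the target type, so that
   no new overflow is introduced) followed by a conversion back
   to short.  */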
9396 return NULL_TREE;
9398 case VIEW_CONVERT_EXPR:
9399 if (TREE_CODE (op0) == MEM_REF)
9401 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9402 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9403 tem = fold_build2_loc (loc, MEM_REF, type,
9404 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9405 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9406 return tem;
9409 return NULL_TREE;
9411 case NEGATE_EXPR:
9412 tem = fold_negate_expr (loc, arg0);
9413 if (tem)
9414 return fold_convert_loc (loc, type, tem);
9415 return NULL_TREE;
9417 case ABS_EXPR:
9418 /* Convert fabs((double)float) into (double)fabsf(float). */
9419 if (TREE_CODE (arg0) == NOP_EXPR
9420 && TREE_CODE (type) == REAL_TYPE)
9422 tree targ0 = strip_float_extensions (arg0);
9423 if (targ0 != arg0)
9424 return fold_convert_loc (loc, type,
9425 fold_build1_loc (loc, ABS_EXPR,
9426 TREE_TYPE (targ0),
9427 targ0));
9429 return NULL_TREE;
9431 case BIT_NOT_EXPR:
9432 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
9433 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9434 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9435 fold_convert_loc (loc, type,
9436 TREE_OPERAND (arg0, 0)))))
9437 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9438 fold_convert_loc (loc, type,
9439 TREE_OPERAND (arg0, 1)));
9440 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9441 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9442 fold_convert_loc (loc, type,
9443 TREE_OPERAND (arg0, 1)))))
9444 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9445 fold_convert_loc (loc, type,
9446 TREE_OPERAND (arg0, 0)), tem);
9448 return NULL_TREE;
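/* Illustration: "~(x ^ 5)" simplifies here to "x ^ ~5" (i.e.
   "x ^ -6"), because complementing the constant half of the XOR
   is itself a successful fold_unary_loc simplification.  */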
9450 case TRUTH_NOT_EXPR:
9451 /* Note that the operand of this must be an int
9452 and its values must be 0 or 1.
9453 ("true" is a fixed value perhaps depending on the language,
9454 but we don't handle values other than 1 correctly yet.) */
9455 tem = fold_truth_not_expr (loc, arg0);
9456 if (!tem)
9457 return NULL_TREE;
9458 return fold_convert_loc (loc, type, tem);
9460 case INDIRECT_REF:
9461 /* Fold *&X to X if X is an lvalue. */
9462 if (TREE_CODE (op0) == ADDR_EXPR)
9464 tree op00 = TREE_OPERAND (op0, 0);
9465 if ((VAR_P (op00)
9466 || TREE_CODE (op00) == PARM_DECL
9467 || TREE_CODE (op00) == RESULT_DECL)
9468 && !TREE_READONLY (op00))
9469 return op00;
9471 return NULL_TREE;
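/* Simple case (illustrative): for a plain "int x;", "*&x" folds
   to "x"; only non-read-only variables, parameters and results
   qualify, per the guards above.  */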
9473 default:
9474 return NULL_TREE;
9475 } /* switch (code) */
9479 /* If the operation was a conversion do _not_ mark a resulting constant
9480 with TREE_OVERFLOW if the original constant was not. These conversions
9481 have implementation defined behavior and retaining the TREE_OVERFLOW
9482 flag here would confuse later passes such as VRP. */
9483 tree
9484 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9485 tree type, tree op0)
9487 tree res = fold_unary_loc (loc, code, type, op0);
9488 if (res
9489 && TREE_CODE (res) == INTEGER_CST
9490 && TREE_CODE (op0) == INTEGER_CST
9491 && CONVERT_EXPR_CODE_P (code))
9492 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9494 return res;
9497 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9498 operands OP0 and OP1. LOC is the location of the resulting expression.
9499 ARG0 and ARG1 are OP0 and OP1 with NOPs stripped (see STRIP_NOPS).
9500 Return the folded expression if folding is successful. Otherwise,
9501 return NULL_TREE. */
9502 static tree
9503 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9504 tree arg0, tree arg1, tree op0, tree op1)
9506 tree tem;
9508 /* We only do these simplifications if we are optimizing. */
9509 if (!optimize)
9510 return NULL_TREE;
9512 /* Check for things like (A || B) && (A || C). We can convert this
9513 to A || (B && C). Note that either operator can be any of the four
9514 truth and/or operations and the transformation will still be
9515 valid. Also note that we only care about order for the
9516 ANDIF and ORIF operators. If B contains side effects, this
9517 might change the truth-value of A. */
9518 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9519 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9520 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9521 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9522 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9523 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9525 tree a00 = TREE_OPERAND (arg0, 0);
9526 tree a01 = TREE_OPERAND (arg0, 1);
9527 tree a10 = TREE_OPERAND (arg1, 0);
9528 tree a11 = TREE_OPERAND (arg1, 1);
9529 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9530 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9531 && (code == TRUTH_AND_EXPR
9532 || code == TRUTH_OR_EXPR));
9534 if (operand_equal_p (a00, a10, 0))
9535 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9536 fold_build2_loc (loc, code, type, a01, a11));
9537 else if (commutative && operand_equal_p (a00, a11, 0))
9538 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9539 fold_build2_loc (loc, code, type, a01, a10));
9540 else if (commutative && operand_equal_p (a01, a10, 0))
9541 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9542 fold_build2_loc (loc, code, type, a00, a11));
9544 /* This case is tricky because we must either have commutative
9545 operators or else A10 must not have side-effects. */
9547 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9548 && operand_equal_p (a01, a11, 0))
9549 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9550 fold_build2_loc (loc, code, type, a00, a10),
9551 a01);
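/* Example of the distribution (illustrative): when B has no side
   effects,

     (a || b) && (a || c)   becomes   a || (b && c)

   and the COMMUTATIVE flag additionally licenses matching A
   against the second operand for plain AND/OR.  */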
9554 /* See if we can build a range comparison. */
9555 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9556 return tem;
9558 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9559 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9561 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9562 if (tem)
9563 return fold_build2_loc (loc, code, type, tem, arg1);
9566 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9567 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9569 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9570 if (tem)
9571 return fold_build2_loc (loc, code, type, arg0, tem);
9574 /* Check for the possibility of merging component references. If our
9575 lhs is another similar operation, try to merge its rhs with our
9576 rhs. Then try to merge our lhs and rhs. */
9577 if (TREE_CODE (arg0) == code
9578 && (tem = fold_truth_andor_1 (loc, code, type,
9579 TREE_OPERAND (arg0, 1), arg1)) != 0)
9580 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9582 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9583 return tem;
9585 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9586 if (param_logical_op_non_short_circuit != -1)
9587 logical_op_non_short_circuit
9588 = param_logical_op_non_short_circuit;
9589 if (logical_op_non_short_circuit
9590 && !flag_sanitize_coverage
9591 && (code == TRUTH_AND_EXPR
9592 || code == TRUTH_ANDIF_EXPR
9593 || code == TRUTH_OR_EXPR
9594 || code == TRUTH_ORIF_EXPR))
9596 enum tree_code ncode, icode;
9598 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9599 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9600 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9602 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9603 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9604 We don't want to pack more than two leaves into a non-IF AND/OR
9605 expression.
9606 If the tree code of the left-hand operand isn't an AND/OR-IF code
9607 and isn't equal to IF-CODE, then we don't want to add the
9608 right-hand operand. If the inner right-hand side of the
9609 left-hand operand has side effects, or isn't simple, then we
9610 can't add to it, as otherwise we might destroy the if-sequence. */
9611 if (TREE_CODE (arg0) == icode
9612 && simple_operand_p_2 (arg1)
9613 /* Needed for sequence points, to handle traps and
9614 side effects. */
9615 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
9617 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9618 arg1);
9619 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9620 tem);
9622 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9623 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
9624 else if (TREE_CODE (arg1) == icode
9625 && simple_operand_p_2 (arg0)
9626 /* Needed for sequence points, to handle traps and
9627 side effects. */
9628 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
9630 tem = fold_build2_loc (loc, ncode, type,
9631 arg0, TREE_OPERAND (arg1, 0));
9632 return fold_build2_loc (loc, icode, type, tem,
9633 TREE_OPERAND (arg1, 1));
9635 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9636 into (A OR B).
9637 For sequence point consistency, we need to check for traps
9638 and side effects. */
9639 else if (code == icode && simple_operand_p_2 (arg0)
9640 && simple_operand_p_2 (arg1))
9641 return fold_build2_loc (loc, ncode, type, arg0, arg1);
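/* For instance (illustrative): when LOGICAL_OP_NON_SHORT_CIRCUIT
   holds, "a && b" with simple non-trapping operands becomes the
   non-branching "a & b" (TRUTH_AND_EXPR), and "(a && b) && c" is
   repacked as "a && (b & c)" so that at most two leaves share one
   non-IF node.  */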
9644 return NULL_TREE;
9647 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9648 by changing CODE to reduce the magnitude of constants involved in
9649 ARG0 of the comparison.
9650 Returns a canonicalized comparison tree if a simplification was
9651 possible, otherwise returns NULL_TREE.
9652 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9653 valid if signed overflow is undefined. */
9655 static tree
9656 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9657 tree arg0, tree arg1,
9658 bool *strict_overflow_p)
9660 enum tree_code code0 = TREE_CODE (arg0);
9661 tree t, cst0 = NULL_TREE;
9662 int sgn0;
9664 /* Match A +- CST code arg1. We can change this only if overflow
9665 is undefined. */
9666 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9667 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9668 /* In principle pointers also have undefined overflow behavior,
9669 but that causes problems elsewhere. */
9670 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9671 && (code0 == MINUS_EXPR
9672 || code0 == PLUS_EXPR)
9673 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9674 return NULL_TREE;
9676 /* Identify the constant in arg0 and its sign. */
9677 cst0 = TREE_OPERAND (arg0, 1);
9678 sgn0 = tree_int_cst_sgn (cst0);
9680 /* Overflowed constants and zero will cause problems. */
9681 if (integer_zerop (cst0)
9682 || TREE_OVERFLOW (cst0))
9683 return NULL_TREE;
9685 /* See if we can reduce the magnitude of the constant in
9686 arg0 by changing the comparison code. */
9687 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9688 if (code == LT_EXPR
9689 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9690 code = LE_EXPR;
9691 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9692 else if (code == GT_EXPR
9693 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9694 code = GE_EXPR;
9695 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9696 else if (code == LE_EXPR
9697 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9698 code = LT_EXPR;
9699 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9700 else if (code == GE_EXPR
9701 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9702 code = GT_EXPR;
9703 else
9704 return NULL_TREE;
9705 *strict_overflow_p = true;
9707 /* Now build the constant reduced in magnitude. But not if that
9708 would produce one outside of its type's range. */
9709 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9710 && ((sgn0 == 1
9711 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9712 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9713 || (sgn0 == -1
9714 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9715 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9716 return NULL_TREE;
9718 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9719 cst0, build_int_cst (TREE_TYPE (cst0), 1));
9720 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9721 t = fold_convert (TREE_TYPE (arg1), t);
9723 return fold_build2_loc (loc, code, type, t, arg1);
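/* Putting it together (illustrative, signed X): "x - 5 < y" is
   canonicalized to "x - 4 <= y", shrinking the constant while
   switching LT to LE; the check above refuses the rewrite when
   the constant is already the extreme value of its type.  */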
9726 /* Further canonicalize the comparison ARG0 CODE ARG1, of type TYPE with
9727 undefined overflow. Try to decrease the magnitude of constants involved
9728 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9729 and put sole constants at the second argument position.
9730 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9732 static tree
9733 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9734 tree arg0, tree arg1)
9736 tree t;
9737 bool strict_overflow_p;
9738 const char * const warnmsg = G_("assuming signed overflow does not occur "
9739 "when reducing constant in comparison");
9741 /* Try canonicalization by simplifying arg0. */
9742 strict_overflow_p = false;
9743 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9744 &strict_overflow_p);
9745 if (t)
9747 if (strict_overflow_p)
9748 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9749 return t;
9752 /* Try canonicalization by simplifying arg1 using the swapped
9753 comparison. */
9754 code = swap_tree_comparison (code);
9755 strict_overflow_p = false;
9756 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9757 &strict_overflow_p);
9758 if (t && strict_overflow_p)
9759 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9760 return t;
9763 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9764 space. This is used to avoid issuing overflow warnings for
9765 expressions like &p->x which cannot wrap. */
9767 static bool
9768 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9770 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9771 return true;
9773 if (maybe_lt (bitpos, 0))
9774 return true;
9776 poly_wide_int wi_offset;
9777 int precision = TYPE_PRECISION (TREE_TYPE (base));
9778 if (offset == NULL_TREE)
9779 wi_offset = wi::zero (precision);
9780 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9781 return true;
9782 else
9783 wi_offset = wi::to_poly_wide (offset);
9785 wi::overflow_type overflow;
9786 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9787 precision);
9788 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9789 if (overflow)
9790 return true;
9792 poly_uint64 total_hwi, size;
9793 if (!total.to_uhwi (&total_hwi)
9794 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9795 &size)
9796 || known_eq (size, 0U))
9797 return true;
9799 if (known_le (total_hwi, size))
9800 return false;
9802 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9803 array. */
9804 if (TREE_CODE (base) == ADDR_EXPR
9805 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9806 &size)
9807 && maybe_ne (size, 0U)
9808 && known_le (total_hwi, size))
9809 return false;
9811 return true;
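/* Usage sketch (illustrative): for "&p->x" with a small struct,
   OFFSET plus the byte equivalent of BITPOS stays within the
   size of *P, so this returns false and the comparison folds
   below need not warn about wraparound.  */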
9814 /* Return a positive integer when the symbol DECL is known to have
9815 a nonzero address, zero when it's known not to (e.g., it's a weak
9816 symbol), and a negative integer when the symbol is not yet in the
9817 symbol table and so whether or not its address is zero is unknown.
9818 For function-local objects, always return a positive integer. */
9819 static int
9820 maybe_nonzero_address (tree decl)
9822 if (DECL_P (decl) && decl_in_symtab_p (decl))
9823 if (struct symtab_node *symbol = symtab_node::get_create (decl))
9824 return symbol->nonzero_address ();
9826 /* Function local objects are never NULL. */
9827 if (DECL_P (decl)
9828 && (DECL_CONTEXT (decl)
9829 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
9830 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
9831 return 1;
9833 return -1;
9836 /* Subroutine of fold_binary. This routine performs all of the
9837 transformations that are common to the equality/inequality
9838 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9839 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9840 fold_binary should go through fold_binary rather than calling this routine directly. Fold a comparison with
9841 tree code CODE and type TYPE with operands OP0 and OP1. Return
9842 the folded comparison or NULL_TREE. */
9844 static tree
9845 fold_comparison (location_t loc, enum tree_code code, tree type,
9846 tree op0, tree op1)
9848 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
9849 tree arg0, arg1, tem;
9851 arg0 = op0;
9852 arg1 = op1;
9854 STRIP_SIGN_NOPS (arg0);
9855 STRIP_SIGN_NOPS (arg1);
9857 /* For comparisons of pointers we can decompose them to a compile-time
9858 comparison of the base objects and the offsets into the object.
9859 This requires at least one operand being an ADDR_EXPR or a
9860 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9861 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9862 && (TREE_CODE (arg0) == ADDR_EXPR
9863 || TREE_CODE (arg1) == ADDR_EXPR
9864 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9865 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9867 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9868 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
9869 machine_mode mode;
9870 int volatilep, reversep, unsignedp;
9871 bool indirect_base0 = false, indirect_base1 = false;
9873 /* Get base and offset for the access. Strip ADDR_EXPR for
9874 get_inner_reference, but put it back by stripping INDIRECT_REF
9875 off the base object if possible. indirect_baseN will be true
9876 if baseN is not an address but refers to the object itself. */
9877 base0 = arg0;
9878 if (TREE_CODE (arg0) == ADDR_EXPR)
9880 base0
9881 = get_inner_reference (TREE_OPERAND (arg0, 0),
9882 &bitsize, &bitpos0, &offset0, &mode,
9883 &unsignedp, &reversep, &volatilep);
9884 if (TREE_CODE (base0) == INDIRECT_REF)
9885 base0 = TREE_OPERAND (base0, 0);
9886 else
9887 indirect_base0 = true;
9889 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9891 base0 = TREE_OPERAND (arg0, 0);
9892 STRIP_SIGN_NOPS (base0);
9893 if (TREE_CODE (base0) == ADDR_EXPR)
9895 base0
9896 = get_inner_reference (TREE_OPERAND (base0, 0),
9897 &bitsize, &bitpos0, &offset0, &mode,
9898 &unsignedp, &reversep, &volatilep);
9899 if (TREE_CODE (base0) == INDIRECT_REF)
9900 base0 = TREE_OPERAND (base0, 0);
9901 else
9902 indirect_base0 = true;
9904 if (offset0 == NULL_TREE || integer_zerop (offset0))
9905 offset0 = TREE_OPERAND (arg0, 1);
9906 else
9907 offset0 = size_binop (PLUS_EXPR, offset0,
9908 TREE_OPERAND (arg0, 1));
9909 if (poly_int_tree_p (offset0))
9911 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
9912 TYPE_PRECISION (sizetype));
9913 tem <<= LOG2_BITS_PER_UNIT;
9914 tem += bitpos0;
9915 if (tem.to_shwi (&bitpos0))
9916 offset0 = NULL_TREE;
9920 base1 = arg1;
9921 if (TREE_CODE (arg1) == ADDR_EXPR)
9923 base1
9924 = get_inner_reference (TREE_OPERAND (arg1, 0),
9925 &bitsize, &bitpos1, &offset1, &mode,
9926 &unsignedp, &reversep, &volatilep);
9927 if (TREE_CODE (base1) == INDIRECT_REF)
9928 base1 = TREE_OPERAND (base1, 0);
9929 else
9930 indirect_base1 = true;
9932 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9934 base1 = TREE_OPERAND (arg1, 0);
9935 STRIP_SIGN_NOPS (base1);
9936 if (TREE_CODE (base1) == ADDR_EXPR)
9938 base1
9939 = get_inner_reference (TREE_OPERAND (base1, 0),
9940 &bitsize, &bitpos1, &offset1, &mode,
9941 &unsignedp, &reversep, &volatilep);
9942 if (TREE_CODE (base1) == INDIRECT_REF)
9943 base1 = TREE_OPERAND (base1, 0);
9944 else
9945 indirect_base1 = true;
9947 if (offset1 == NULL_TREE || integer_zerop (offset1))
9948 offset1 = TREE_OPERAND (arg1, 1);
9949 else
9950 offset1 = size_binop (PLUS_EXPR, offset1,
9951 TREE_OPERAND (arg1, 1));
9952 if (poly_int_tree_p (offset1))
9954 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
9955 TYPE_PRECISION (sizetype));
9956 tem <<= LOG2_BITS_PER_UNIT;
9957 tem += bitpos1;
9958 if (tem.to_shwi (&bitpos1))
9959 offset1 = NULL_TREE;
9963 /* If we have equivalent bases we might be able to simplify. */
9964 if (indirect_base0 == indirect_base1
9965 && operand_equal_p (base0, base1,
9966 indirect_base0 ? OEP_ADDRESS_OF : 0))
9968 /* We can fold this expression to a constant if the non-constant
9969 offset parts are equal. */
9970 if ((offset0 == offset1
9971 || (offset0 && offset1
9972 && operand_equal_p (offset0, offset1, 0)))
9973 && (equality_code
9974 || (indirect_base0
9975 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9976 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9978 if (!equality_code
9979 && maybe_ne (bitpos0, bitpos1)
9980 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9981 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9982 fold_overflow_warning (("assuming pointer wraparound does not "
9983 "occur when comparing P +- C1 with "
9984 "P +- C2"),
9985 WARN_STRICT_OVERFLOW_CONDITIONAL);
9987 switch (code)
9989 case EQ_EXPR:
9990 if (known_eq (bitpos0, bitpos1))
9991 return constant_boolean_node (true, type);
9992 if (known_ne (bitpos0, bitpos1))
9993 return constant_boolean_node (false, type);
9994 break;
9995 case NE_EXPR:
9996 if (known_ne (bitpos0, bitpos1))
9997 return constant_boolean_node (true, type);
9998 if (known_eq (bitpos0, bitpos1))
9999 return constant_boolean_node (false, type);
10000 break;
10001 case LT_EXPR:
10002 if (known_lt (bitpos0, bitpos1))
10003 return constant_boolean_node (true, type);
10004 if (known_ge (bitpos0, bitpos1))
10005 return constant_boolean_node (false, type);
10006 break;
10007 case LE_EXPR:
10008 if (known_le (bitpos0, bitpos1))
10009 return constant_boolean_node (true, type);
10010 if (known_gt (bitpos0, bitpos1))
10011 return constant_boolean_node (false, type);
10012 break;
10013 case GE_EXPR:
10014 if (known_ge (bitpos0, bitpos1))
10015 return constant_boolean_node (true, type);
10016 if (known_lt (bitpos0, bitpos1))
10017 return constant_boolean_node (false, type);
10018 break;
10019 case GT_EXPR:
10020 if (known_gt (bitpos0, bitpos1))
10021 return constant_boolean_node (true, type);
10022 if (known_le (bitpos0, bitpos1))
10023 return constant_boolean_node (false, type);
10024 break;
10025 default:;
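/* Concretely (illustrative): with "char a[8];", "&a[1] == &a[3]"
   folds to false and "&a[1] < &a[3]" to true here, the two
   addresses sharing base A with fully known bit positions.  */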
10028 /* We can simplify the comparison to a comparison of the variable
10029 offset parts if the constant offset parts are equal.
10030 Be careful to use signed sizetype here because otherwise we
10031 mess with array offsets in the wrong way. This is possible
10032 because pointer arithmetic is restricted to remain within an
10033 object and overflow on pointer differences is undefined as of
10034 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
10035 else if (known_eq (bitpos0, bitpos1)
10036 && (equality_code
10037 || (indirect_base0
10038 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10039 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10041 /* By converting to signed sizetype we cover middle-end pointer
10042 arithmetic which operates on unsigned pointer types of size
10043 type size and ARRAY_REF offsets which are properly sign or
10044 zero extended from their type in case it is narrower than
10045 sizetype. */
10046 if (offset0 == NULL_TREE)
10047 offset0 = build_int_cst (ssizetype, 0);
10048 else
10049 offset0 = fold_convert_loc (loc, ssizetype, offset0);
10050 if (offset1 == NULL_TREE)
10051 offset1 = build_int_cst (ssizetype, 0);
10052 else
10053 offset1 = fold_convert_loc (loc, ssizetype, offset1);
10055 if (!equality_code
10056 && (pointer_may_wrap_p (base0, offset0, bitpos0)
10057 || pointer_may_wrap_p (base1, offset1, bitpos1)))
10058 fold_overflow_warning (("assuming pointer wraparound does not "
10059 "occur when comparing P +- C1 with "
10060 "P +- C2"),
10061 WARN_STRICT_OVERFLOW_COMPARISON);
10063 return fold_build2_loc (loc, code, type, offset0, offset1);
10066 /* For equal offsets we can simplify to a comparison of the
10067 base addresses. */
10068 else if (known_eq (bitpos0, bitpos1)
10069 && (indirect_base0
10070 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10071 && (indirect_base1
10072 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10073 && ((offset0 == offset1)
10074 || (offset0 && offset1
10075 && operand_equal_p (offset0, offset1, 0))))
10077 if (indirect_base0)
10078 base0 = build_fold_addr_expr_loc (loc, base0);
10079 if (indirect_base1)
10080 base1 = build_fold_addr_expr_loc (loc, base1);
10081 return fold_build2_loc (loc, code, type, base0, base1);
10083 /* Comparison between an ordinary (non-weak) symbol and a null
10084 pointer can be eliminated since such symbols must have a
10085 non-null address. In C, relational expressions between pointers
10086 to objects and null pointers are undefined. The results
10087 below follow the C++ rules with the additional property that
10088 every object pointer compares greater than a null pointer.
10090 else if (((DECL_P (base0)
10091 && maybe_nonzero_address (base0) > 0
10092 /* Avoid folding references to struct members at offset 0 to
10093 prevent tests like '&ptr->firstmember == 0' from getting
10094 eliminated. When ptr is null, although the -> expression
10095 is strictly speaking invalid, GCC retains it as a matter
10096 of QoI. See PR c/44555. */
10097 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10098 || CONSTANT_CLASS_P (base0))
10099 && indirect_base0
10100 /* The caller guarantees that when one of the arguments is
10101 constant (i.e., null in this case) it is second. */
10102 && integer_zerop (arg1))
10104 switch (code)
10106 case EQ_EXPR:
10107 case LE_EXPR:
10108 case LT_EXPR:
10109 return constant_boolean_node (false, type);
10110 case GE_EXPR:
10111 case GT_EXPR:
10112 case NE_EXPR:
10113 return constant_boolean_node (true, type);
10114 default:
10115 gcc_unreachable ();
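/* E.g. (illustrative): for an ordinary "int x;", "&x == 0" folds
   to false and "&x > 0" to true; weak symbols and zero-offset
   member addresses such as "&ptr->firstmember" are deliberately
   left alone above.  */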
10120 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10121 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
10122 the resulting offset is smaller in absolute value than the
10123 original one and has the same sign. */
10124 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10125 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10126 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10127 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10128 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10129 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10130 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10131 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10133 tree const1 = TREE_OPERAND (arg0, 1);
10134 tree const2 = TREE_OPERAND (arg1, 1);
10135 tree variable1 = TREE_OPERAND (arg0, 0);
10136 tree variable2 = TREE_OPERAND (arg1, 0);
10137 tree cst;
10138 const char * const warnmsg = G_("assuming signed overflow does not "
10139 "occur when combining constants around "
10140 "a comparison");
10142 /* Put the constant on the side where it doesn't overflow and is
10143 of lower absolute value and of the same sign as before. */
10144 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10145 ? MINUS_EXPR : PLUS_EXPR,
10146 const2, const1);
10147 if (!TREE_OVERFLOW (cst)
10148 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
10149 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10151 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10152 return fold_build2_loc (loc, code, type,
10153 variable1,
10154 fold_build2_loc (loc, TREE_CODE (arg1),
10155 TREE_TYPE (arg1),
10156 variable2, cst));
10159 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10160 ? MINUS_EXPR : PLUS_EXPR,
10161 const1, const2);
10162 if (!TREE_OVERFLOW (cst)
10163 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
10164 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10166 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10167 return fold_build2_loc (loc, code, type,
10168 fold_build2_loc (loc, TREE_CODE (arg0),
10169 TREE_TYPE (arg0),
10170 variable1, cst),
10171 variable2);
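/* E.g. (illustrative, signed X and Y): "x + 2 < y + 1" becomes
   "x + 1 < y"; the combined constant 1 is smaller in absolute
   value than the original and of the same sign, so the rewrite
   is valid when signed overflow is undefined.  */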
10175 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10176 if (tem)
10177 return tem;
10179 /* If we are comparing an expression that just has comparisons
10180 of two integer values, arithmetic expressions of those comparisons,
10181 and constants, we can simplify it. There are only three cases
10182 to check: the two values can either be equal, the first can be
10183 greater, or the second can be greater. Fold the expression for
10184 those three values. Since each value must be 0 or 1, we have
10185 eight possibilities, each of which corresponds to the constant 0
10186 or 1 or one of the six possible comparisons.
10188 This handles common cases like (a > b) == 0 but also handles
10189 expressions like ((x > y) - (y > x)) > 0, which supposedly
10190 occur in macroized code. */
10192 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10194 tree cval1 = 0, cval2 = 0;
10196 if (twoval_comparison_p (arg0, &cval1, &cval2)
10197 /* Don't handle degenerate cases here; they should already
10198 have been handled anyway. */
10199 && cval1 != 0 && cval2 != 0
10200 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10201 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10202 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10203 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10204 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10205 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10206 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
10208 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10209 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10211 /* We can't just pass T to eval_subst in case cval1 or cval2
10212 was the same as ARG1. */
10214 tree high_result
10215 = fold_build2_loc (loc, code, type,
10216 eval_subst (loc, arg0, cval1, maxval,
10217 cval2, minval),
10218 arg1);
10219 tree equal_result
10220 = fold_build2_loc (loc, code, type,
10221 eval_subst (loc, arg0, cval1, maxval,
10222 cval2, maxval),
10223 arg1);
10224 tree low_result
10225 = fold_build2_loc (loc, code, type,
10226 eval_subst (loc, arg0, cval1, minval,
10227 cval2, maxval),
10228 arg1);
10230 /* All three of these results should be 0 or 1. Confirm they are.
10231 Then use those values to select the proper code to use. */
10233 if (TREE_CODE (high_result) == INTEGER_CST
10234 && TREE_CODE (equal_result) == INTEGER_CST
10235 && TREE_CODE (low_result) == INTEGER_CST)
10237 /* Make a 3-bit mask with the high-order bit being the
10238 value for `>', the next for '=', and the low for '<'. */
10239 switch ((integer_onep (high_result) * 4)
10240 + (integer_onep (equal_result) * 2)
10241 + integer_onep (low_result))
10243 case 0:
10244 /* Always false. */
10245 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10246 case 1:
10247 code = LT_EXPR;
10248 break;
10249 case 2:
10250 code = EQ_EXPR;
10251 break;
10252 case 3:
10253 code = LE_EXPR;
10254 break;
10255 case 4:
10256 code = GT_EXPR;
10257 break;
10258 case 5:
10259 code = NE_EXPR;
10260 break;
10261 case 6:
10262 code = GE_EXPR;
10263 break;
10264 case 7:
10265 /* Always true. */
10266 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10269 return fold_build2_loc (loc, code, type, cval1, cval2);
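/* Worked case (illustrative): for "((x > y) - (y > x)) > 0" the
   three substitutions give high_result = 1, equal_result = 0 and
   low_result = 0, i.e. mask 4, so the expression folds to the
   single comparison "x > y".  */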
10274 return NULL_TREE;
10278 /* Subroutine of fold_binary. Optimize complex multiplications of the
10279 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
10280 argument EXPR represents the expression "z" of type TYPE. */
10282 static tree
10283 fold_mult_zconjz (location_t loc, tree type, tree expr)
10285 tree itype = TREE_TYPE (type);
10286 tree rpart, ipart, tem;
10288 if (TREE_CODE (expr) == COMPLEX_EXPR)
10290 rpart = TREE_OPERAND (expr, 0);
10291 ipart = TREE_OPERAND (expr, 1);
10293 else if (TREE_CODE (expr) == COMPLEX_CST)
10295 rpart = TREE_REALPART (expr);
10296 ipart = TREE_IMAGPART (expr);
10298 else
10300 expr = save_expr (expr);
10301 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10302 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10305 rpart = save_expr (rpart);
10306 ipart = save_expr (ipart);
10307 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
10308 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
10309 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10310 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10311 build_zero_cst (itype));
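/* Sketch of the effect (illustrative): for Z = a + b*i,
   "z * conj(z)" is rebuilt as COMPLEX_EXPR (a*a + b*b, 0); the
   SAVE_EXPRs above keep A and B from being evaluated twice when
   Z is neither a COMPLEX_EXPR nor a COMPLEX_CST.  */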
10315 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
10316 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10317 true if successful. */
10319 static bool
10320 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10322 unsigned HOST_WIDE_INT i, nunits;
10324 if (TREE_CODE (arg) == VECTOR_CST
10325 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
10327 for (i = 0; i < nunits; ++i)
10328 elts[i] = VECTOR_CST_ELT (arg, i);
10330 else if (TREE_CODE (arg) == CONSTRUCTOR)
10332 constructor_elt *elt;
10334 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10335 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10336 return false;
10337 else
10338 elts[i] = elt->value;
10340 else
10341 return false;
10342 for (; i < nelts; i++)
10343 elts[i]
10344 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10345 return true;
10348 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10349 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10350 NULL_TREE otherwise. */
10352 tree
10353 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10355 unsigned int i;
10356 unsigned HOST_WIDE_INT nelts;
10357 bool need_ctor = false;
10359 if (!sel.length ().is_constant (&nelts))
10360 return NULL_TREE;
10361 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
10362 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
10363 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
10364 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10365 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10366 return NULL_TREE;
10368 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10369 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10370 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10371 return NULL_TREE;
10373 tree_vector_builder out_elts (type, nelts, 1);
10374 for (i = 0; i < nelts; i++)
10376 HOST_WIDE_INT index;
10377 if (!sel[i].is_constant (&index))
10378 return NULL_TREE;
10379 if (!CONSTANT_CLASS_P (in_elts[index]))
10380 need_ctor = true;
10381 out_elts.quick_push (unshare_expr (in_elts[index]));
10384 if (need_ctor)
10386 vec<constructor_elt, va_gc> *v;
10387 vec_alloc (v, nelts);
10388 for (i = 0; i < nelts; i++)
10389 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
10390 return build_constructor (type, v);
10392 else
10393 return out_elts.build ();
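/* Example (illustrative V4SI values): permuting {0,1,2,3} and
   {4,5,6,7} with selector {0,4,1,5} yields the VECTOR_CST
   {0,4,1,5}; had any selected element been non-constant, a
   CONSTRUCTOR would have been built instead.  */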
10396 /* Try to fold a pointer difference of type TYPE between two address expressions of
10397 array references AREF0 and AREF1 using location LOC. Return a
10398 simplified expression for the difference or NULL_TREE. */
10400 static tree
10401 fold_addr_of_array_ref_difference (location_t loc, tree type,
10402 tree aref0, tree aref1,
10403 bool use_pointer_diff)
10405 tree base0 = TREE_OPERAND (aref0, 0);
10406 tree base1 = TREE_OPERAND (aref1, 0);
10407 tree base_offset = build_int_cst (type, 0);
10409 /* If the bases are array references as well, recurse. If the bases
10410 are pointer indirections compute the difference of the pointers.
10411 If the bases are equal, we are set. */
10412 if ((TREE_CODE (base0) == ARRAY_REF
10413 && TREE_CODE (base1) == ARRAY_REF
10414 && (base_offset
10415 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
10416 use_pointer_diff)))
10417 || (INDIRECT_REF_P (base0)
10418 && INDIRECT_REF_P (base1)
10419 && (base_offset
10420 = use_pointer_diff
10421 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
10422 TREE_OPERAND (base0, 0),
10423 TREE_OPERAND (base1, 0))
10424 : fold_binary_loc (loc, MINUS_EXPR, type,
10425 fold_convert (type,
10426 TREE_OPERAND (base0, 0)),
10427 fold_convert (type,
10428 TREE_OPERAND (base1, 0)))))
10429 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
10431 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10432 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10433 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
10434 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
10435 return fold_build2_loc (loc, PLUS_EXPR, type,
10436 base_offset,
10437 fold_build2_loc (loc, MULT_EXPR, type,
10438 diff, esz));
10440 return NULL_TREE;
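/* For instance (illustrative, 4-byte int): "&a[i] - &a[j]" for
   "int a[16];" folds to "(i - j) * 4"; for nested ARRAY_REFs the
   recursion folds the difference of the bases first and feeds it
   in as BASE_OFFSET.  */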
10443 /* If the real or vector real constant CST of type TYPE has an exact
10444 inverse, return it, else return NULL. */
10446 tree
10447 exact_inverse (tree type, tree cst)
10449 REAL_VALUE_TYPE r;
10450 tree unit_type;
10451 machine_mode mode;
10453 switch (TREE_CODE (cst))
10455 case REAL_CST:
10456 r = TREE_REAL_CST (cst);
10458 if (exact_real_inverse (TYPE_MODE (type), &r))
10459 return build_real (type, r);
10461 return NULL_TREE;
10463 case VECTOR_CST:
10465 unit_type = TREE_TYPE (type);
10466 mode = TYPE_MODE (unit_type);
10468 tree_vector_builder elts;
10469 if (!elts.new_unary_operation (type, cst, false))
10470 return NULL_TREE;
10471 unsigned int count = elts.encoded_nelts ();
10472 for (unsigned int i = 0; i < count; ++i)
10474 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10475 if (!exact_real_inverse (mode, &r))
10476 return NULL_TREE;
10477 elts.quick_push (build_real (unit_type, r));
10480 return elts.build ();
10483 default:
10484 return NULL_TREE;
10488 /* Mask out the tz least significant bits of X of type TYPE where
10489 tz is the number of trailing zeroes in Y. */
10490 static wide_int
10491 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10493 int tz = wi::ctz (y);
10494 if (tz > 0)
10495 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10496 return x;
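/* E.g. (illustrative, 32-bit TYPE): with X = 0xff and Y = 8
   (three trailing zero bits), the result is 0xff & ~7 = 0xf8,
   i.e. the three least significant bits of X are cleared.  */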
10499 /* Return true when T is an address and is known to be nonzero.
10500 For floating point we further ensure that T is not denormal.
10501 Similar logic is present in nonzero_address in rtlanal.h.
10503 If the return value is based on the assumption that signed overflow
10504 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10505 change *STRICT_OVERFLOW_P. */
10507 static bool
10508 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10510 tree type = TREE_TYPE (t);
10511 enum tree_code code;
10513 /* Doing something useful for floating point would need more work. */
10514 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10515 return false;
10517 code = TREE_CODE (t);
10518 switch (TREE_CODE_CLASS (code))
10520 case tcc_unary:
10521 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10522 strict_overflow_p);
10523 case tcc_binary:
10524 case tcc_comparison:
10525 return tree_binary_nonzero_warnv_p (code, type,
10526 TREE_OPERAND (t, 0),
10527 TREE_OPERAND (t, 1),
10528 strict_overflow_p);
10529 case tcc_constant:
10530 case tcc_declaration:
10531 case tcc_reference:
10532 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10534 default:
10535 break;
10538 switch (code)
10540 case TRUTH_NOT_EXPR:
10541 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10542 strict_overflow_p);
10544 case TRUTH_AND_EXPR:
10545 case TRUTH_OR_EXPR:
10546 case TRUTH_XOR_EXPR:
10547 return tree_binary_nonzero_warnv_p (code, type,
10548 TREE_OPERAND (t, 0),
10549 TREE_OPERAND (t, 1),
10550 strict_overflow_p);
10552 case COND_EXPR:
10553 case CONSTRUCTOR:
10554 case OBJ_TYPE_REF:
10555 case ASSERT_EXPR:
10556 case ADDR_EXPR:
10557 case WITH_SIZE_EXPR:
10558 case SSA_NAME:
10559 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10561 case COMPOUND_EXPR:
10562 case MODIFY_EXPR:
10563 case BIND_EXPR:
10564 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10565 strict_overflow_p);
10567 case SAVE_EXPR:
10568 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10569 strict_overflow_p);
10571 case CALL_EXPR:
10573 tree fndecl = get_callee_fndecl (t);
10574 if (!fndecl) return false;
10575 if (flag_delete_null_pointer_checks && !flag_check_new
10576 && DECL_IS_OPERATOR_NEW_P (fndecl)
10577 && !TREE_NOTHROW (fndecl))
10578 return true;
10579 if (flag_delete_null_pointer_checks
10580 && lookup_attribute ("returns_nonnull",
10581 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10582 return true;
10583 return alloca_call_p (t);
10586 default:
10587 break;
10589 return false;
10592 /* Return true when T is an address and is known to be nonzero.
10593 Handle warnings about undefined signed overflow. */
10595 bool
10596 tree_expr_nonzero_p (tree t)
10598 bool ret, strict_overflow_p;
10600 strict_overflow_p = false;
10601 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10602 if (strict_overflow_p)
10603 fold_overflow_warning (("assuming signed overflow does not occur when "
10604 "determining that expression is always "
10605 "non-zero"),
10606 WARN_STRICT_OVERFLOW_MISC);
10607 return ret;
10610 /* Return true if T is known not to be equal to an integer W. */
10612 bool
10613 expr_not_equal_to (tree t, const wide_int &w)
10615 value_range vr;
10616 switch (TREE_CODE (t))
10618 case INTEGER_CST:
10619 return wi::to_wide (t) != w;
10621 case SSA_NAME:
10622 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10623 return false;
10624 get_range_info (t, vr);
10625 if (!vr.undefined_p ()
10626 && !vr.contains_p (wide_int_to_tree (TREE_TYPE (t), w)))
10627 return true;
10628 /* If T has some known zero bits and W has any of those bits set,
10629 then T is known not to be equal to W. */
10630 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10631 TYPE_PRECISION (TREE_TYPE (t))), 0))
10632 return true;
10633 return false;
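/* Illustration (hypothetical range data): if VRP proves an
   SSA name T lies in [0, 5], expr_not_equal_to (t, 7) is true;
   likewise, if get_nonzero_bits shows T's low bit is always
   clear, T cannot equal any odd W.  */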
10635 default:
10636 return false;
10640 /* Fold a binary expression of code CODE and type TYPE with operands
10641 OP0 and OP1. LOC is the location of the resulting expression.
10642 Return the folded expression if folding is successful. Otherwise,
10643 return NULL_TREE. */
10645 tree
10646 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10647 tree op0, tree op1)
10649 enum tree_code_class kind = TREE_CODE_CLASS (code);
10650 tree arg0, arg1, tem;
10651 tree t1 = NULL_TREE;
10652 bool strict_overflow_p;
10653 unsigned int prec;
10655 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10656 && TREE_CODE_LENGTH (code) == 2
10657 && op0 != NULL_TREE
10658 && op1 != NULL_TREE);
10660 arg0 = op0;
10661 arg1 = op1;
10663 /* Strip any conversions that don't change the mode. This is
10664 safe for every expression, except for a comparison expression
10665 because its signedness is derived from its operands. So, in
10666 the latter case, only strip conversions that don't change the
10667 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10668 preserved.
10670 Note that this is done as an internal manipulation within the
10671 constant folder, in order to find the simplest representation
10672 of the arguments so that their form can be studied. In any
10673 case, the appropriate type conversions should be put back in
10674 the tree that will get out of the constant folder. */
10676 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10678 STRIP_SIGN_NOPS (arg0);
10679 STRIP_SIGN_NOPS (arg1);
10681 else
10683 STRIP_NOPS (arg0);
10684 STRIP_NOPS (arg1);
10687 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10688 constant but we can't do arithmetic on them. */
10689 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10691 tem = const_binop (code, type, arg0, arg1);
10692 if (tem != NULL_TREE)
10694 if (TREE_TYPE (tem) != type)
10695 tem = fold_convert_loc (loc, type, tem);
10696 return tem;
10700 /* If this is a commutative operation, and ARG0 is a constant, move it
10701 to ARG1 to reduce the number of tests below. */
10702 if (commutative_tree_code (code)
10703 && tree_swap_operands_p (arg0, arg1))
10704 return fold_build2_loc (loc, code, type, op1, op0);
10706 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10707 to ARG1 to reduce the number of tests below. */
10708 if (kind == tcc_comparison
10709 && tree_swap_operands_p (arg0, arg1))
10710 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10712 tem = generic_simplify (loc, code, type, op0, op1);
10713 if (tem)
10714 return tem;
10716 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10718 First check for cases where an arithmetic operation is applied to a
10719 compound, conditional, or comparison operation. Push the arithmetic
10720 operation inside the compound or conditional to see if any folding
10721 can then be done. Convert comparison to conditional for this purpose.
10722 This also optimizes non-constant cases that used to be done in
10723 expand_expr.
10725 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
10726 where one of the operands is a comparison and the other is either a
10727 comparison, a BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10728 code below would make the expression more complex. Change it to a
10729 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10730 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10732 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10733 || code == EQ_EXPR || code == NE_EXPR)
10734 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10735 && ((truth_value_p (TREE_CODE (arg0))
10736 && (truth_value_p (TREE_CODE (arg1))
10737 || (TREE_CODE (arg1) == BIT_AND_EXPR
10738 && integer_onep (TREE_OPERAND (arg1, 1)))))
10739 || (truth_value_p (TREE_CODE (arg1))
10740 && (truth_value_p (TREE_CODE (arg0))
10741 || (TREE_CODE (arg0) == BIT_AND_EXPR
10742 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10744 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10745 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10746 : TRUTH_XOR_EXPR,
10747 boolean_type_node,
10748 fold_convert_loc (loc, boolean_type_node, arg0),
10749 fold_convert_loc (loc, boolean_type_node, arg1));
10751 if (code == EQ_EXPR)
10752 tem = invert_truthvalue_loc (loc, tem);
10754 return fold_convert_loc (loc, type, tem);
10757 if (TREE_CODE_CLASS (code) == tcc_binary
10758 || TREE_CODE_CLASS (code) == tcc_comparison)
10760 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10762 tem = fold_build2_loc (loc, code, type,
10763 fold_convert_loc (loc, TREE_TYPE (op0),
10764 TREE_OPERAND (arg0, 1)), op1);
10765 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10766 tem);
10768 if (TREE_CODE (arg1) == COMPOUND_EXPR)
10770 tem = fold_build2_loc (loc, code, type, op0,
10771 fold_convert_loc (loc, TREE_TYPE (op1),
10772 TREE_OPERAND (arg1, 1)));
10773 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10774 tem);
10777 if (TREE_CODE (arg0) == COND_EXPR
10778 || TREE_CODE (arg0) == VEC_COND_EXPR
10779 || COMPARISON_CLASS_P (arg0))
10781 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10782 arg0, arg1,
10783 /*cond_first_p=*/1);
10784 if (tem != NULL_TREE)
10785 return tem;
10788 if (TREE_CODE (arg1) == COND_EXPR
10789 || TREE_CODE (arg1) == VEC_COND_EXPR
10790 || COMPARISON_CLASS_P (arg1))
10792 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10793 arg1, arg0,
10794 /*cond_first_p=*/0);
10795 if (tem != NULL_TREE)
10796 return tem;
10800 switch (code)
10802 case MEM_REF:
10803 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10804 if (TREE_CODE (arg0) == ADDR_EXPR
10805 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10807 tree iref = TREE_OPERAND (arg0, 0);
10808 return fold_build2 (MEM_REF, type,
10809 TREE_OPERAND (iref, 0),
10810 int_const_binop (PLUS_EXPR, arg1,
10811 TREE_OPERAND (iref, 1)));
10814 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10815 if (TREE_CODE (arg0) == ADDR_EXPR
10816 && handled_component_p (TREE_OPERAND (arg0, 0)))
10818 tree base;
10819 poly_int64 coffset;
10820 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10821 &coffset);
10822 if (!base)
10823 return NULL_TREE;
10824 return fold_build2 (MEM_REF, type,
10825 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
10826 int_const_binop (PLUS_EXPR, arg1,
10827 size_int (coffset)));
10830 return NULL_TREE;
10832 case POINTER_PLUS_EXPR:
10833 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10834 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10835 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10836 return fold_convert_loc (loc, type,
10837 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10838 fold_convert_loc (loc, sizetype,
10839 arg1),
10840 fold_convert_loc (loc, sizetype,
10841 arg0)));
10843 return NULL_TREE;
10845 case PLUS_EXPR:
10846 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10848 /* X + (X / CST) * -CST is X % CST. */
10849 if (TREE_CODE (arg1) == MULT_EXPR
10850 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10851 && operand_equal_p (arg0,
10852 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10854 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10855 tree cst1 = TREE_OPERAND (arg1, 1);
10856 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10857 cst1, cst0);
10858 if (sum && integer_zerop (sum))
10859 return fold_convert_loc (loc, type,
10860 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10861 TREE_TYPE (arg0), arg0,
10862 cst0));
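/* Illustrative instance: "x + (x / 16) * -16" matches with
   CST0 = 16 and CST1 = -16; their sum is zero, so the whole
   expression folds to "x % 16".  */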
10866 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10867 one. Make sure the type is not saturating and has the signedness of
10868 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10869 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10870 if ((TREE_CODE (arg0) == MULT_EXPR
10871 || TREE_CODE (arg1) == MULT_EXPR)
10872 && !TYPE_SATURATING (type)
10873 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10874 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10875 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10877 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10878 if (tem)
10879 return tem;
10882 if (! FLOAT_TYPE_P (type))
10884 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10885 (plus (plus (mult) (mult)) (foo)) so that we can
10886 take advantage of the factoring cases below. */
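/* E.g. (a*2 + b) + c*4 is reassociated as (a*2 + c*4) + b
   (a hypothetical example, assuming a wrapping integer type),
   making the two MULT_EXPRs adjacent for the factoring below. */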
10887 if (ANY_INTEGRAL_TYPE_P (type)
10888 && TYPE_OVERFLOW_WRAPS (type)
10889 && (((TREE_CODE (arg0) == PLUS_EXPR
10890 || TREE_CODE (arg0) == MINUS_EXPR)
10891 && TREE_CODE (arg1) == MULT_EXPR)
10892 || ((TREE_CODE (arg1) == PLUS_EXPR
10893 || TREE_CODE (arg1) == MINUS_EXPR)
10894 && TREE_CODE (arg0) == MULT_EXPR)))
10896 tree parg0, parg1, parg, marg;
10897 enum tree_code pcode;
10899 if (TREE_CODE (arg1) == MULT_EXPR)
10900 parg = arg0, marg = arg1;
10901 else
10902 parg = arg1, marg = arg0;
10903 pcode = TREE_CODE (parg);
10904 parg0 = TREE_OPERAND (parg, 0);
10905 parg1 = TREE_OPERAND (parg, 1);
10906 STRIP_NOPS (parg0);
10907 STRIP_NOPS (parg1);
10909 if (TREE_CODE (parg0) == MULT_EXPR
10910 && TREE_CODE (parg1) != MULT_EXPR)
10911 return fold_build2_loc (loc, pcode, type,
10912 fold_build2_loc (loc, PLUS_EXPR, type,
10913 fold_convert_loc (loc, type,
10914 parg0),
10915 fold_convert_loc (loc, type,
10916 marg)),
10917 fold_convert_loc (loc, type, parg1));
10918 if (TREE_CODE (parg0) != MULT_EXPR
10919 && TREE_CODE (parg1) == MULT_EXPR)
10920 return
10921 fold_build2_loc (loc, PLUS_EXPR, type,
10922 fold_convert_loc (loc, type, parg0),
10923 fold_build2_loc (loc, pcode, type,
10924 fold_convert_loc (loc, type, marg),
10925 fold_convert_loc (loc, type,
10926 parg1)));
10929 else
10931 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10932 to __complex__ ( x, y ). This is not the same for SNaNs or
10933 if signed zeros are involved. */
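/* Illustrative example only: with float components,
     __complex__ (x, 0.0f) + __complex__ (0.0f, y)
   folds to
     __complex__ (x, y)
   when SNaNs and signed zeros need not be honored. */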
10934 if (!HONOR_SNANS (element_mode (arg0))
10935 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10936 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10938 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10939 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10940 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10941 bool arg0rz = false, arg0iz = false;
10942 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10943 || (arg0i && (arg0iz = real_zerop (arg0i))))
10945 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10946 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10947 if (arg0rz && arg1i && real_zerop (arg1i))
10949 tree rp = arg1r ? arg1r
10950 : build1 (REALPART_EXPR, rtype, arg1);
10951 tree ip = arg0i ? arg0i
10952 : build1 (IMAGPART_EXPR, rtype, arg0);
10953 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10955 else if (arg0iz && arg1r && real_zerop (arg1r))
10957 tree rp = arg0r ? arg0r
10958 : build1 (REALPART_EXPR, rtype, arg0);
10959 tree ip = arg1i ? arg1i
10960 : build1 (IMAGPART_EXPR, rtype, arg1);
10961 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10966 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10967 We associate floats only if the user has specified
10968 -fassociative-math. */
10969 if (flag_associative_math
10970 && TREE_CODE (arg1) == PLUS_EXPR
10971 && TREE_CODE (arg0) != MULT_EXPR)
10973 tree tree10 = TREE_OPERAND (arg1, 0);
10974 tree tree11 = TREE_OPERAND (arg1, 1);
10975 if (TREE_CODE (tree11) == MULT_EXPR
10976 && TREE_CODE (tree10) == MULT_EXPR)
10978 tree tree0;
10979 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10980 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10983 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10984 We associate floats only if the user has specified
10985 -fassociative-math. */
10986 if (flag_associative_math
10987 && TREE_CODE (arg0) == PLUS_EXPR
10988 && TREE_CODE (arg1) != MULT_EXPR)
10990 tree tree00 = TREE_OPERAND (arg0, 0);
10991 tree tree01 = TREE_OPERAND (arg0, 1);
10992 if (TREE_CODE (tree01) == MULT_EXPR
10993 && TREE_CODE (tree00) == MULT_EXPR)
10995 tree tree0;
10996 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10997 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11002 bit_rotate:
11003 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11004 is a rotate of A by C1 bits. */
11005 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11006 is a rotate of A by B bits.
11007 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11008 though in this case CODE must be | and not + or ^, otherwise
11009 it doesn't return A when B is 0. */
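/* Hypothetical examples for a 32-bit unsigned X (illustrative only,
   assuming rotates exist for the mode):
     (x << 3) + (x >> 29)         ->  x lrotate 3
     (x << b) + (x >> (32 - b))   ->  x lrotate b
     (x << b) | (x >> (-b & 31))  ->  x lrotate b   (| only, see above) */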
11011 enum tree_code code0, code1;
11012 tree rtype;
11013 code0 = TREE_CODE (arg0);
11014 code1 = TREE_CODE (arg1);
11015 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11016 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11017 && operand_equal_p (TREE_OPERAND (arg0, 0),
11018 TREE_OPERAND (arg1, 0), 0)
11019 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11020 TYPE_UNSIGNED (rtype))
11021 /* Only create rotates in complete modes. Other cases are not
11022 expanded properly. */
11023 && (element_precision (rtype)
11024 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11026 tree tree01, tree11;
11027 tree orig_tree01, orig_tree11;
11028 enum tree_code code01, code11;
11030 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11031 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11032 STRIP_NOPS (tree01);
11033 STRIP_NOPS (tree11);
11034 code01 = TREE_CODE (tree01);
11035 code11 = TREE_CODE (tree11);
11036 if (code11 != MINUS_EXPR
11037 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11039 std::swap (code0, code1);
11040 std::swap (code01, code11);
11041 std::swap (tree01, tree11);
11042 std::swap (orig_tree01, orig_tree11);
11044 if (code01 == INTEGER_CST
11045 && code11 == INTEGER_CST
11046 && (wi::to_widest (tree01) + wi::to_widest (tree11)
11047 == element_precision (rtype)))
11049 tem = build2_loc (loc, LROTATE_EXPR,
11050 rtype, TREE_OPERAND (arg0, 0),
11051 code0 == LSHIFT_EXPR
11052 ? orig_tree01 : orig_tree11);
11053 return fold_convert_loc (loc, type, tem);
11055 else if (code11 == MINUS_EXPR)
11057 tree tree110, tree111;
11058 tree110 = TREE_OPERAND (tree11, 0);
11059 tree111 = TREE_OPERAND (tree11, 1);
11060 STRIP_NOPS (tree110);
11061 STRIP_NOPS (tree111);
11062 if (TREE_CODE (tree110) == INTEGER_CST
11063 && compare_tree_int (tree110,
11064 element_precision (rtype)) == 0
11065 && operand_equal_p (tree01, tree111, 0))
11067 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11068 ? LROTATE_EXPR : RROTATE_EXPR),
11069 rtype, TREE_OPERAND (arg0, 0),
11070 orig_tree01);
11071 return fold_convert_loc (loc, type, tem);
11074 else if (code == BIT_IOR_EXPR
11075 && code11 == BIT_AND_EXPR
11076 && pow2p_hwi (element_precision (rtype)))
11078 tree tree110, tree111;
11079 tree110 = TREE_OPERAND (tree11, 0);
11080 tree111 = TREE_OPERAND (tree11, 1);
11081 STRIP_NOPS (tree110);
11082 STRIP_NOPS (tree111);
11083 if (TREE_CODE (tree110) == NEGATE_EXPR
11084 && TREE_CODE (tree111) == INTEGER_CST
11085 && compare_tree_int (tree111,
11086 element_precision (rtype) - 1) == 0
11087 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11089 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11090 ? LROTATE_EXPR : RROTATE_EXPR),
11091 rtype, TREE_OPERAND (arg0, 0),
11092 orig_tree01);
11093 return fold_convert_loc (loc, type, tem);
11099 associate:
11100 /* In most languages, we can't associate operations on floats through
11101 parentheses. Rather than remember where the parentheses were, we
11102 don't associate floats at all, unless the user has specified
11103 -fassociative-math.
11104 And, we need to make sure type is not saturating. */
11106 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11107 && !TYPE_SATURATING (type))
11109 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11110 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11111 tree atype = type;
11112 bool ok = true;
11114 /* Split both trees into variables, constants, and literals. Then
11115 associate each group together, the constants with literals,
11116 then the result with variables. This increases the chances of
11117 literals being recombined later and of generating relocatable
11118 expressions for the sum of a constant and literal. */
11119 var0 = split_tree (arg0, type, code,
11120 &minus_var0, &con0, &minus_con0,
11121 &lit0, &minus_lit0, 0);
11122 var1 = split_tree (arg1, type, code,
11123 &minus_var1, &con1, &minus_con1,
11124 &lit1, &minus_lit1, code == MINUS_EXPR);
11126 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
11127 if (code == MINUS_EXPR)
11128 code = PLUS_EXPR;
11130 /* With undefined overflow prefer doing association in a type
11131 which wraps on overflow, if that is one of the operand types. */
11132 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11133 && !TYPE_OVERFLOW_WRAPS (type))
11135 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11136 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11137 atype = TREE_TYPE (arg0);
11138 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11139 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11140 atype = TREE_TYPE (arg1);
11141 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11144 /* With undefined overflow we can only associate constants with one
11145 variable, and constants whose association doesn't overflow. */
11146 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11147 && !TYPE_OVERFLOW_WRAPS (atype))
11149 if ((var0 && var1) || (minus_var0 && minus_var1))
11151 /* ??? If split_tree would handle NEGATE_EXPR we could
11152 simply reject these cases and the allowed cases would
11153 be the var0/minus_var1 ones. */
11154 tree tmp0 = var0 ? var0 : minus_var0;
11155 tree tmp1 = var1 ? var1 : minus_var1;
11156 bool one_neg = false;
11158 if (TREE_CODE (tmp0) == NEGATE_EXPR)
11160 tmp0 = TREE_OPERAND (tmp0, 0);
11161 one_neg = !one_neg;
11163 if (CONVERT_EXPR_P (tmp0)
11164 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11165 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11166 <= TYPE_PRECISION (atype)))
11167 tmp0 = TREE_OPERAND (tmp0, 0);
11168 if (TREE_CODE (tmp1) == NEGATE_EXPR)
11170 tmp1 = TREE_OPERAND (tmp1, 0);
11171 one_neg = !one_neg;
11173 if (CONVERT_EXPR_P (tmp1)
11174 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11175 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11176 <= TYPE_PRECISION (atype)))
11177 tmp1 = TREE_OPERAND (tmp1, 0);
11178 /* The only case we can still associate with two variables
11179 is if they cancel out. */
11180 if (!one_neg
11181 || !operand_equal_p (tmp0, tmp1, 0))
11182 ok = false;
11184 else if ((var0 && minus_var1
11185 && ! operand_equal_p (var0, minus_var1, 0))
11186 || (minus_var0 && var1
11187 && ! operand_equal_p (minus_var0, var1, 0)))
11188 ok = false;
11191 /* Only do something if we found more than two objects. Otherwise,
11192 nothing has changed and we risk infinite recursion. */
11193 if (ok
11194 && ((var0 != 0) + (var1 != 0)
11195 + (minus_var0 != 0) + (minus_var1 != 0)
11196 + (con0 != 0) + (con1 != 0)
11197 + (minus_con0 != 0) + (minus_con1 != 0)
11198 + (lit0 != 0) + (lit1 != 0)
11199 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11201 var0 = associate_trees (loc, var0, var1, code, atype);
11202 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11203 code, atype);
11204 con0 = associate_trees (loc, con0, con1, code, atype);
11205 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11206 code, atype);
11207 lit0 = associate_trees (loc, lit0, lit1, code, atype);
11208 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11209 code, atype);
11211 if (minus_var0 && var0)
11213 var0 = associate_trees (loc, var0, minus_var0,
11214 MINUS_EXPR, atype);
11215 minus_var0 = 0;
11217 if (minus_con0 && con0)
11219 con0 = associate_trees (loc, con0, minus_con0,
11220 MINUS_EXPR, atype);
11221 minus_con0 = 0;
11224 /* Preserve the MINUS_EXPR if the negative part of the literal is
11225 greater than the positive part. Otherwise, the multiplicative
11226 folding code (i.e. extract_muldiv) may be fooled when
11227 unsigned constants are subtracted, like in the following
11228 example: ((X*2 + 4) - 8U)/2. */
11229 if (minus_lit0 && lit0)
11231 if (TREE_CODE (lit0) == INTEGER_CST
11232 && TREE_CODE (minus_lit0) == INTEGER_CST
11233 && tree_int_cst_lt (lit0, minus_lit0)
11234 /* But avoid ending up with only negated parts. */
11235 && (var0 || con0))
11237 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11238 MINUS_EXPR, atype);
11239 lit0 = 0;
11241 else
11243 lit0 = associate_trees (loc, lit0, minus_lit0,
11244 MINUS_EXPR, atype);
11245 minus_lit0 = 0;
11249 /* Don't introduce overflows through reassociation. */
11250 if ((lit0 && TREE_OVERFLOW_P (lit0))
11251 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11252 return NULL_TREE;
11254 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11255 con0 = associate_trees (loc, con0, lit0, code, atype);
11256 lit0 = 0;
11257 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11258 code, atype);
11259 minus_lit0 = 0;
11261 /* Eliminate minus_con0. */
11262 if (minus_con0)
11264 if (con0)
11265 con0 = associate_trees (loc, con0, minus_con0,
11266 MINUS_EXPR, atype);
11267 else if (var0)
11268 var0 = associate_trees (loc, var0, minus_con0,
11269 MINUS_EXPR, atype);
11270 else
11271 gcc_unreachable ();
11272 minus_con0 = 0;
11275 /* Eliminate minus_var0. */
11276 if (minus_var0)
11278 if (con0)
11279 con0 = associate_trees (loc, con0, minus_var0,
11280 MINUS_EXPR, atype);
11281 else
11282 gcc_unreachable ();
11283 minus_var0 = 0;
11286 return
11287 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11288 code, atype));
11292 return NULL_TREE;
11294 case POINTER_DIFF_EXPR:
11295 case MINUS_EXPR:
11296 /* Fold &a[i] - &a[j] to i - j. */
11297 if (TREE_CODE (arg0) == ADDR_EXPR
11298 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11299 && TREE_CODE (arg1) == ADDR_EXPR
11300 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11302 tree tem = fold_addr_of_array_ref_difference (loc, type,
11303 TREE_OPERAND (arg0, 0),
11304 TREE_OPERAND (arg1, 0),
11305 code
11306 == POINTER_DIFF_EXPR);
11307 if (tem)
11308 return tem;
11311 /* Further transformations are not for pointers. */
11312 if (code == POINTER_DIFF_EXPR)
11313 return NULL_TREE;
11315 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
11316 if (TREE_CODE (arg0) == NEGATE_EXPR
11317 && negate_expr_p (op1)
11318 /* If arg0 is e.g. unsigned int and type is int, then this could
11319 introduce UB, because if A is INT_MIN at runtime, the original
11320 expression can be well defined while the latter is not.
11321 See PR83269. */
11322 && !(ANY_INTEGRAL_TYPE_P (type)
11323 && TYPE_OVERFLOW_UNDEFINED (type)
11324 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11325 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11326 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11327 fold_convert_loc (loc, type,
11328 TREE_OPERAND (arg0, 0)));
11330 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11331 __complex__ ( x, -y ). This is not the same for SNaNs or if
11332 signed zeros are involved. */
11333 if (!HONOR_SNANS (element_mode (arg0))
11334 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
11335 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11337 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11338 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11339 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11340 bool arg0rz = false, arg0iz = false;
11341 if ((arg0r && (arg0rz = real_zerop (arg0r)))
11342 || (arg0i && (arg0iz = real_zerop (arg0i))))
11344 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11345 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11346 if (arg0rz && arg1i && real_zerop (arg1i))
11348 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11349 arg1r ? arg1r
11350 : build1 (REALPART_EXPR, rtype, arg1));
11351 tree ip = arg0i ? arg0i
11352 : build1 (IMAGPART_EXPR, rtype, arg0);
11353 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11355 else if (arg0iz && arg1r && real_zerop (arg1r))
11357 tree rp = arg0r ? arg0r
11358 : build1 (REALPART_EXPR, rtype, arg0);
11359 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11360 arg1i ? arg1i
11361 : build1 (IMAGPART_EXPR, rtype, arg1));
11362 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11367 /* A - B -> A + (-B) if B is easily negatable. */
11368 if (negate_expr_p (op1)
11369 && ! TYPE_OVERFLOW_SANITIZED (type)
11370 && ((FLOAT_TYPE_P (type)
11371 /* Avoid this transformation if B is a positive REAL_CST. */
11372 && (TREE_CODE (op1) != REAL_CST
11373 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
11374 || INTEGRAL_TYPE_P (type)))
11375 return fold_build2_loc (loc, PLUS_EXPR, type,
11376 fold_convert_loc (loc, type, arg0),
11377 negate_expr (op1));
11379 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11380 one. Make sure the type is not saturating and has the signedness of
11381 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11382 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11383 if ((TREE_CODE (arg0) == MULT_EXPR
11384 || TREE_CODE (arg1) == MULT_EXPR)
11385 && !TYPE_SATURATING (type)
11386 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11387 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11388 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11390 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11391 if (tem)
11392 return tem;
11395 goto associate;
11397 case MULT_EXPR:
11398 if (! FLOAT_TYPE_P (type))
11400 /* Transform x * -C into -x * C if x is easily negatable. */
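/* E.g. if x is itself easily negatable, say x is -y, then
   -y * -4 becomes y * 4 (a hypothetical example), keeping the
   constant non-negative. */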
11401 if (TREE_CODE (op1) == INTEGER_CST
11402 && tree_int_cst_sgn (op1) == -1
11403 && negate_expr_p (op0)
11404 && negate_expr_p (op1)
11405 && (tem = negate_expr (op1)) != op1
11406 && ! TREE_OVERFLOW (tem))
11407 return fold_build2_loc (loc, MULT_EXPR, type,
11408 fold_convert_loc (loc, type,
11409 negate_expr (op0)), tem);
11411 strict_overflow_p = false;
11412 if (TREE_CODE (arg1) == INTEGER_CST
11413 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11414 &strict_overflow_p)) != 0)
11416 if (strict_overflow_p)
11417 fold_overflow_warning (("assuming signed overflow does not "
11418 "occur when simplifying "
11419 "multiplication"),
11420 WARN_STRICT_OVERFLOW_MISC);
11421 return fold_convert_loc (loc, type, tem);
11424 /* Optimize z * conj(z) for integer complex numbers. */
11425 if (TREE_CODE (arg0) == CONJ_EXPR
11426 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11427 return fold_mult_zconjz (loc, type, arg1);
11428 if (TREE_CODE (arg1) == CONJ_EXPR
11429 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11430 return fold_mult_zconjz (loc, type, arg0);
11432 else
11434 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11435 This is not the same for NaNs or if signed zeros are
11436 involved. */
11437 if (!HONOR_NANS (arg0)
11438 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
11439 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11440 && TREE_CODE (arg1) == COMPLEX_CST
11441 && real_zerop (TREE_REALPART (arg1)))
11443 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11444 if (real_onep (TREE_IMAGPART (arg1)))
11445 return
11446 fold_build2_loc (loc, COMPLEX_EXPR, type,
11447 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11448 rtype, arg0)),
11449 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11450 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11451 return
11452 fold_build2_loc (loc, COMPLEX_EXPR, type,
11453 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11454 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11455 rtype, arg0)));
11458 /* Optimize z * conj(z) for floating point complex numbers.
11459 Guarded by flag_unsafe_math_optimizations as non-finite
11460 imaginary components don't produce scalar results. */
11461 if (flag_unsafe_math_optimizations
11462 && TREE_CODE (arg0) == CONJ_EXPR
11463 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11464 return fold_mult_zconjz (loc, type, arg1);
11465 if (flag_unsafe_math_optimizations
11466 && TREE_CODE (arg1) == CONJ_EXPR
11467 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11468 return fold_mult_zconjz (loc, type, arg0);
11470 goto associate;
11472 case BIT_IOR_EXPR:
11473 /* Canonicalize (X & C1) | C2. */
11474 if (TREE_CODE (arg0) == BIT_AND_EXPR
11475 && TREE_CODE (arg1) == INTEGER_CST
11476 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11478 int width = TYPE_PRECISION (type), w;
11479 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11480 wide_int c2 = wi::to_wide (arg1);
11482 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11483 if ((c1 & c2) == c1)
11484 return omit_one_operand_loc (loc, type, arg1,
11485 TREE_OPERAND (arg0, 0));
11487 wide_int msk = wi::mask (width, false,
11488 TYPE_PRECISION (TREE_TYPE (arg1)));
11490 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11491 if (wi::bit_and_not (msk, c1 | c2) == 0)
11493 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11494 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11497 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11498 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11499 mode which allows further optimizations. */
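/* E.g. (x & 3) | 6 becomes (x & 1) | 6 here (hypothetical example):
   bit 1 of C1 is already forced on by C2, so it is dropped from the
   mask unless doing so would destroy a mode-sized mask. */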
11500 c1 &= msk;
11501 c2 &= msk;
11502 wide_int c3 = wi::bit_and_not (c1, c2);
11503 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11505 wide_int mask = wi::mask (w, false,
11506 TYPE_PRECISION (type));
11507 if (((c1 | c2) & mask) == mask
11508 && wi::bit_and_not (c1, mask) == 0)
11510 c3 = mask;
11511 break;
11515 if (c3 != c1)
11517 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11518 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11519 wide_int_to_tree (type, c3));
11520 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11524 /* See if this can be simplified into a rotate first. If that
11525 is unsuccessful continue in the association code. */
11526 goto bit_rotate;
11528 case BIT_XOR_EXPR:
11529 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11530 if (TREE_CODE (arg0) == BIT_AND_EXPR
11531 && INTEGRAL_TYPE_P (type)
11532 && integer_onep (TREE_OPERAND (arg0, 1))
11533 && integer_onep (arg1))
11534 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11535 build_zero_cst (TREE_TYPE (arg0)));
11537 /* See if this can be simplified into a rotate first. If that
11538 is unsuccessful continue in the association code. */
11539 goto bit_rotate;
11541 case BIT_AND_EXPR:
11542 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11543 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11544 && INTEGRAL_TYPE_P (type)
11545 && integer_onep (TREE_OPERAND (arg0, 1))
11546 && integer_onep (arg1))
11548 tree tem2;
11549 tem = TREE_OPERAND (arg0, 0);
11550 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11551 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11552 tem, tem2);
11553 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11554 build_zero_cst (TREE_TYPE (tem)));
11556 /* Fold ~X & 1 as (X & 1) == 0. */
11557 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11558 && INTEGRAL_TYPE_P (type)
11559 && integer_onep (arg1))
11561 tree tem2;
11562 tem = TREE_OPERAND (arg0, 0);
11563 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11564 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11565 tem, tem2);
11566 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11567 build_zero_cst (TREE_TYPE (tem)));
11569 /* Fold !X & 1 as X == 0. */
11570 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11571 && integer_onep (arg1))
11573 tem = TREE_OPERAND (arg0, 0);
11574 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11575 build_zero_cst (TREE_TYPE (tem)));
11578 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11579 multiple of 1 << CST. */
11580 if (TREE_CODE (arg1) == INTEGER_CST)
11582 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11583 wide_int ncst1 = -cst1;
11584 if ((cst1 & ncst1) == ncst1
11585 && multiple_of_p (type, arg0,
11586 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11587 return fold_convert_loc (loc, type, arg0);
11590 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11591 bits from CST2. */
11592 if (TREE_CODE (arg1) == INTEGER_CST
11593 && TREE_CODE (arg0) == MULT_EXPR
11594 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11596 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11597 wide_int masked
11598 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11600 if (masked == 0)
11601 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11602 arg0, arg1);
11603 else if (masked != warg1)
11605 /* Avoid the transform if arg1 is a mask of some
11606 mode which allows further optimizations. */
11607 int pop = wi::popcount (warg1);
11608 if (!(pop >= BITS_PER_UNIT
11609 && pow2p_hwi (pop)
11610 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11611 return fold_build2_loc (loc, code, type, op0,
11612 wide_int_to_tree (type, masked));
11616 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11617 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11618 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11620 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11622 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11623 if (mask == -1)
11624 return
11625 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11628 goto associate;
11630 case RDIV_EXPR:
11631 /* Don't touch a floating-point divide by zero unless the mode
11632 of the constant can represent infinity. */
11633 if (TREE_CODE (arg1) == REAL_CST
11634 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11635 && real_zerop (arg1))
11636 return NULL_TREE;
11638 /* (-A) / (-B) -> A / B */
11639 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11640 return fold_build2_loc (loc, RDIV_EXPR, type,
11641 TREE_OPERAND (arg0, 0),
11642 negate_expr (arg1));
11643 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11644 return fold_build2_loc (loc, RDIV_EXPR, type,
11645 negate_expr (arg0),
11646 TREE_OPERAND (arg1, 0));
11647 return NULL_TREE;
11649 case TRUNC_DIV_EXPR:
11650 /* Fall through */
11652 case FLOOR_DIV_EXPR:
11653 /* Simplify A / (B << N) where A and B are positive and B is
11654 a power of 2, to A >> (N + log2(B)). */
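/* E.g. x / (4u << n) becomes x >> (n + 2) for unsigned x
   (a hypothetical example; log2 (4) == 2). */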
11655 strict_overflow_p = false;
11656 if (TREE_CODE (arg1) == LSHIFT_EXPR
11657 && (TYPE_UNSIGNED (type)
11658 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11660 tree sval = TREE_OPERAND (arg1, 0);
11661 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11663 tree sh_cnt = TREE_OPERAND (arg1, 1);
11664 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11665 wi::exact_log2 (wi::to_wide (sval)));
11667 if (strict_overflow_p)
11668 fold_overflow_warning (("assuming signed overflow does not "
11669 "occur when simplifying A / (B << N)"),
11670 WARN_STRICT_OVERFLOW_MISC);
11672 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11673 sh_cnt, pow2);
11674 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11675 fold_convert_loc (loc, type, arg0), sh_cnt);
11679 /* Fall through */
11681 case ROUND_DIV_EXPR:
11682 case CEIL_DIV_EXPR:
11683 case EXACT_DIV_EXPR:
11684 if (integer_zerop (arg1))
11685 return NULL_TREE;
11687 /* Convert -A / -B to A / B when the type is signed and overflow is
11688 undefined. */
11689 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11690 && TREE_CODE (op0) == NEGATE_EXPR
11691 && negate_expr_p (op1))
11693 if (ANY_INTEGRAL_TYPE_P (type))
11694 fold_overflow_warning (("assuming signed overflow does not occur "
11695 "when distributing negation across "
11696 "division"),
11697 WARN_STRICT_OVERFLOW_MISC);
11698 return fold_build2_loc (loc, code, type,
11699 fold_convert_loc (loc, type,
11700 TREE_OPERAND (arg0, 0)),
11701 negate_expr (op1));
11703 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11704 && TREE_CODE (arg1) == NEGATE_EXPR
11705 && negate_expr_p (op0))
11707 if (ANY_INTEGRAL_TYPE_P (type))
11708 fold_overflow_warning (("assuming signed overflow does not occur "
11709 "when distributing negation across "
11710 "division"),
11711 WARN_STRICT_OVERFLOW_MISC);
11712 return fold_build2_loc (loc, code, type,
11713 negate_expr (op0),
11714 fold_convert_loc (loc, type,
11715 TREE_OPERAND (arg1, 0)));
11718 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11719 operation, EXACT_DIV_EXPR.
11721 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11722 At one time others generated faster code; it's not clear whether they do
11723 after the last round of changes to the DIV code in expmed.c. */
11724 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11725 && multiple_of_p (type, arg0, arg1))
11726 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11727 fold_convert (type, arg0),
11728 fold_convert (type, arg1));
11730 strict_overflow_p = false;
11731 if (TREE_CODE (arg1) == INTEGER_CST
11732 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11733 &strict_overflow_p)) != 0)
11735 if (strict_overflow_p)
11736 fold_overflow_warning (("assuming signed overflow does not occur "
11737 "when simplifying division"),
11738 WARN_STRICT_OVERFLOW_MISC);
11739 return fold_convert_loc (loc, type, tem);
11742 return NULL_TREE;
11744 case CEIL_MOD_EXPR:
11745 case FLOOR_MOD_EXPR:
11746 case ROUND_MOD_EXPR:
11747 case TRUNC_MOD_EXPR:
11748 strict_overflow_p = false;
11749 if (TREE_CODE (arg1) == INTEGER_CST
11750 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11751 &strict_overflow_p)) != 0)
11753 if (strict_overflow_p)
11754 fold_overflow_warning (("assuming signed overflow does not occur "
11755 "when simplifying modulus"),
11756 WARN_STRICT_OVERFLOW_MISC);
11757 return fold_convert_loc (loc, type, tem);
11760 return NULL_TREE;
11762 case LROTATE_EXPR:
11763 case RROTATE_EXPR:
11764 case RSHIFT_EXPR:
11765 case LSHIFT_EXPR:
11766 /* Since a negative shift count is not well-defined,
11767 don't try to compute it in the compiler. */
11768 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11769 return NULL_TREE;
11771 prec = element_precision (type);
11773 /* If we have a rotate of a bit operation with the rotate count and
11774 the second operand of the bit operation both constant,
11775 permute the two operations. */
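/* E.g. for a 32-bit unsigned x (illustrative only),
     (x & 0xF0) rrotate 4  ->  (x rrotate 4) & (0xF0 rrotate 4)
   and the rotated mask then folds to 0xF. */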
11776 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11777 && (TREE_CODE (arg0) == BIT_AND_EXPR
11778 || TREE_CODE (arg0) == BIT_IOR_EXPR
11779 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11780 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11782 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11783 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11784 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11785 fold_build2_loc (loc, code, type,
11786 arg00, arg1),
11787 fold_build2_loc (loc, code, type,
11788 arg01, arg1));
11791 /* Two consecutive rotates adding up to some integer
11792 multiple of the precision of the type can be ignored. */
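/* E.g. (x rrotate 5) rrotate 27 is just x in a 32-bit type,
   since 5 + 27 == 32 (a hypothetical example). */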
11793 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11794 && TREE_CODE (arg0) == RROTATE_EXPR
11795 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11796 && wi::umod_trunc (wi::to_wide (arg1)
11797 + wi::to_wide (TREE_OPERAND (arg0, 1)),
11798 prec) == 0)
11799 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11801 return NULL_TREE;
11803 case MIN_EXPR:
11804 case MAX_EXPR:
11805 goto associate;
11807 case TRUTH_ANDIF_EXPR:
11808 /* Note that the operands of this must be ints
11809 and their values must be 0 or 1.
11810 ("true" is a fixed value perhaps depending on the language.) */
11811 /* If first arg is constant zero, return it. */
11812 if (integer_zerop (arg0))
11813 return fold_convert_loc (loc, type, arg0);
11814 /* FALLTHRU */
11815 case TRUTH_AND_EXPR:
11816 /* If either arg is constant true, drop it. */
11817 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11818 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11819 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11820 /* Preserve sequence points. */
11821 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11822 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11823 /* If second arg is constant zero, result is zero, but first arg
11824 must be evaluated. */
11825 if (integer_zerop (arg1))
11826 return omit_one_operand_loc (loc, type, arg1, arg0);
11827 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11828 case will be handled here. */
11829 if (integer_zerop (arg0))
11830 return omit_one_operand_loc (loc, type, arg0, arg1);
11832 /* !X && X is always false. */
11833 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11834 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11835 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11836 /* X && !X is always false. */
11837 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11838 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11839 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11841 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11842 means A >= Y && A != MAX, but in this case we know that
11843 A < X <= MAX. */
11845 if (!TREE_SIDE_EFFECTS (arg0)
11846 && !TREE_SIDE_EFFECTS (arg1))
11848 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11849 if (tem && !operand_equal_p (tem, arg0, 0))
11850 return fold_build2_loc (loc, code, type, tem, arg1);
11852 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11853 if (tem && !operand_equal_p (tem, arg1, 0))
11854 return fold_build2_loc (loc, code, type, arg0, tem);
11857 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11858 != NULL_TREE)
11859 return tem;
11861 return NULL_TREE;
11863 case TRUTH_ORIF_EXPR:
11864 /* Note that the operands of this must be ints
11865 and their values must be 0 or true.
11866 ("true" is a fixed value perhaps depending on the language.) */
11867 /* If first arg is constant true, return it. */
11868 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11869 return fold_convert_loc (loc, type, arg0);
11870 /* FALLTHRU */
11871 case TRUTH_OR_EXPR:
11872 /* If either arg is constant zero, drop it. */
11873 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11874 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11875 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11876 /* Preserve sequence points. */
11877 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11878 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11879 /* If second arg is constant true, result is true, but we must
11880 evaluate first arg. */
11881 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11882 return omit_one_operand_loc (loc, type, arg1, arg0);
11883 /* Likewise for first arg, but note this only occurs here for
11884 TRUTH_OR_EXPR. */
11885 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11886 return omit_one_operand_loc (loc, type, arg0, arg1);
11888 /* !X || X is always true. */
11889 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11890 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11891 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11892 /* X || !X is always true. */
11893 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11894 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11895 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11897 /* (X && !Y) || (!X && Y) is X ^ Y */
11898 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
11899 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
11901 tree a0, a1, l0, l1, n0, n1;
11903 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11904 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11906 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11907 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11909 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
11910 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
11912 if ((operand_equal_p (n0, a0, 0)
11913 && operand_equal_p (n1, a1, 0))
11914 || (operand_equal_p (n0, a1, 0)
11915 && operand_equal_p (n1, a0, 0)))
11916 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
11919 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11920 != NULL_TREE)
11921 return tem;
11923 return NULL_TREE;
11925 case TRUTH_XOR_EXPR:
11926 /* If the second arg is constant zero, drop it. */
11927 if (integer_zerop (arg1))
11928 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11929 /* If the second arg is constant true, this is a logical inversion. */
11930 if (integer_onep (arg1))
11932 tem = invert_truthvalue_loc (loc, arg0);
11933 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
11935 /* Identical arguments cancel to zero. */
11936 if (operand_equal_p (arg0, arg1, 0))
11937 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11939 /* !X ^ X is always true. */
11940 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11941 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11942 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11944 /* X ^ !X is always true. */
11945 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11946 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11947 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11949 return NULL_TREE;
11951 case EQ_EXPR:
11952 case NE_EXPR:
11953 STRIP_NOPS (arg0);
11954 STRIP_NOPS (arg1);
11956 tem = fold_comparison (loc, code, type, op0, op1);
11957 if (tem != NULL_TREE)
11958 return tem;
11960 /* bool_var != 1 becomes !bool_var. */
11961 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11962 && code == NE_EXPR)
11963 return fold_convert_loc (loc, type,
11964 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11965 TREE_TYPE (arg0), arg0));
11967 /* bool_var == 0 becomes !bool_var. */
11968 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11969 && code == EQ_EXPR)
11970 return fold_convert_loc (loc, type,
11971 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11972 TREE_TYPE (arg0), arg0));
11974 /* !exp != 0 becomes !exp */
11975 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
11976 && code == NE_EXPR)
11977 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11979 /* If this is an EQ or NE comparison with zero and ARG0 is
11980 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11981 two operations, but the latter can be done in one less insn
11982 on machines that have only two-operand insns or on which a
11983 constant cannot be the first operand. */
11984 if (TREE_CODE (arg0) == BIT_AND_EXPR
11985 && integer_zerop (arg1))
11987 tree arg00 = TREE_OPERAND (arg0, 0);
11988 tree arg01 = TREE_OPERAND (arg0, 1);
11989 if (TREE_CODE (arg00) == LSHIFT_EXPR
11990 && integer_onep (TREE_OPERAND (arg00, 0)))
11992 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
11993 arg01, TREE_OPERAND (arg00, 1));
11994 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11995 build_int_cst (TREE_TYPE (arg0), 1));
11996 return fold_build2_loc (loc, code, type,
11997 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11998 arg1);
12000 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12001 && integer_onep (TREE_OPERAND (arg01, 0)))
12003 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12004 arg00, TREE_OPERAND (arg01, 1));
12005 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12006 build_int_cst (TREE_TYPE (arg0), 1));
12007 return fold_build2_loc (loc, code, type,
12008 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12009 arg1);
12013 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12014 C1 is a valid shift constant, and C2 is a power of two, i.e.
12015 a single bit. */
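/* Hypothetical examples for a 32-bit x:
     ((x >> 2) & 4) != 0   ->  (x & 16) != 0
     ((x >> 31) & 2) == 0  ->  x >= 0   (signed x; 2 << 31 would not
                                         fit in the type) */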
12016 if (TREE_CODE (arg0) == BIT_AND_EXPR
12017 && integer_pow2p (TREE_OPERAND (arg0, 1))
12018 && integer_zerop (arg1))
12020 tree arg00 = TREE_OPERAND (arg0, 0);
12021 STRIP_NOPS (arg00);
12022 if (TREE_CODE (arg00) == RSHIFT_EXPR
12023 && TREE_CODE (TREE_OPERAND (arg00, 1)) == INTEGER_CST)
12025 tree itype = TREE_TYPE (arg00);
12026 tree arg001 = TREE_OPERAND (arg00, 1);
12027 prec = TYPE_PRECISION (itype);
12029 /* Check for a valid shift count. */
12030 if (wi::ltu_p (wi::to_wide (arg001), prec))
12032 tree arg01 = TREE_OPERAND (arg0, 1);
12033 tree arg000 = TREE_OPERAND (arg00, 0);
12034 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12035 /* If (C2 << C1) doesn't overflow, then
12036 ((X >> C1) & C2) != 0 can be rewritten as
12037 (X & (C2 << C1)) != 0. */
12038 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12040 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
12041 arg01, arg001);
12042 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
12043 arg000, tem);
12044 return fold_build2_loc (loc, code, type, tem,
12045 fold_convert_loc (loc, itype, arg1));
12047 /* Otherwise, for signed (arithmetic) shifts,
12048 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12049 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12050 else if (!TYPE_UNSIGNED (itype))
12051 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR
12052 : LT_EXPR,
12053 type, arg000,
12054 build_int_cst (itype, 0));
12055 /* Otherwise, for unsigned (logical) shifts,
12056 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12057 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12058 else
12059 return omit_one_operand_loc (loc, type,
12060 code == EQ_EXPR ? integer_one_node
12061 : integer_zero_node,
12062 arg000);
12067 /* If this is a comparison of a field, we may be able to simplify it. */
12068 if ((TREE_CODE (arg0) == COMPONENT_REF
12069 || TREE_CODE (arg0) == BIT_FIELD_REF)
12070 /* Handle the constant case even without -O
12071 to make sure the warnings are given. */
12072 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12074 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12075 if (t1)
12076 return t1;
12079 /* Optimize comparisons of strlen vs zero to a compare of the
12080 first character of the string vs zero. To wit,
12081 strlen(ptr) == 0 => *ptr == 0
12082 strlen(ptr) != 0 => *ptr != 0
12083 Other cases should reduce to one of these two (or a constant)
12084 due to the return value of strlen being unsigned. */
12085 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12087 tree fndecl = get_callee_fndecl (arg0);
12089 if (fndecl
12090 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12091 && call_expr_nargs (arg0) == 1
12092 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12093 == POINTER_TYPE))
12095 tree ptrtype
12096 = build_pointer_type (build_qualified_type (char_type_node,
12097 TYPE_QUAL_CONST));
12098 tree ptr = fold_convert_loc (loc, ptrtype,
12099 CALL_EXPR_ARG (arg0, 0));
12100 tree iref = build_fold_indirect_ref_loc (loc, ptr);
12101 return fold_build2_loc (loc, code, type, iref,
12102 build_int_cst (TREE_TYPE (iref), 0));
12106 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12107 of X. Similarly fold (X >> C) == 0 into X >= 0. */
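/* E.g. for a 32-bit int x (illustrative only),
     (x >> 31) != 0  ->  x < 0
     (x >> 31) == 0  ->  x >= 0
   For unsigned x the comparison is first done in the signed type. */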
12108 if (TREE_CODE (arg0) == RSHIFT_EXPR
12109 && integer_zerop (arg1)
12110 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12112 tree arg00 = TREE_OPERAND (arg0, 0);
12113 tree arg01 = TREE_OPERAND (arg0, 1);
12114 tree itype = TREE_TYPE (arg00);
12115 if (wi::to_wide (arg01) == element_precision (itype) - 1)
12117 if (TYPE_UNSIGNED (itype))
12119 itype = signed_type_for (itype);
12120 arg00 = fold_convert_loc (loc, itype, arg00);
12122 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12123 type, arg00, build_zero_cst (itype));
12127 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12128 (X & C) == 0 when C is a single bit. */
12129 if (TREE_CODE (arg0) == BIT_AND_EXPR
12130 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12131 && integer_zerop (arg1)
12132 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12134 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12135 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12136 TREE_OPERAND (arg0, 1));
12137 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12138 type, tem,
12139 fold_convert_loc (loc, TREE_TYPE (arg0),
12140 arg1));
12143 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12144 constant C is a power of two, i.e. a single bit. */
12145 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12146 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12147 && integer_zerop (arg1)
12148 && integer_pow2p (TREE_OPERAND (arg0, 1))
12149 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12150 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12152 tree arg00 = TREE_OPERAND (arg0, 0);
12153 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12154 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12157 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12158 when C is a power of two, i.e. a single bit. */
12159 if (TREE_CODE (arg0) == BIT_AND_EXPR
12160 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12161 && integer_zerop (arg1)
12162 && integer_pow2p (TREE_OPERAND (arg0, 1))
12163 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12164 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12166 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12167 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12168 arg000, TREE_OPERAND (arg0, 1));
12169 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12170 tem, build_int_cst (TREE_TYPE (tem), 0));
12173 if (integer_zerop (arg1)
12174 && tree_expr_nonzero_p (arg0))
12176 tree res = constant_boolean_node (code == NE_EXPR, type);
12177 return omit_one_operand_loc (loc, type, res, arg0);
12180 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12181 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12183 tree arg00 = TREE_OPERAND (arg0, 0);
12184 tree arg01 = TREE_OPERAND (arg0, 1);
12185 tree arg10 = TREE_OPERAND (arg1, 0);
12186 tree arg11 = TREE_OPERAND (arg1, 1);
12187 tree itype = TREE_TYPE (arg0);
12189 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12190 operand_equal_p guarantees no side-effects so we don't need
12191 to use omit_one_operand on Z. */
12192 if (operand_equal_p (arg01, arg11, 0))
12193 return fold_build2_loc (loc, code, type, arg00,
12194 fold_convert_loc (loc, TREE_TYPE (arg00),
12195 arg10));
12196 if (operand_equal_p (arg01, arg10, 0))
12197 return fold_build2_loc (loc, code, type, arg00,
12198 fold_convert_loc (loc, TREE_TYPE (arg00),
12199 arg11));
12200 if (operand_equal_p (arg00, arg11, 0))
12201 return fold_build2_loc (loc, code, type, arg01,
12202 fold_convert_loc (loc, TREE_TYPE (arg01),
12203 arg10));
12204 if (operand_equal_p (arg00, arg10, 0))
12205 return fold_build2_loc (loc, code, type, arg01,
12206 fold_convert_loc (loc, TREE_TYPE (arg01),
12207 arg11));
12209 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
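/* E.g. (x ^ 5) == (y ^ 3) becomes (x ^ 6) == y, since 5 ^ 3 == 6
   (a hypothetical example). */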
12210 if (TREE_CODE (arg01) == INTEGER_CST
12211 && TREE_CODE (arg11) == INTEGER_CST)
12213 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12214 fold_convert_loc (loc, itype, arg11));
12215 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12216 return fold_build2_loc (loc, code, type, tem,
12217 fold_convert_loc (loc, itype, arg10));
12221 /* Attempt to simplify equality/inequality comparisons of complex
12222 values. Only lower the comparison if the result is known or
12223 can be simplified to a single scalar comparison. */
12224 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12225 || TREE_CODE (arg0) == COMPLEX_CST)
12226 && (TREE_CODE (arg1) == COMPLEX_EXPR
12227 || TREE_CODE (arg1) == COMPLEX_CST))
12229 tree real0, imag0, real1, imag1;
12230 tree rcond, icond;
12232 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12234 real0 = TREE_OPERAND (arg0, 0);
12235 imag0 = TREE_OPERAND (arg0, 1);
12237 else
12239 real0 = TREE_REALPART (arg0);
12240 imag0 = TREE_IMAGPART (arg0);
12243 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12245 real1 = TREE_OPERAND (arg1, 0);
12246 imag1 = TREE_OPERAND (arg1, 1);
12248 else
12250 real1 = TREE_REALPART (arg1);
12251 imag1 = TREE_IMAGPART (arg1);
12254 rcond = fold_binary_loc (loc, code, type, real0, real1);
12255 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12257 if (integer_zerop (rcond))
12259 if (code == EQ_EXPR)
12260 return omit_two_operands_loc (loc, type, boolean_false_node,
12261 imag0, imag1);
12262 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12264 else
12266 if (code == NE_EXPR)
12267 return omit_two_operands_loc (loc, type, boolean_true_node,
12268 imag0, imag1);
12269 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12273 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12274 if (icond && TREE_CODE (icond) == INTEGER_CST)
12276 if (integer_zerop (icond))
12278 if (code == EQ_EXPR)
12279 return omit_two_operands_loc (loc, type, boolean_false_node,
12280 real0, real1);
12281 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12283 else
12285 if (code == NE_EXPR)
12286 return omit_two_operands_loc (loc, type, boolean_true_node,
12287 real0, real1);
12288 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12293 return NULL_TREE;
12295 case LT_EXPR:
12296 case GT_EXPR:
12297 case LE_EXPR:
12298 case GE_EXPR:
12299 tem = fold_comparison (loc, code, type, op0, op1);
12300 if (tem != NULL_TREE)
12301 return tem;
12303 /* Transform comparisons of the form X +- C CMP X. */
12304 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12305 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12306 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12307 && !HONOR_SNANS (arg0))
12309 tree arg01 = TREE_OPERAND (arg0, 1);
12310 enum tree_code code0 = TREE_CODE (arg0);
12311 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12313 /* (X - c) > X becomes false. */
12314 if (code == GT_EXPR
12315 && ((code0 == MINUS_EXPR && is_positive >= 0)
12316 || (code0 == PLUS_EXPR && is_positive <= 0)))
12317 return constant_boolean_node (0, type);
12319 /* Likewise (X + c) < X becomes false. */
12320 if (code == LT_EXPR
12321 && ((code0 == PLUS_EXPR && is_positive >= 0)
12322 || (code0 == MINUS_EXPR && is_positive <= 0)))
12323 return constant_boolean_node (0, type);
12325 /* Convert (X - c) <= X to true. */
12326 if (!HONOR_NANS (arg1)
12327 && code == LE_EXPR
12328 && ((code0 == MINUS_EXPR && is_positive >= 0)
12329 || (code0 == PLUS_EXPR && is_positive <= 0)))
12330 return constant_boolean_node (1, type);
12332 /* Convert (X + c) >= X to true. */
12333 if (!HONOR_NANS (arg1)
12334 && code == GE_EXPR
12335 && ((code0 == PLUS_EXPR && is_positive >= 0)
12336 || (code0 == MINUS_EXPR && is_positive <= 0)))
12337 return constant_boolean_node (1, type);
12340 /* If we are comparing an ABS_EXPR with a constant, we can
12341 convert all the cases into explicit comparisons, but they may
12342 well not be faster than doing the ABS and one comparison.
12343 But ABS (X) <= C is a range comparison, which becomes a subtraction
12344 and a comparison, and is probably faster. */
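/* E.g. ABS (x) <= 5 becomes x >= -5 && x <= 5 (illustrative example,
   valid when negating the bound does not overflow). */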
12345 if (code == LE_EXPR
12346 && TREE_CODE (arg1) == INTEGER_CST
12347 && TREE_CODE (arg0) == ABS_EXPR
12348 && ! TREE_SIDE_EFFECTS (arg0)
12349 && (tem = negate_expr (arg1)) != 0
12350 && TREE_CODE (tem) == INTEGER_CST
12351 && !TREE_OVERFLOW (tem))
12352 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12353 build2 (GE_EXPR, type,
12354 TREE_OPERAND (arg0, 0), tem),
12355 build2 (LE_EXPR, type,
12356 TREE_OPERAND (arg0, 0), arg1));
12358 /* Convert ABS_EXPR<x> >= 0 to true. */
12359 strict_overflow_p = false;
12360 if (code == GE_EXPR
12361 && (integer_zerop (arg1)
12362 || (! HONOR_NANS (arg0)
12363 && real_zerop (arg1)))
12364 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12366 if (strict_overflow_p)
12367 fold_overflow_warning (("assuming signed overflow does not occur "
12368 "when simplifying comparison of "
12369 "absolute value and zero"),
12370 WARN_STRICT_OVERFLOW_CONDITIONAL);
12371 return omit_one_operand_loc (loc, type,
12372 constant_boolean_node (true, type),
12373 arg0);
12376 /* Convert ABS_EXPR<x> < 0 to false. */
12377 strict_overflow_p = false;
12378 if (code == LT_EXPR
12379 && (integer_zerop (arg1) || real_zerop (arg1))
12380 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12382 if (strict_overflow_p)
12383 fold_overflow_warning (("assuming signed overflow does not occur "
12384 "when simplifying comparison of "
12385 "absolute value and zero"),
12386 WARN_STRICT_OVERFLOW_CONDITIONAL);
12387 return omit_one_operand_loc (loc, type,
12388 constant_boolean_node (false, type),
12389 arg0);
12392 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12393 and similarly for >= into !=. */
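/* Hypothetical examples for unsigned x:
     x < (1 << y)   ->  (x >> y) == 0
     x >= (1 << y)  ->  (x >> y) != 0 */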
12394 if ((code == LT_EXPR || code == GE_EXPR)
12395 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12396 && TREE_CODE (arg1) == LSHIFT_EXPR
12397 && integer_onep (TREE_OPERAND (arg1, 0)))
12398 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12399 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12400 TREE_OPERAND (arg1, 1)),
12401 build_zero_cst (TREE_TYPE (arg0)));
12403 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
12404 otherwise Y might be >= # of bits in X's type and thus e.g.
12405 (unsigned char) (1 << Y) for Y == 15 might be 0.
12406 If the cast is widening, then 1 << Y should have unsigned type,
12407 otherwise if Y is number of bits in the signed shift type minus 1,
12408 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
12409 Y == 31 might be 0xffffffff80000000. */
12410 if ((code == LT_EXPR || code == GE_EXPR)
12411 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12412 && CONVERT_EXPR_P (arg1)
12413 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12414 && (element_precision (TREE_TYPE (arg1))
12415 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12416 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12417 || (element_precision (TREE_TYPE (arg1))
12418 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12419 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12421 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12422 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12423 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12424 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12425 build_zero_cst (TREE_TYPE (arg0)));
12428 return NULL_TREE;
12430 case UNORDERED_EXPR:
12431 case ORDERED_EXPR:
12432 case UNLT_EXPR:
12433 case UNLE_EXPR:
12434 case UNGT_EXPR:
12435 case UNGE_EXPR:
12436 case UNEQ_EXPR:
12437 case LTGT_EXPR:
12438 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12440 tree targ0 = strip_float_extensions (arg0);
12441 tree targ1 = strip_float_extensions (arg1);
12442 tree newtype = TREE_TYPE (targ0);
12444 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12445 newtype = TREE_TYPE (targ1);
12447 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12448 return fold_build2_loc (loc, code, type,
12449 fold_convert_loc (loc, newtype, targ0),
12450 fold_convert_loc (loc, newtype, targ1));
12453 return NULL_TREE;
12455 case COMPOUND_EXPR:
12456 /* When pedantic, a compound expression can be neither an lvalue
12457 nor an integer constant expression. */
12458 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12459 return NULL_TREE;
12460 /* Don't let (0, 0) be a null pointer constant. */
12461 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12462 : fold_convert_loc (loc, type, arg1);
12463 return pedantic_non_lvalue_loc (loc, tem);
12465 case ASSERT_EXPR:
12466 /* An ASSERT_EXPR should never be passed to fold_binary. */
12467 gcc_unreachable ();
12469 default:
12470 return NULL_TREE;
12471 } /* switch (code) */
12474 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12475 ((A & N) + B) & M -> (A + B) & M
12476 Similarly if (N & M) == 0,
12477 ((A | N) + B) & M -> (A + B) & M
12478 and for - instead of + (or unary - instead of +)
12479 and/or ^ instead of |.
12480 If B is constant and (B & M) == 0, fold into A & M.
12482 This function is a helper for match.pd patterns. If any optimization
12483 is possible, return the type in which the simplified operation should
12484 be performed; otherwise return NULL_TREE.
12486 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12487 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12488 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12489 +/-. */
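/* As a hypothetical example of the above: with M == 3 (so cst == 2)
   and N == 7, ((A & 7) + B) & 3 simplifies to (A + B) & 3, because
   the low two bits of the sum do not depend on the bits masked off
   by N. */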
12490 tree
12491 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12492 tree arg00, enum tree_code code00, tree arg000, tree arg001,
12493 tree arg01, enum tree_code code01, tree arg010, tree arg011,
12494 tree *pmop)
12496 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12497 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12498 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12499 if (~cst1 == 0
12500 || (cst1 & (cst1 + 1)) != 0
12501 || !INTEGRAL_TYPE_P (type)
12502 || (!TYPE_OVERFLOW_WRAPS (type)
12503 && TREE_CODE (type) != INTEGER_TYPE)
12504 || (wi::max_value (type) & cst1) != cst1)
12505 return NULL_TREE;
12507 enum tree_code codes[2] = { code00, code01 };
12508 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12509 int which = 0;
12510 wide_int cst0;
12512 /* Now we know that arg0 is (C + D) or (C - D) or -C and
12513 arg1 (M) == (1LL << cst) - 1.
12514 Store C into PMOP[0] and D into PMOP[1]. */
12515 pmop[0] = arg00;
12516 pmop[1] = arg01;
12517 which = code != NEGATE_EXPR;
12519 for (; which >= 0; which--)
12520 switch (codes[which])
12522 case BIT_AND_EXPR:
12523 case BIT_IOR_EXPR:
12524 case BIT_XOR_EXPR:
12525 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12526 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12527 if (codes[which] == BIT_AND_EXPR)
12529 if (cst0 != cst1)
12530 break;
12532 else if (cst0 != 0)
12533 break;
12534 /* If C or D is of the form (A & N) where
12535 (N & M) == M, or of the form (A | N) or
12536 (A ^ N) where (N & M) == 0, replace it with A. */
12537 pmop[which] = arg0xx[2 * which];
12538 break;
12539 case ERROR_MARK:
12540 if (TREE_CODE (pmop[which]) != INTEGER_CST)
12541 break;
12542 /* If C or D is a N where (N & M) == 0, it can be
12543 omitted (replaced with 0). */
12544 if ((code == PLUS_EXPR
12545 || (code == MINUS_EXPR && which == 0))
12546 && (cst1 & wi::to_wide (pmop[which])) == 0)
12547 pmop[which] = build_int_cst (type, 0);
12548 /* Similarly, with C - N where (-N & M) == 0. */
12549 if (code == MINUS_EXPR
12550 && which == 1
12551 && (cst1 & -wi::to_wide (pmop[which])) == 0)
12552 pmop[which] = build_int_cst (type, 0);
12553 break;
12554 default:
12555 gcc_unreachable ();
12558 /* Only build anything new if we optimized one or both arguments above. */
12559 if (pmop[0] == arg00 && pmop[1] == arg01)
12560 return NULL_TREE;
12562 if (TYPE_OVERFLOW_WRAPS (type))
12563 return type;
12564 else
12565 return unsigned_type_for (type);
12568 /* Used by contains_label_1 and contains_label_p. */
12570 struct contains_label_data
12572 hash_set<tree> *pset;
12573 bool inside_switch_p;
12576 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12577 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12578 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
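/* An illustrative case (hypothetical GNU C): in

     x = 0 ? ({ lab: 1; }) : 2;   with a "goto lab;" elsewhere,

   the dead arm must not be discarded because the label it contains
   is still a jump target; this walker is what detects such labels. */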
12580 static tree
12581 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12583 contains_label_data *d = (contains_label_data *) data;
12584 switch (TREE_CODE (*tp))
12586 case LABEL_EXPR:
12587 return *tp;
12589 case CASE_LABEL_EXPR:
12590 if (!d->inside_switch_p)
12591 return *tp;
12592 return NULL_TREE;
12594 case SWITCH_EXPR:
12595 if (!d->inside_switch_p)
12597 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12598 return *tp;
12599 d->inside_switch_p = true;
12600 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12601 return *tp;
12602 d->inside_switch_p = false;
12603 *walk_subtrees = 0;
12605 return NULL_TREE;
12607 case GOTO_EXPR:
12608 *walk_subtrees = 0;
12609 return NULL_TREE;
12611 default:
12612 return NULL_TREE;
12616 /* Return whether the sub-tree ST contains a label which is accessible from
12617 outside the sub-tree. */
12619 static bool
12620 contains_label_p (tree st)
12622 hash_set<tree> pset;
12623 contains_label_data data = { &pset, false };
12624 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12627 /* Fold a ternary expression of code CODE and type TYPE with operands
12628 OP0, OP1, and OP2. Return the folded expression if folding is
12629 successful. Otherwise, return NULL_TREE. */
12631 tree
12632 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12633 tree op0, tree op1, tree op2)
12635 tree tem;
12636 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12637 enum tree_code_class kind = TREE_CODE_CLASS (code);
12639 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12640 && TREE_CODE_LENGTH (code) == 3);
12642 /* If this is a commutative operation, and OP0 is a constant, move it
12643 to OP1 to reduce the number of tests below. */
12644 if (commutative_ternary_tree_code (code)
12645 && tree_swap_operands_p (op0, op1))
12646 return fold_build3_loc (loc, code, type, op1, op0, op2);
12648 tem = generic_simplify (loc, code, type, op0, op1, op2);
12649 if (tem)
12650 return tem;
12652 /* Strip any conversions that don't change the mode. This is safe
12653 for every expression, except for a comparison expression because
12654 its signedness is derived from its operands. So, in the latter
12655 case, only strip conversions that don't change the signedness.
12657 Note that this is done as an internal manipulation within the
12658 constant folder, in order to find the simplest representation of
12659 the arguments so that their form can be studied. In any cases,
12660 the appropriate type conversions should be put back in the tree
12661 that will get out of the constant folder. */
12662 if (op0)
12664 arg0 = op0;
12665 STRIP_NOPS (arg0);
12668 if (op1)
12670 arg1 = op1;
12671 STRIP_NOPS (arg1);
12674 if (op2)
12676 arg2 = op2;
12677 STRIP_NOPS (arg2);
12680 switch (code)
12682 case COMPONENT_REF:
12683 if (TREE_CODE (arg0) == CONSTRUCTOR
12684 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12686 unsigned HOST_WIDE_INT idx;
12687 tree field, value;
12688 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12689 if (field == arg1)
12690 return value;
12692 return NULL_TREE;
12694 case COND_EXPR:
12695 case VEC_COND_EXPR:
12696 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12697 so all simple results must be passed through pedantic_non_lvalue. */
12698 if (TREE_CODE (arg0) == INTEGER_CST)
12700 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12701 tem = integer_zerop (arg0) ? op2 : op1;
12702 /* Only optimize constant conditions when the selected branch
12703 has the same type as the COND_EXPR. This avoids optimizing
12704 away "c ? x : throw", where the throw has a void type.
12705 Avoid throwing away that operand which contains label. */
12706 if ((!TREE_SIDE_EFFECTS (unused_op)
12707 || !contains_label_p (unused_op))
12708 && (! VOID_TYPE_P (TREE_TYPE (tem))
12709 || VOID_TYPE_P (type)))
12710 return pedantic_non_lvalue_loc (loc, tem);
12711 return NULL_TREE;
12713 else if (TREE_CODE (arg0) == VECTOR_CST)
12715 unsigned HOST_WIDE_INT nelts;
12716 if ((TREE_CODE (arg1) == VECTOR_CST
12717 || TREE_CODE (arg1) == CONSTRUCTOR)
12718 && (TREE_CODE (arg2) == VECTOR_CST
12719 || TREE_CODE (arg2) == CONSTRUCTOR)
12720 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12722 vec_perm_builder sel (nelts, nelts, 1);
12723 for (unsigned int i = 0; i < nelts; i++)
12725 tree val = VECTOR_CST_ELT (arg0, i);
12726 if (integer_all_onesp (val))
12727 sel.quick_push (i);
12728 else if (integer_zerop (val))
12729 sel.quick_push (nelts + i);
12730 else /* Currently unreachable. */
12731 return NULL_TREE;
12733 vec_perm_indices indices (sel, 2, nelts);
12734 tree t = fold_vec_perm (type, arg1, arg2, indices);
12735 if (t != NULL_TREE)
12736 return t;
12740 /* If we have A op B ? A : C, we may be able to convert this to a
12741 simpler expression, depending on the operation and the values
12742 of B and C. Signed zeros prevent all of these transformations,
12743 for reasons given above each one.
12745 Also try swapping the arguments and inverting the conditional. */
12746 if (COMPARISON_CLASS_P (arg0)
12747 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12748 && !HONOR_SIGNED_ZEROS (element_mode (op1)))
12750 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
12751 if (tem)
12752 return tem;
12755 if (COMPARISON_CLASS_P (arg0)
12756 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12757 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
12759 location_t loc0 = expr_location_or (arg0, loc);
12760 tem = fold_invert_truthvalue (loc0, arg0);
12761 if (tem && COMPARISON_CLASS_P (tem))
12763 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
12764 if (tem)
12765 return tem;
12769 /* If the second operand is simpler than the third, swap them
12770 since that produces better jump optimization results. */
12771 if (truth_value_p (TREE_CODE (arg0))
12772 && tree_swap_operands_p (op1, op2))
12774 location_t loc0 = expr_location_or (arg0, loc);
12775 /* See if this can be inverted. If it can't, possibly because
12776 it was a floating-point inequality comparison, don't do
12777 anything. */
12778 tem = fold_invert_truthvalue (loc0, arg0);
12779 if (tem)
12780 return fold_build3_loc (loc, code, type, tem, op2, op1);
12783 /* Convert A ? 1 : 0 to simply A. */
12784 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12785 : (integer_onep (op1)
12786 && !VECTOR_TYPE_P (type)))
12787 && integer_zerop (op2)
12788 /* If we try to convert OP0 to our type, the
12789 call to fold will try to move the conversion inside
12790 a COND, which will recurse. In that case, the COND_EXPR
12791 is probably the best choice, so leave it alone. */
12792 && type == TREE_TYPE (arg0))
12793 return pedantic_non_lvalue_loc (loc, arg0);
12795 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12796 over COND_EXPR in cases such as floating point comparisons. */
12797 if (integer_zerop (op1)
12798 && code == COND_EXPR
12799 && integer_onep (op2)
12800 && !VECTOR_TYPE_P (type)
12801 && truth_value_p (TREE_CODE (arg0)))
12802 return pedantic_non_lvalue_loc (loc,
12803 fold_convert_loc (loc, type,
12804 invert_truthvalue_loc (loc,
12805 arg0)));
12807 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
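/* An illustrative instance (assumed 32-bit int a, bit patterns shown
   in hex):

     a < 0 ? 0x80000000 : 0   becomes   a & 0x80000000

   since the sign bit of a is set exactly when a < 0. */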
12808 if (TREE_CODE (arg0) == LT_EXPR
12809 && integer_zerop (TREE_OPERAND (arg0, 1))
12810 && integer_zerop (op2)
12811 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12813 /* sign_bit_p looks through both zero and sign extensions,
12814 but for this optimization only sign extensions are
12815 usable. */
12816 tree tem2 = TREE_OPERAND (arg0, 0);
12817 while (tem != tem2)
12819 if (TREE_CODE (tem2) != NOP_EXPR
12820 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12822 tem = NULL_TREE;
12823 break;
12825 tem2 = TREE_OPERAND (tem2, 0);
12827 /* sign_bit_p only checks ARG1 bits within A's precision.
12828 If <sign bit of A> has wider type than A, bits outside
12829 of A's precision in <sign bit of A> need to be checked.
12830 If they are all 0, this optimization needs to be done
12831 in unsigned A's type, if they are all 1 in signed A's type,
12832 otherwise this can't be done. */
12833 if (tem
12834 && TYPE_PRECISION (TREE_TYPE (tem))
12835 < TYPE_PRECISION (TREE_TYPE (arg1))
12836 && TYPE_PRECISION (TREE_TYPE (tem))
12837 < TYPE_PRECISION (type))
12839 int inner_width, outer_width;
12840 tree tem_type;
12842 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12843 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12844 if (outer_width > TYPE_PRECISION (type))
12845 outer_width = TYPE_PRECISION (type);
12847 wide_int mask = wi::shifted_mask
12848 (inner_width, outer_width - inner_width, false,
12849 TYPE_PRECISION (TREE_TYPE (arg1)));
12851 wide_int common = mask & wi::to_wide (arg1);
12852 if (common == mask)
12854 tem_type = signed_type_for (TREE_TYPE (tem));
12855 tem = fold_convert_loc (loc, tem_type, tem);
12857 else if (common == 0)
12859 tem_type = unsigned_type_for (TREE_TYPE (tem));
12860 tem = fold_convert_loc (loc, tem_type, tem);
12862 else
12863 tem = NULL;
12866 if (tem)
12867 return
12868 fold_convert_loc (loc, type,
12869 fold_build2_loc (loc, BIT_AND_EXPR,
12870 TREE_TYPE (tem), tem,
12871 fold_convert_loc (loc,
12872 TREE_TYPE (tem),
12873 arg1)));
12876 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12877 already handled above. */
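/* An illustrative instance with N == 3:

     (a >> 3) & 1 ? 8 : 0   becomes   a & 8

   as both forms isolate bit 3 of a. */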
12878 if (TREE_CODE (arg0) == BIT_AND_EXPR
12879 && integer_onep (TREE_OPERAND (arg0, 1))
12880 && integer_zerop (op2)
12881 && integer_pow2p (arg1))
12883 tree tem = TREE_OPERAND (arg0, 0);
12884 STRIP_NOPS (tem);
12885 if (TREE_CODE (tem) == RSHIFT_EXPR
12886 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
12887 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
12888 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
12889 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12890 fold_convert_loc (loc, type,
12891 TREE_OPERAND (tem, 0)),
12892 op1);
12895 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12896 is probably obsolete because the first operand should be a
12897 truth value (that's why we have the two cases above), but let's
12898 leave it in until we can confirm this for all front-ends. */
12899 if (integer_zerop (op2)
12900 && TREE_CODE (arg0) == NE_EXPR
12901 && integer_zerop (TREE_OPERAND (arg0, 1))
12902 && integer_pow2p (arg1)
12903 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12904 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12905 arg1, OEP_ONLY_CONST)
12906 /* operand_equal_p compares just value, not precision, so e.g.
12907 arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
12908 second operand 32-bit -128, which is not a power of two (or vice
12909 versa). */
12910 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
12911 return pedantic_non_lvalue_loc (loc,
12912 fold_convert_loc (loc, type,
12913 TREE_OPERAND (arg0,
12914 0)));
12916 /* Disable the transformations below for vectors, since
12917 fold_binary_op_with_conditional_arg may undo them immediately,
12918 yielding an infinite loop. */
12919 if (code == VEC_COND_EXPR)
12920 return NULL_TREE;
12922 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12923 if (integer_zerop (op2)
12924 && truth_value_p (TREE_CODE (arg0))
12925 && truth_value_p (TREE_CODE (arg1))
12926 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12927 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
12928 : TRUTH_ANDIF_EXPR,
12929 type, fold_convert_loc (loc, type, arg0), op1);
12931 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12932 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
12933 && truth_value_p (TREE_CODE (arg0))
12934 && truth_value_p (TREE_CODE (arg1))
12935 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12937 location_t loc0 = expr_location_or (arg0, loc);
12938 /* Only perform transformation if ARG0 is easily inverted. */
12939 tem = fold_invert_truthvalue (loc0, arg0);
12940 if (tem)
12941 return fold_build2_loc (loc, code == VEC_COND_EXPR
12942 ? BIT_IOR_EXPR
12943 : TRUTH_ORIF_EXPR,
12944 type, fold_convert_loc (loc, type, tem),
12945 op1);
12948 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12949 if (integer_zerop (arg1)
12950 && truth_value_p (TREE_CODE (arg0))
12951 && truth_value_p (TREE_CODE (op2))
12952 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12954 location_t loc0 = expr_location_or (arg0, loc);
12955 /* Only perform transformation if ARG0 is easily inverted. */
12956 tem = fold_invert_truthvalue (loc0, arg0);
12957 if (tem)
12958 return fold_build2_loc (loc, code == VEC_COND_EXPR
12959 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
12960 type, fold_convert_loc (loc, type, tem),
12961 op2);
12964 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12965 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
12966 && truth_value_p (TREE_CODE (arg0))
12967 && truth_value_p (TREE_CODE (op2))
12968 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12969 return fold_build2_loc (loc, code == VEC_COND_EXPR
12970 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
12971 type, fold_convert_loc (loc, type, arg0), op2);
12973 return NULL_TREE;
12975 case CALL_EXPR:
12976 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12977 of fold_ternary on them. */
12978 gcc_unreachable ();
12980 case BIT_FIELD_REF:
12981 if (TREE_CODE (arg0) == VECTOR_CST
12982 && (type == TREE_TYPE (TREE_TYPE (arg0))
12983 || (VECTOR_TYPE_P (type)
12984 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
12985 && tree_fits_uhwi_p (op1)
12986 && tree_fits_uhwi_p (op2))
12988 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
12989 unsigned HOST_WIDE_INT width
12990 = (TREE_CODE (eltype) == BOOLEAN_TYPE
12991 ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
12992 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
12993 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
12995 if (n != 0
12996 && (idx % width) == 0
12997 && (n % width) == 0
12998 && known_le ((idx + n) / width,
12999 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13001 idx = idx / width;
13002 n = n / width;
13004 if (TREE_CODE (arg0) == VECTOR_CST)
13006 if (n == 1)
13008 tem = VECTOR_CST_ELT (arg0, idx);
13009 if (VECTOR_TYPE_P (type))
13010 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13011 return tem;
13014 tree_vector_builder vals (type, n, 1);
13015 for (unsigned i = 0; i < n; ++i)
13016 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13017 return vals.build ();
13022 /* On constants we can use native encode/interpret to constant
13023 fold (nearly) all BIT_FIELD_REFs. */
13024 if (CONSTANT_CLASS_P (arg0)
13025 && can_native_interpret_type_p (type)
13026 && BITS_PER_UNIT == 8
13027 && tree_fits_uhwi_p (op1)
13028 && tree_fits_uhwi_p (op2))
13030 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13031 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13032 /* Limit us to a reasonable amount of work. To relax the
13033 other limitations we need bit-shifting of the buffer
13034 and rounding up the size. */
13035 if (bitpos % BITS_PER_UNIT == 0
13036 && bitsize % BITS_PER_UNIT == 0
13037 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13039 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13040 unsigned HOST_WIDE_INT len
13041 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13042 bitpos / BITS_PER_UNIT);
13043 if (len > 0
13044 && len * BITS_PER_UNIT >= bitsize)
13046 tree v = native_interpret_expr (type, b,
13047 bitsize / BITS_PER_UNIT);
13048 if (v)
13049 return v;
13054 return NULL_TREE;
13056 case VEC_PERM_EXPR:
13057 /* Perform constant folding of VEC_PERM_EXPR. */
13058 if (TREE_CODE (arg2) == VECTOR_CST
13059 && TREE_CODE (op0) == VECTOR_CST
13060 && TREE_CODE (op1) == VECTOR_CST)
13062 /* Build a vector of integers from the tree mask. */
13063 vec_perm_builder builder;
13064 if (!tree_to_vec_perm_builder (&builder, arg2))
13065 return NULL_TREE;
13067 /* Create a vec_perm_indices for the integer vector. */
13068 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13069 bool single_arg = (op0 == op1);
13070 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13071 return fold_vec_perm (type, op0, op1, sel);
13073 return NULL_TREE;
13075 case BIT_INSERT_EXPR:
13076 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
13077 if (TREE_CODE (arg0) == INTEGER_CST
13078 && TREE_CODE (arg1) == INTEGER_CST)
13080 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13081 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13082 wide_int tem = (wi::to_wide (arg0)
13083 & wi::shifted_mask (bitpos, bitsize, true,
13084 TYPE_PRECISION (type)));
13085 wide_int tem2
13086 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13087 bitsize), bitpos);
13088 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
13090 else if (TREE_CODE (arg0) == VECTOR_CST
13091 && CONSTANT_CLASS_P (arg1)
13092 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13093 TREE_TYPE (arg1)))
13095 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13096 unsigned HOST_WIDE_INT elsize
13097 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13098 if (bitpos % elsize == 0)
13100 unsigned k = bitpos / elsize;
13101 unsigned HOST_WIDE_INT nelts;
13102 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13103 return arg0;
13104 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13106 tree_vector_builder elts (type, nelts, 1);
13107 elts.quick_grow (nelts);
13108 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13109 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13110 return elts.build ();
13114 return NULL_TREE;
13116 default:
13117 return NULL_TREE;
13118 } /* switch (code) */
13121 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13122 of an array (or vector). *CTOR_IDX if non-NULL is updated with the
13123 constructor element index of the value returned. If the element is
13124 not found NULL_TREE is returned and *CTOR_IDX is updated to
13125 the index of the element after the ACCESS_INDEX position (which
13126 may be outside of the CTOR array). */
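/* A usage sketch (hypothetical constructor): for the CONSTRUCTOR of
   "int a[4] = { [2] = 7 };", an ACCESS_INDEX of 2 returns the
   INTEGER_CST 7 with *CTOR_IDX set to 0 (the element's position in
   the constructor), while an index with no explicit initializer
   returns NULL_TREE. */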
13128 tree
13129 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13130 unsigned *ctor_idx)
13132 tree index_type = NULL_TREE;
13133 signop index_sgn = UNSIGNED;
13134 offset_int low_bound = 0;
13136 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13138 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13139 if (domain_type && TYPE_MIN_VALUE (domain_type))
13141 /* Static constructors for variably sized objects make no sense. */
13142 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13143 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13144 /* ??? When it is obvious that the range is signed, treat it so. */
13145 if (TYPE_UNSIGNED (index_type)
13146 && TYPE_MAX_VALUE (domain_type)
13147 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13148 TYPE_MIN_VALUE (domain_type)))
13150 index_sgn = SIGNED;
13151 low_bound
13152 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13153 SIGNED);
13155 else
13157 index_sgn = TYPE_SIGN (index_type);
13158 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13163 if (index_type)
13164 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13165 index_sgn);
13167 offset_int index = low_bound;
13168 if (index_type)
13169 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13171 offset_int max_index = index;
13172 unsigned cnt;
13173 tree cfield, cval;
13174 bool first_p = true;
13176 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13178 /* Array constructor might explicitly set index, or specify a range,
13179 or leave index NULL meaning that it is the next index after the
13180 previous one. */
13181 if (cfield)
13183 if (TREE_CODE (cfield) == INTEGER_CST)
13184 max_index = index
13185 = offset_int::from (wi::to_wide (cfield), index_sgn);
13186 else
13188 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13189 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13190 index_sgn);
13191 max_index
13192 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13193 index_sgn);
13194 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13197 else if (!first_p)
13199 index = max_index + 1;
13200 if (index_type)
13201 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13202 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13203 max_index = index;
13205 else
13206 first_p = false;
13208 /* Do we have a match? */
13209 if (wi::cmp (access_index, index, index_sgn) >= 0)
13211 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13213 if (ctor_idx)
13214 *ctor_idx = cnt;
13215 return cval;
13218 else if (in_gimple_form)
13219 /* We're past the element we search for. Note that during parsing
13220 the elements might not be sorted.
13221 ??? We should use a binary search and a flag on the
13222 CONSTRUCTOR as to whether elements are sorted in declaration
13223 order. */
13224 break;
13226 if (ctor_idx)
13227 *ctor_idx = cnt;
13228 return NULL_TREE;
13231 /* Perform constant folding and related simplification of EXPR.
13232 The related simplifications include x*1 => x, x*0 => 0, etc.,
13233 and application of the associative law.
13234 NOP_EXPR conversions may be removed freely (as long as we
13235 are careful not to change the type of the overall expression).
13236 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13237 but we can constant-fold them if they have constant operands. */
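/* A usage sketch (hypothetical tree x): callers typically rebuild a
   node and then fold it, e.g.

     tree sum = fold (build2 (PLUS_EXPR, integer_type_node,
                              x, integer_zero_node));

   which yields x itself, since x + 0 simplifies. Constants are
   returned unchanged via the tcc_constant early exit below. */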
13239 #ifdef ENABLE_FOLD_CHECKING
13240 # define fold(x) fold_1 (x)
13241 static tree fold_1 (tree);
13242 static
13243 #endif
13244 tree
13245 fold (tree expr)
13247 const tree t = expr;
13248 enum tree_code code = TREE_CODE (t);
13249 enum tree_code_class kind = TREE_CODE_CLASS (code);
13250 tree tem;
13251 location_t loc = EXPR_LOCATION (expr);
13253 /* Return right away if a constant. */
13254 if (kind == tcc_constant)
13255 return t;
13257 /* CALL_EXPR-like objects with variable numbers of operands are
13258 treated specially. */
13259 if (kind == tcc_vl_exp)
13261 if (code == CALL_EXPR)
13263 tem = fold_call_expr (loc, expr, false);
13264 return tem ? tem : expr;
13266 return expr;
13269 if (IS_EXPR_CODE_CLASS (kind))
13271 tree type = TREE_TYPE (t);
13272 tree op0, op1, op2;
13274 switch (TREE_CODE_LENGTH (code))
13276 case 1:
13277 op0 = TREE_OPERAND (t, 0);
13278 tem = fold_unary_loc (loc, code, type, op0);
13279 return tem ? tem : expr;
13280 case 2:
13281 op0 = TREE_OPERAND (t, 0);
13282 op1 = TREE_OPERAND (t, 1);
13283 tem = fold_binary_loc (loc, code, type, op0, op1);
13284 return tem ? tem : expr;
13285 case 3:
13286 op0 = TREE_OPERAND (t, 0);
13287 op1 = TREE_OPERAND (t, 1);
13288 op2 = TREE_OPERAND (t, 2);
13289 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13290 return tem ? tem : expr;
13291 default:
13292 break;
13296 switch (code)
13298 case ARRAY_REF:
13300 tree op0 = TREE_OPERAND (t, 0);
13301 tree op1 = TREE_OPERAND (t, 1);
13303 if (TREE_CODE (op1) == INTEGER_CST
13304 && TREE_CODE (op0) == CONSTRUCTOR
13305 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13307 tree val = get_array_ctor_element_at_index (op0,
13308 wi::to_offset (op1));
13309 if (val)
13310 return val;
13313 return t;
13316 /* Return a VECTOR_CST if possible. */
13317 case CONSTRUCTOR:
13319 tree type = TREE_TYPE (t);
13320 if (TREE_CODE (type) != VECTOR_TYPE)
13321 return t;
13323 unsigned i;
13324 tree val;
13325 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13326 if (! CONSTANT_CLASS_P (val))
13327 return t;
13329 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13332 case CONST_DECL:
13333 return fold (DECL_INITIAL (t));
13335 default:
13336 return t;
13337 } /* switch (code) */
13340 #ifdef ENABLE_FOLD_CHECKING
13341 #undef fold
13343 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13344 hash_table<nofree_ptr_hash<const tree_node> > *);
13345 static void fold_check_failed (const_tree, const_tree);
13346 void print_fold_checksum (const_tree);
13348 /* When --enable-checking=fold, compute a digest of expr before
13349 and after actual fold call to see if fold did not accidentally
13350 change original expr. */
13352 tree
13353 fold (tree expr)
13355 tree ret;
13356 struct md5_ctx ctx;
13357 unsigned char checksum_before[16], checksum_after[16];
13358 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13360 md5_init_ctx (&ctx);
13361 fold_checksum_tree (expr, &ctx, &ht);
13362 md5_finish_ctx (&ctx, checksum_before);
13363 ht.empty ();
13365 ret = fold_1 (expr);
13367 md5_init_ctx (&ctx);
13368 fold_checksum_tree (expr, &ctx, &ht);
13369 md5_finish_ctx (&ctx, checksum_after);
13371 if (memcmp (checksum_before, checksum_after, 16))
13372 fold_check_failed (expr, ret);
13374 return ret;
13377 void
13378 print_fold_checksum (const_tree expr)
13380 struct md5_ctx ctx;
13381 unsigned char checksum[16], cnt;
13382 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13384 md5_init_ctx (&ctx);
13385 fold_checksum_tree (expr, &ctx, &ht);
13386 md5_finish_ctx (&ctx, checksum);
13387 for (cnt = 0; cnt < 16; ++cnt)
13388 fprintf (stderr, "%02x", checksum[cnt]);
13389 putc ('\n', stderr);
13392 static void
13393 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13395 internal_error ("fold check: original tree changed by fold");
13398 static void
13399 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13400 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13402 const tree_node **slot;
13403 enum tree_code code;
13404 union tree_node *buf;
13405 int i, len;
13407 recursive_label:
13408 if (expr == NULL)
13409 return;
13410 slot = ht->find_slot (expr, INSERT);
13411 if (*slot != NULL)
13412 return;
13413 *slot = expr;
13414 code = TREE_CODE (expr);
13415 if (TREE_CODE_CLASS (code) == tcc_declaration
13416 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13418 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13419 size_t sz = tree_size (expr);
13420 buf = XALLOCAVAR (union tree_node, sz);
13421 memcpy ((char *) buf, expr, sz);
13422 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
13423 buf->decl_with_vis.symtab_node = NULL;
13424 buf->base.nowarning_flag = 0;
13425 expr = (tree) buf;
13427 else if (TREE_CODE_CLASS (code) == tcc_type
13428 && (TYPE_POINTER_TO (expr)
13429 || TYPE_REFERENCE_TO (expr)
13430 || TYPE_CACHED_VALUES_P (expr)
13431 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13432 || TYPE_NEXT_VARIANT (expr)
13433 || TYPE_ALIAS_SET_KNOWN_P (expr)))
13435 /* Allow these fields to be modified. */
13436 tree tmp;
13437 size_t sz = tree_size (expr);
13438 buf = XALLOCAVAR (union tree_node, sz);
13439 memcpy ((char *) buf, expr, sz);
13440 expr = tmp = (tree) buf;
13441 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13442 TYPE_POINTER_TO (tmp) = NULL;
13443 TYPE_REFERENCE_TO (tmp) = NULL;
13444 TYPE_NEXT_VARIANT (tmp) = NULL;
13445 TYPE_ALIAS_SET (tmp) = -1;
13446 if (TYPE_CACHED_VALUES_P (tmp))
13448 TYPE_CACHED_VALUES_P (tmp) = 0;
13449 TYPE_CACHED_VALUES (tmp) = NULL;
13452 else if (TREE_NO_WARNING (expr) && (DECL_P (expr) || EXPR_P (expr)))
13454 /* Allow TREE_NO_WARNING to be set. Perhaps we shouldn't allow that
13455 and change builtins.c etc. instead - see PR89543. */
13456 size_t sz = tree_size (expr);
13457 buf = XALLOCAVAR (union tree_node, sz);
13458 memcpy ((char *) buf, expr, sz);
13459 buf->base.nowarning_flag = 0;
13460 expr = (tree) buf;
13462 md5_process_bytes (expr, tree_size (expr), ctx);
13463 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13464 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13465 if (TREE_CODE_CLASS (code) != tcc_type
13466 && TREE_CODE_CLASS (code) != tcc_declaration
13467 && code != TREE_LIST
13468 && code != SSA_NAME
13469 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13470 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13471 switch (TREE_CODE_CLASS (code))
13473 case tcc_constant:
13474 switch (code)
13476 case STRING_CST:
13477 md5_process_bytes (TREE_STRING_POINTER (expr),
13478 TREE_STRING_LENGTH (expr), ctx);
13479 break;
13480 case COMPLEX_CST:
13481 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13482 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13483 break;
13484 case VECTOR_CST:
13485 len = vector_cst_encoded_nelts (expr);
13486 for (i = 0; i < len; ++i)
13487 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
13488 break;
13489 default:
13490 break;
13492 break;
13493 case tcc_exceptional:
13494 switch (code)
13496 case TREE_LIST:
13497 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13498 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13499 expr = TREE_CHAIN (expr);
13500 goto recursive_label;
13501 break;
13502 case TREE_VEC:
13503 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13504 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13505 break;
13506 default:
13507 break;
13509 break;
13510 case tcc_expression:
13511 case tcc_reference:
13512 case tcc_comparison:
13513 case tcc_unary:
13514 case tcc_binary:
13515 case tcc_statement:
13516 case tcc_vl_exp:
13517 len = TREE_OPERAND_LENGTH (expr);
13518 for (i = 0; i < len; ++i)
13519 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13520 break;
13521 case tcc_declaration:
13522 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13523 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13524 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13526 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13527 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13528 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13529 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13530 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13533 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13535 if (TREE_CODE (expr) == FUNCTION_DECL)
13537 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13538 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13540 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13542 break;
13543 case tcc_type:
13544 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13545 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13546 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13547 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13548 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13549 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13550 if (INTEGRAL_TYPE_P (expr)
13551 || SCALAR_FLOAT_TYPE_P (expr))
13553 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13554 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13556 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13557 if (TREE_CODE (expr) == RECORD_TYPE
13558 || TREE_CODE (expr) == UNION_TYPE
13559 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13560 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13561 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13562 break;
13563 default:
13564 break;
13568 /* Helper function for outputting the checksum of a tree T. When
13569 debugging with gdb, you can "define mynext" to be "next" followed
13570 by "call debug_fold_checksum (op0)", then just trace down till the
13571 outputs differ. */
13573 DEBUG_FUNCTION void
13574 debug_fold_checksum (const_tree t)
13576 int i;
13577 unsigned char checksum[16];
13578 struct md5_ctx ctx;
13579 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13581 md5_init_ctx (&ctx);
13582 fold_checksum_tree (t, &ctx, &ht);
13583 md5_finish_ctx (&ctx, checksum);
13584 ht.empty ();
13586 for (i = 0; i < 16; i++)
13587 fprintf (stderr, "%d ", checksum[i]);
13589 fprintf (stderr, "\n");
13592 #endif
13594 /* Fold a unary tree expression with code CODE of type TYPE with an
13595 operand OP0. LOC is the location of the resulting expression.
13596 Return a folded expression if successful. Otherwise, return a tree
13597 expression with code CODE of type TYPE with an operand OP0. */
13599 tree
13600 fold_build1_loc (location_t loc,
13601 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13603 tree tem;
13604 #ifdef ENABLE_FOLD_CHECKING
13605 unsigned char checksum_before[16], checksum_after[16];
13606 struct md5_ctx ctx;
13607 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13609 md5_init_ctx (&ctx);
13610 fold_checksum_tree (op0, &ctx, &ht);
13611 md5_finish_ctx (&ctx, checksum_before);
13612 ht.empty ();
13613 #endif
13615 tem = fold_unary_loc (loc, code, type, op0);
13616 if (!tem)
13617 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13619 #ifdef ENABLE_FOLD_CHECKING
13620 md5_init_ctx (&ctx);
13621 fold_checksum_tree (op0, &ctx, &ht);
13622 md5_finish_ctx (&ctx, checksum_after);
13624 if (memcmp (checksum_before, checksum_after, 16))
13625 fold_check_failed (op0, tem);
13626 #endif
13627 return tem;
13630 /* Fold a binary tree expression with code CODE of type TYPE with
13631 operands OP0 and OP1. LOC is the location of the resulting
13632 expression. Return a folded expression if successful. Otherwise,
13633 return a tree expression with code CODE of type TYPE with operands
13634 OP0 and OP1. */
13636 tree
13637 fold_build2_loc (location_t loc,
13638 enum tree_code code, tree type, tree op0, tree op1
13639 MEM_STAT_DECL)
13641 tree tem;
13642 #ifdef ENABLE_FOLD_CHECKING
13643 unsigned char checksum_before_op0[16],
13644 checksum_before_op1[16],
13645 checksum_after_op0[16],
13646 checksum_after_op1[16];
13647 struct md5_ctx ctx;
13648 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13650 md5_init_ctx (&ctx);
13651 fold_checksum_tree (op0, &ctx, &ht);
13652 md5_finish_ctx (&ctx, checksum_before_op0);
13653 ht.empty ();
13655 md5_init_ctx (&ctx);
13656 fold_checksum_tree (op1, &ctx, &ht);
13657 md5_finish_ctx (&ctx, checksum_before_op1);
13658 ht.empty ();
13659 #endif
13661 tem = fold_binary_loc (loc, code, type, op0, op1);
13662 if (!tem)
13663 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13665 #ifdef ENABLE_FOLD_CHECKING
13666 md5_init_ctx (&ctx);
13667 fold_checksum_tree (op0, &ctx, &ht);
13668 md5_finish_ctx (&ctx, checksum_after_op0);
13669 ht.empty ();
13671 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13672 fold_check_failed (op0, tem);
13674 md5_init_ctx (&ctx);
13675 fold_checksum_tree (op1, &ctx, &ht);
13676 md5_finish_ctx (&ctx, checksum_after_op1);
13678 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13679 fold_check_failed (op1, tem);
13680 #endif
13681 return tem;
13684 /* Fold a ternary tree expression with code CODE of type TYPE with
13685 operands OP0, OP1, and OP2. Return a folded expression if
13686 successful. Otherwise, return a tree expression with code CODE of
13687 type TYPE with operands OP0, OP1, and OP2. */
13689 tree
13690 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13691 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13693 tree tem;
13694 #ifdef ENABLE_FOLD_CHECKING
13695 unsigned char checksum_before_op0[16],
13696 checksum_before_op1[16],
13697 checksum_before_op2[16],
13698 checksum_after_op0[16],
13699 checksum_after_op1[16],
13700 checksum_after_op2[16];
13701 struct md5_ctx ctx;
13702 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13704 md5_init_ctx (&ctx);
13705 fold_checksum_tree (op0, &ctx, &ht);
13706 md5_finish_ctx (&ctx, checksum_before_op0);
13707 ht.empty ();
13709 md5_init_ctx (&ctx);
13710 fold_checksum_tree (op1, &ctx, &ht);
13711 md5_finish_ctx (&ctx, checksum_before_op1);
13712 ht.empty ();
13714 md5_init_ctx (&ctx);
13715 fold_checksum_tree (op2, &ctx, &ht);
13716 md5_finish_ctx (&ctx, checksum_before_op2);
13717 ht.empty ();
13718 #endif
13720 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13721 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13722 if (!tem)
13723 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13725 #ifdef ENABLE_FOLD_CHECKING
13726 md5_init_ctx (&ctx);
13727 fold_checksum_tree (op0, &ctx, &ht);
13728 md5_finish_ctx (&ctx, checksum_after_op0);
13729 ht.empty ();
13731 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13732 fold_check_failed (op0, tem);
13734 md5_init_ctx (&ctx);
13735 fold_checksum_tree (op1, &ctx, &ht);
13736 md5_finish_ctx (&ctx, checksum_after_op1);
13737 ht.empty ();
13739 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13740 fold_check_failed (op1, tem);
13742 md5_init_ctx (&ctx);
13743 fold_checksum_tree (op2, &ctx, &ht);
13744 md5_finish_ctx (&ctx, checksum_after_op2);
13746 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13747 fold_check_failed (op2, tem);
13748 #endif
13749 return tem;
13752 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13753 arguments in ARGARRAY, and a null static chain.
13754 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13755 of type TYPE from the given operands as constructed by build_call_array. */
13757 tree
13758 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13759 int nargs, tree *argarray)
13761 tree tem;
13762 #ifdef ENABLE_FOLD_CHECKING
13763 unsigned char checksum_before_fn[16],
13764 checksum_before_arglist[16],
13765 checksum_after_fn[16],
13766 checksum_after_arglist[16];
13767 struct md5_ctx ctx;
13768 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13769 int i;
13771 md5_init_ctx (&ctx);
13772 fold_checksum_tree (fn, &ctx, &ht);
13773 md5_finish_ctx (&ctx, checksum_before_fn);
13774 ht.empty ();
13776 md5_init_ctx (&ctx);
13777 for (i = 0; i < nargs; i++)
13778 fold_checksum_tree (argarray[i], &ctx, &ht);
13779 md5_finish_ctx (&ctx, checksum_before_arglist);
13780 ht.empty ();
13781 #endif
13783 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13784 if (!tem)
13785 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13787 #ifdef ENABLE_FOLD_CHECKING
13788 md5_init_ctx (&ctx);
13789 fold_checksum_tree (fn, &ctx, &ht);
13790 md5_finish_ctx (&ctx, checksum_after_fn);
13791 ht.empty ();
13793 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13794 fold_check_failed (fn, tem);
13796 md5_init_ctx (&ctx);
13797 for (i = 0; i < nargs; i++)
13798 fold_checksum_tree (argarray[i], &ctx, &ht);
13799 md5_finish_ctx (&ctx, checksum_after_arglist);
13801 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13802 fold_check_failed (NULL_TREE, tem);
13803 #endif
13804 return tem;
13807 /* Perform constant folding and related simplification of initializer
13808 expression EXPR. These behave identically to "fold_buildN" but ignore
13809 potential run-time traps and exceptions that fold must preserve. */
13811 #define START_FOLD_INIT \
13812 int saved_signaling_nans = flag_signaling_nans;\
13813 int saved_trapping_math = flag_trapping_math;\
13814 int saved_rounding_math = flag_rounding_math;\
13815 int saved_trapv = flag_trapv;\
13816 int saved_folding_initializer = folding_initializer;\
13817 flag_signaling_nans = 0;\
13818 flag_trapping_math = 0;\
13819 flag_rounding_math = 0;\
13820 flag_trapv = 0;\
13821 folding_initializer = 1;
13823 #define END_FOLD_INIT \
13824 flag_signaling_nans = saved_signaling_nans;\
13825 flag_trapping_math = saved_trapping_math;\
13826 flag_rounding_math = saved_rounding_math;\
13827 flag_trapv = saved_trapv;\
13828 folding_initializer = saved_folding_initializer;
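/* An illustrative difference (assuming a target with infinities and
   -ftrapping-math in effect): fold_build2_loc leaves 1.0 / 0.0
   unfolded because the division could trap at run time, whereas
   fold_build2_initializer_loc folds it to +Inf, as an initializer is
   evaluated once at translation time where no trap can occur. */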
13830 tree
13831 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13832 tree type, tree op)
13834 tree result;
13835 START_FOLD_INIT;
13837 result = fold_build1_loc (loc, code, type, op);
13839 END_FOLD_INIT;
13840 return result;
13843 tree
13844 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13845 tree type, tree op0, tree op1)
13847 tree result;
13848 START_FOLD_INIT;
13850 result = fold_build2_loc (loc, code, type, op0, op1);
13852 END_FOLD_INIT;
13853 return result;
13856 tree
13857 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
13858 int nargs, tree *argarray)
13860 tree result;
13861 START_FOLD_INIT;
13863 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
13865 END_FOLD_INIT;
13866 return result;
13869 #undef START_FOLD_INIT
13870 #undef END_FOLD_INIT
13872 /* Determine if the first argument is a multiple of the second argument.
13873 Return 0 if it is not, or if we cannot easily determine that it is.
13875 An example of the sort of thing we care about (at this point; this routine
13876 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13877 fold cases do now) is discovering that
13879 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13881 is a multiple of
13883 SAVE_EXPR (J * 8)
13885 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13887 This code also handles discovering that
13889 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13891 is a multiple of 8, so we don't have to worry about a possible
13892 remainder.
13894 Note that we *look* inside a SAVE_EXPR only to determine how it was
13895 calculated; it is not safe for fold to do much of anything else with the
13896 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13897 at run time. For example, the latter example above *cannot* be implemented
13898 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13899 evaluation time of the original SAVE_EXPR is not necessarily the same at
13900 the time the new expression is evaluated. The only optimization of this
13901 sort that would be valid is changing
13903 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13905 divided by 8 to
13907 SAVE_EXPR (I) * SAVE_EXPR (J)
13909 (where the same SAVE_EXPR (J) is used in the original and the
13910 transformed version). */
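/* Worked examples (hypothetical trees): for TOP = x * 8 + 24 and
   BOTTOM = 8, the PLUS_EXPR case checks that 24 is a multiple of 8
   and recurses into x * 8, whose MULT_EXPR case sees the constant
   factor 8, so the function returns 1. For TOP = x + 4 and
   BOTTOM = 8 it returns 0, since nothing is known about x. */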
13912 int
13913 multiple_of_p (tree type, const_tree top, const_tree bottom)
13915 gimple *stmt;
13916 tree t1, op1, op2;
13918 if (operand_equal_p (top, bottom, 0))
13919 return 1;
13921 if (TREE_CODE (type) != INTEGER_TYPE)
13922 return 0;
13924 switch (TREE_CODE (top))
13926 case BIT_AND_EXPR:
13927 /* Bitwise and provides a power of two multiple. If the mask is
13928 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13929 if (!integer_pow2p (bottom))
13930 return 0;
13931 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13932 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13934 case MULT_EXPR:
13935 if (TREE_CODE (bottom) == INTEGER_CST)
13937 op1 = TREE_OPERAND (top, 0);
13938 op2 = TREE_OPERAND (top, 1);
13939 if (TREE_CODE (op1) == INTEGER_CST)
13940 std::swap (op1, op2);
13941 if (TREE_CODE (op2) == INTEGER_CST)
13943 if (multiple_of_p (type, op2, bottom))
13944 return 1;
13945 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
13946 if (multiple_of_p (type, bottom, op2))
13948 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
13949 wi::to_widest (op2));
13950 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
13952 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
13953 return multiple_of_p (type, op1, op2);
13956 return multiple_of_p (type, op1, bottom);
13959 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13960 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13962 case MINUS_EXPR:
13963 /* It is impossible to prove if op0 - op1 is multiple of bottom
13964 precisely, so be conservative here checking if both op0 and op1
13965 are multiple of bottom. Note we check the second operand first
13966 since it's usually simpler. */
13967 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13968 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13970 case PLUS_EXPR:
13971 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
13972 as op0 - 3 if the expression has unsigned type. For example,
13973 (X / 3) + 0xfffffffd is multiple of 3, but 0xfffffffd is not. */
13974 op1 = TREE_OPERAND (top, 1);
13975 if (TYPE_UNSIGNED (type)
13976 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
13977 op1 = fold_build1 (NEGATE_EXPR, type, op1);
13978 return (multiple_of_p (type, op1, bottom)
13979 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13981 case LSHIFT_EXPR:
13982 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13984 op1 = TREE_OPERAND (top, 1);
13985 /* const_binop may not detect overflow correctly,
13986 so check for it explicitly here. */
13987 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
13988 wi::to_wide (op1))
13989 && (t1 = fold_convert (type,
13990 const_binop (LSHIFT_EXPR, size_one_node,
13991 op1))) != 0
13992 && !TREE_OVERFLOW (t1))
13993 return multiple_of_p (type, t1, bottom);
13995 return 0;
13997 case NOP_EXPR:
13998 /* Can't handle conversions from non-integral or wider integral type. */
13999 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14000 || (TYPE_PRECISION (type)
14001 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14002 return 0;
14004 /* fall through */
14006 case SAVE_EXPR:
14007 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14009 case COND_EXPR:
14010 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14011 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14013 case INTEGER_CST:
14014 if (TREE_CODE (bottom) != INTEGER_CST
14015 || integer_zerop (bottom)
14016 || (TYPE_UNSIGNED (type)
14017 && (tree_int_cst_sgn (top) < 0
14018 || tree_int_cst_sgn (bottom) < 0)))
14019 return 0;
14020 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14021 SIGNED);
14023 case SSA_NAME:
14024 if (TREE_CODE (bottom) == INTEGER_CST
14025 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
14026 && gimple_code (stmt) == GIMPLE_ASSIGN)
14028 enum tree_code code = gimple_assign_rhs_code (stmt);
14030 /* Check for special cases to see if top is defined as a multiple
14031 of bottom:
14033 top = (X & ~(bottom - 1)) ; bottom is power of 2
14037 Y = X % bottom
14038 top = X - Y. */
14039 if (code == BIT_AND_EXPR
14040 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14041 && TREE_CODE (op2) == INTEGER_CST
14042 && integer_pow2p (bottom)
14043 && wi::multiple_of_p (wi::to_widest (op2),
14044 wi::to_widest (bottom), UNSIGNED))
14045 return 1;
14047 op1 = gimple_assign_rhs1 (stmt);
14048 if (code == MINUS_EXPR
14049 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
14050 && TREE_CODE (op2) == SSA_NAME
14051 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
14052 && gimple_code (stmt) == GIMPLE_ASSIGN
14053 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
14054 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
14055 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
14056 return 1;
14059 /* fall through */
14061 default:
14062 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
14063 return multiple_p (wi::to_poly_widest (top),
14064 wi::to_poly_widest (bottom));
14066 return 0;
14070 /* Return true if expression X cannot be (or contain) a NaN or infinity.
14071 This function returns true for integer expressions, and returns
14072 false if uncertain. */
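/* Illustrative cases: an expression of integral type is trivially
   finite, which the mode check below handles, since integral modes
   honor neither NaNs nor infinities; for fabs (x) the CALL_EXPR arm
   recurses into x, since fabs maps finite inputs to finite results. */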
14074 bool
14075 tree_expr_finite_p (const_tree x)
14077 machine_mode mode = element_mode (x);
14078 if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
14079 return true;
14080 switch (TREE_CODE (x))
14082 case REAL_CST:
14083 return real_isfinite (TREE_REAL_CST_PTR (x));
14084 case COMPLEX_CST:
14085 return tree_expr_finite_p (TREE_REALPART (x))
14086 && tree_expr_finite_p (TREE_IMAGPART (x));
14087 case FLOAT_EXPR:
14088 return true;
14089 case ABS_EXPR:
14090 case CONVERT_EXPR:
14091 case NON_LVALUE_EXPR:
14092 case NEGATE_EXPR:
14093 case SAVE_EXPR:
14094 return tree_expr_finite_p (TREE_OPERAND (x, 0));
14095 case MIN_EXPR:
14096 case MAX_EXPR:
14097 return tree_expr_finite_p (TREE_OPERAND (x, 0))
14098 && tree_expr_finite_p (TREE_OPERAND (x, 1));
14099 case COND_EXPR:
14100 return tree_expr_finite_p (TREE_OPERAND (x, 1))
14101 && tree_expr_finite_p (TREE_OPERAND (x, 2));
14102 case CALL_EXPR:
14103 switch (get_call_combined_fn (x))
14105 CASE_CFN_FABS:
14106 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
14107 CASE_CFN_FMAX:
14108 CASE_CFN_FMIN:
14109 return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
14110 && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
14111 default:
14112 return false;
14115 default:
14116 return false;
14120 /* Return true if expression X evaluates to an infinity.
14121 This function returns false for integer expressions. */
14123 bool
14124 tree_expr_infinite_p (const_tree x)
14126 if (!HONOR_INFINITIES (x))
14127 return false;
14128 switch (TREE_CODE (x))
14130 case REAL_CST:
14131 return real_isinf (TREE_REAL_CST_PTR (x));
14132 case ABS_EXPR:
14133 case NEGATE_EXPR:
14134 case NON_LVALUE_EXPR:
14135 case SAVE_EXPR:
14136 return tree_expr_infinite_p (TREE_OPERAND (x, 0));
14137 case COND_EXPR:
14138 return tree_expr_infinite_p (TREE_OPERAND (x, 1))
14139 && tree_expr_infinite_p (TREE_OPERAND (x, 2));
14140 default:
14141 return false;
14145 /* Return true if expression X could evaluate to an infinity.
14146 This function returns false for integer expressions, and returns
14147 true if uncertain. */
14149 bool
14150 tree_expr_maybe_infinite_p (const_tree x)
14152 if (!HONOR_INFINITIES (x))
14153 return false;
14154 switch (TREE_CODE (x))
14156 case REAL_CST:
14157 return real_isinf (TREE_REAL_CST_PTR (x));
14158 case FLOAT_EXPR:
14159 return false;
14160 case ABS_EXPR:
14161 case NEGATE_EXPR:
14162 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
14163 case COND_EXPR:
14164 return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
14165 || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
14166 default:
14167 return true;
14171 /* Return true if expression X evaluates to a signaling NaN.
14172 This function returns false for integer expressions. */
14174 bool
14175 tree_expr_signaling_nan_p (const_tree x)
14177 if (!HONOR_SNANS (x))
14178 return false;
14179 switch (TREE_CODE (x))
14181 case REAL_CST:
14182 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14183 case NON_LVALUE_EXPR:
14184 case SAVE_EXPR:
14185 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
14186 case COND_EXPR:
14187 return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
14188 && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
14189 default:
14190 return false;
14194 /* Return true if expression X could evaluate to a signaling NaN.
14195 This function returns false for integer expressions, and returns
14196 true if uncertain. */
14198 bool
14199 tree_expr_maybe_signaling_nan_p (const_tree x)
14201 if (!HONOR_SNANS (x))
14202 return false;
14203 switch (TREE_CODE (x))
14205 case REAL_CST:
14206 return real_issignaling_nan (TREE_REAL_CST_PTR (x));
14207 case FLOAT_EXPR:
14208 return false;
14209 case ABS_EXPR:
14210 case CONVERT_EXPR:
14211 case NEGATE_EXPR:
14212 case NON_LVALUE_EXPR:
14213 case SAVE_EXPR:
14214 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
14215 case MIN_EXPR:
14216 case MAX_EXPR:
14217 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
14218 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
14219 case COND_EXPR:
14220 return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
14221 || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
14222 case CALL_EXPR:
14223 switch (get_call_combined_fn (x))
14225 CASE_CFN_FABS:
14226 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
14227 CASE_CFN_FMAX:
14228 CASE_CFN_FMIN:
14229 return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
14230 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
14231 default:
14232 return true;
14234 default:
14235 return true;
14239 /* Return true if expression X evaluates to a NaN.
14240 This function returns false for integer expressions. */
14242 bool
14243 tree_expr_nan_p (const_tree x)
14245 if (!HONOR_NANS (x))
14246 return false;
14247 switch (TREE_CODE (x))
14249 case REAL_CST:
14250 return real_isnan (TREE_REAL_CST_PTR (x));
14251 case NON_LVALUE_EXPR:
14252 case SAVE_EXPR:
14253 return tree_expr_nan_p (TREE_OPERAND (x, 0));
14254 case COND_EXPR:
14255 return tree_expr_nan_p (TREE_OPERAND (x, 1))
14256 && tree_expr_nan_p (TREE_OPERAND (x, 2));
14257 default:
14258 return false;
14262 /* Return true if expression X could evaluate to a NaN.
14263 This function returns false for integer expressions, and returns
14264 true if uncertain. */
14266 bool
14267 tree_expr_maybe_nan_p (const_tree x)
14269 if (!HONOR_NANS (x))
14270 return false;
14271 switch (TREE_CODE (x))
14273 case REAL_CST:
14274 return real_isnan (TREE_REAL_CST_PTR (x));
14275 case FLOAT_EXPR:
14276 return false;
14277 case PLUS_EXPR:
14278 case MINUS_EXPR:
14279 case MULT_EXPR:
14280 return !tree_expr_finite_p (TREE_OPERAND (x, 0))
14281 || !tree_expr_finite_p (TREE_OPERAND (x, 1));
14282 case ABS_EXPR:
14283 case CONVERT_EXPR:
14284 case NEGATE_EXPR:
14285 case NON_LVALUE_EXPR:
14286 case SAVE_EXPR:
14287 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
14288 case MIN_EXPR:
14289 case MAX_EXPR:
14290 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
14291 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
14292 case COND_EXPR:
14293 return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
14294 || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
14295 case CALL_EXPR:
14296 switch (get_call_combined_fn (x))
14298 CASE_CFN_FABS:
14299 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
14300 CASE_CFN_FMAX:
14301 CASE_CFN_FMIN:
14302 return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
14303 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
14304 default:
14305 return true;
14307 default:
14308 return true;
14312 #define tree_expr_nonnegative_warnv_p(X, Y) \
14313 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14315 #define RECURSE(X) \
14316 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
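/* The #define above poisons direct calls: spelling out
   tree_expr_nonnegative_warnv_p inside the helpers below now produces
   a compile-time error, so every self-call must go through RECURSE,
   which parenthesizes the function name to bypass the macro and
   threads DEPTH + 1 to bound the recursion. */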
14318 /* Return true if CODE or TYPE is known to be non-negative. */
14320 static bool
14321 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14323 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14324 && truth_value_p (code))
14325 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14326 have a signed:1 type (where the values are -1 and 0). */
14327 return true;
14328 return false;
14331 /* Return true if (CODE OP0) is known to be non-negative. If the return
14332 value is based on the assumption that signed overflow is undefined,
14333 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14334 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14336 bool
14337 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14338 bool *strict_overflow_p, int depth)
14340 if (TYPE_UNSIGNED (type))
14341 return true;
14343 switch (code)
14345 case ABS_EXPR:
14346 /* We can't return 1 if flag_wrapv is set because
14347 ABS_EXPR<INT_MIN> = INT_MIN. */
14348 if (!ANY_INTEGRAL_TYPE_P (type))
14349 return true;
14350 if (TYPE_OVERFLOW_UNDEFINED (type))
14352 *strict_overflow_p = true;
14353 return true;
14355 break;
14357 case NON_LVALUE_EXPR:
14358 case FLOAT_EXPR:
14359 case FIX_TRUNC_EXPR:
14360 return RECURSE (op0);
14362 CASE_CONVERT:
14364 tree inner_type = TREE_TYPE (op0);
14365 tree outer_type = type;
14367 if (TREE_CODE (outer_type) == REAL_TYPE)
14369 if (TREE_CODE (inner_type) == REAL_TYPE)
14370 return RECURSE (op0);
14371 if (INTEGRAL_TYPE_P (inner_type))
14373 if (TYPE_UNSIGNED (inner_type))
14374 return true;
14375 return RECURSE (op0);
14378 else if (INTEGRAL_TYPE_P (outer_type))
14380 if (TREE_CODE (inner_type) == REAL_TYPE)
14381 return RECURSE (op0);
14382 if (INTEGRAL_TYPE_P (inner_type))
14383 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14384 && TYPE_UNSIGNED (inner_type);
14387 break;
14389 default:
14390 return tree_simple_nonnegative_warnv_p (code, type);
14393 /* We don't know sign of `t', so be conservative and return false. */
14394 return false;
14397 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14398 value is based on the assumption that signed overflow is undefined,
14399 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14400 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14402 bool
14403 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14404 tree op1, bool *strict_overflow_p,
14405 int depth)
14407 if (TYPE_UNSIGNED (type))
14408 return true;
14410 switch (code)
14412 case POINTER_PLUS_EXPR:
14413 case PLUS_EXPR:
14414 if (FLOAT_TYPE_P (type))
14415 return RECURSE (op0) && RECURSE (op1);
14417 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14418 both unsigned and at least 2 bits shorter than the result. */
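/* For instance, with 8-bit unsigned x and y added in 32-bit int:
   each operand is at most 255, so the sum is at most 510, well below
   INT_MAX; prec below is MAX (8, 8) + 1 = 9, which is < 32. */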
14419 if (TREE_CODE (type) == INTEGER_TYPE
14420 && TREE_CODE (op0) == NOP_EXPR
14421 && TREE_CODE (op1) == NOP_EXPR)
14423 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14424 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14425 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14426 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14428 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14429 TYPE_PRECISION (inner2)) + 1;
14430 return prec < TYPE_PRECISION (type);
14433 break;
14435 case MULT_EXPR:
14436 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14438 /* x * x is always non-negative for floating point x
14439 or without overflow. */
14440 if (operand_equal_p (op0, op1, 0)
14441 || (RECURSE (op0) && RECURSE (op1)))
14443 if (ANY_INTEGRAL_TYPE_P (type)
14444 && TYPE_OVERFLOW_UNDEFINED (type))
14445 *strict_overflow_p = true;
14446 return true;
14450 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14451 both unsigned and their combined precision is less than that of the result. */
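/* For instance, two 8-bit unsigned values multiplied in 32-bit int
   give at most 255 * 255 = 65025, which needs only 16 bits, so the
   product can never reach the sign bit. */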
14452 if (TREE_CODE (type) == INTEGER_TYPE
14453 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14454 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14456 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14457 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14458 : TREE_TYPE (op0);
14459 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14460 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14461 : TREE_TYPE (op1);
14463 bool unsigned0 = TYPE_UNSIGNED (inner0);
14464 bool unsigned1 = TYPE_UNSIGNED (inner1);
14466 if (TREE_CODE (op0) == INTEGER_CST)
14467 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14469 if (TREE_CODE (op1) == INTEGER_CST)
14470 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14472 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14473 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14475 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14476 ? tree_int_cst_min_precision (op0, UNSIGNED)
14477 : TYPE_PRECISION (inner0);
14479 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14480 ? tree_int_cst_min_precision (op1, UNSIGNED)
14481 : TYPE_PRECISION (inner1);
14483 return precision0 + precision1 < TYPE_PRECISION (type);
14486 return false;
14488 case BIT_AND_EXPR:
14489 return RECURSE (op0) || RECURSE (op1);
14491 case MAX_EXPR:
14492 /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
14493 things. */
14494 if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
14495 return RECURSE (op0) && RECURSE (op1);
14496 return RECURSE (op0) || RECURSE (op1);
14498 case BIT_IOR_EXPR:
14499 case BIT_XOR_EXPR:
14500 case MIN_EXPR:
14501 case RDIV_EXPR:
14502 case TRUNC_DIV_EXPR:
14503 case CEIL_DIV_EXPR:
14504 case FLOOR_DIV_EXPR:
14505 case ROUND_DIV_EXPR:
14506 return RECURSE (op0) && RECURSE (op1);
14508 case TRUNC_MOD_EXPR:
14509 return RECURSE (op0);
14511 case FLOOR_MOD_EXPR:
14512 return RECURSE (op1);
14514 case CEIL_MOD_EXPR:
14515 case ROUND_MOD_EXPR:
14516 default:
14517 return tree_simple_nonnegative_warnv_p (code, type);
14520 /* We don't know sign of `t', so be conservative and return false. */
14521 return false;
14524 /* Return true if T is known to be non-negative. If the return
14525 value is based on the assumption that signed overflow is undefined,
14526 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14527 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14529 bool
14530 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14532 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14533 return true;
14535 switch (TREE_CODE (t))
14537 case INTEGER_CST:
14538 return tree_int_cst_sgn (t) >= 0;
14540 case REAL_CST:
14541 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14543 case FIXED_CST:
14544 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14546 case COND_EXPR:
14547 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14549 case SSA_NAME:
14550 /* Limit the depth of recursion to avoid quadratic behavior.
14551 This is expected to catch almost all occurrences in practice.
14552 If this code misses important cases that unbounded recursion
14553 would not, passes that need this information could be revised
14554 to provide it through dataflow propagation. */
14555 return (!name_registered_for_update_p (t)
14556 && depth < param_max_ssa_name_query_depth
14557 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
14558 strict_overflow_p, depth));
14560 default:
14561 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14565 /* Return true if T is known to be non-negative. If the return
14566 value is based on the assumption that signed overflow is undefined,
14567 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14568 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
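/* For example, fabs (x) and hypot (x, y) never produce a negative
   result, copysign (x, y) is non-negative whenever y is, and
   pow (x, 2.0) is non-negative because the exponent is an even
   integer; each of these corresponds to a case below. */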
14570 bool
14571 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
14572 bool *strict_overflow_p, int depth)
14574 switch (fn)
14576 CASE_CFN_ACOS:
14577 CASE_CFN_ACOSH:
14578 CASE_CFN_CABS:
14579 CASE_CFN_COSH:
14580 CASE_CFN_ERFC:
14581 CASE_CFN_EXP:
14582 CASE_CFN_EXP10:
14583 CASE_CFN_EXP2:
14584 CASE_CFN_FABS:
14585 CASE_CFN_FDIM:
14586 CASE_CFN_HYPOT:
14587 CASE_CFN_POW10:
14588 CASE_CFN_FFS:
14589 CASE_CFN_PARITY:
14590 CASE_CFN_POPCOUNT:
14591 CASE_CFN_CLZ:
14592 CASE_CFN_CLRSB:
14593 case CFN_BUILT_IN_BSWAP16:
14594 case CFN_BUILT_IN_BSWAP32:
14595 case CFN_BUILT_IN_BSWAP64:
14596 case CFN_BUILT_IN_BSWAP128:
14597 /* Always true. */
14598 return true;
14600 CASE_CFN_SQRT:
14601 CASE_CFN_SQRT_FN:
14602 /* sqrt(-0.0) is -0.0. */
14603 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
14604 return true;
14605 return RECURSE (arg0);
14607 CASE_CFN_ASINH:
14608 CASE_CFN_ATAN:
14609 CASE_CFN_ATANH:
14610 CASE_CFN_CBRT:
14611 CASE_CFN_CEIL:
14612 CASE_CFN_CEIL_FN:
14613 CASE_CFN_ERF:
14614 CASE_CFN_EXPM1:
14615 CASE_CFN_FLOOR:
14616 CASE_CFN_FLOOR_FN:
14617 CASE_CFN_FMOD:
14618 CASE_CFN_FREXP:
14619 CASE_CFN_ICEIL:
14620 CASE_CFN_IFLOOR:
14621 CASE_CFN_IRINT:
14622 CASE_CFN_IROUND:
14623 CASE_CFN_LCEIL:
14624 CASE_CFN_LDEXP:
14625 CASE_CFN_LFLOOR:
14626 CASE_CFN_LLCEIL:
14627 CASE_CFN_LLFLOOR:
14628 CASE_CFN_LLRINT:
14629 CASE_CFN_LLROUND:
14630 CASE_CFN_LRINT:
14631 CASE_CFN_LROUND:
14632 CASE_CFN_MODF:
14633 CASE_CFN_NEARBYINT:
14634 CASE_CFN_NEARBYINT_FN:
14635 CASE_CFN_RINT:
14636 CASE_CFN_RINT_FN:
14637 CASE_CFN_ROUND:
14638 CASE_CFN_ROUND_FN:
14639 CASE_CFN_ROUNDEVEN:
14640 CASE_CFN_ROUNDEVEN_FN:
14641 CASE_CFN_SCALB:
14642 CASE_CFN_SCALBLN:
14643 CASE_CFN_SCALBN:
14644 CASE_CFN_SIGNBIT:
14645 CASE_CFN_SIGNIFICAND:
14646 CASE_CFN_SINH:
14647 CASE_CFN_TANH:
14648 CASE_CFN_TRUNC:
14649 CASE_CFN_TRUNC_FN:
14650 /* True if the 1st argument is nonnegative. */
14651 return RECURSE (arg0);
14653 CASE_CFN_FMAX:
14654 CASE_CFN_FMAX_FN:
14655 /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
14656 things. In the presence of sNaNs, we're only guaranteed to be
14657 non-negative if both operands are non-negative. In the presence
14658 of qNaNs, we're non-negative if either operand is non-negative
14659 and can't be a qNaN, or if both operands are non-negative. */
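/* Concretely: fmax (qNaN, y) returns y, so when the first operand
   may be a qNaN the second must be non-negative on its own, and
   vice versa. */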
14660 if (tree_expr_maybe_signaling_nan_p (arg0)
14661 || tree_expr_maybe_signaling_nan_p (arg1))
14662 return RECURSE (arg0) && RECURSE (arg1);
14663 return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
14664 || RECURSE (arg1))
14665 : (RECURSE (arg1)
14666 && !tree_expr_maybe_nan_p (arg1));
14668 CASE_CFN_FMIN:
14669 CASE_CFN_FMIN_FN:
14670 /* True if the 1st AND 2nd arguments are nonnegative. */
14671 return RECURSE (arg0) && RECURSE (arg1);
14673 CASE_CFN_COPYSIGN:
14674 CASE_CFN_COPYSIGN_FN:
14675 /* True if the 2nd argument is nonnegative. */
14676 return RECURSE (arg1);
14678 CASE_CFN_POWI:
14679 /* True if the 1st argument is nonnegative or the second
14680 argument is an even integer. */
14681 if (TREE_CODE (arg1) == INTEGER_CST
14682 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14683 return true;
14684 return RECURSE (arg0);
14686 CASE_CFN_POW:
14687 /* True if the 1st argument is nonnegative or the second
14688 argument is an even integer valued real. */
14689 if (TREE_CODE (arg1) == REAL_CST)
14691 REAL_VALUE_TYPE c;
14692 HOST_WIDE_INT n;
14694 c = TREE_REAL_CST (arg1);
14695 n = real_to_integer (&c);
14696 if ((n & 1) == 0)
14698 REAL_VALUE_TYPE cint;
14699 real_from_integer (&cint, VOIDmode, n, SIGNED);
14700 if (real_identical (&c, &cint))
14701 return true;
14704 return RECURSE (arg0);
14706 default:
14707 break;
14709 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
14712 /* Return true if T is known to be non-negative. If the return
14713 value is based on the assumption that signed overflow is undefined,
14714 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14715 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14717 static bool
14718 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14720 enum tree_code code = TREE_CODE (t);
14721 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14722 return true;
14724 switch (code)
14726 case TARGET_EXPR:
14728 tree temp = TARGET_EXPR_SLOT (t);
14729 t = TARGET_EXPR_INITIAL (t);
14731 /* If the initializer is non-void, then it's a normal expression
14732 that will be assigned to the slot. */
14733 if (!VOID_TYPE_P (t))
14734 return RECURSE (t);
14736 /* Otherwise, the initializer sets the slot in some way. One common
14737 way is an assignment statement at the end of the initializer. */
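/* E.g. for a TARGET_EXPR whose initializer ends in "slot = expr;",
   the loop below digs through BIND_EXPRs, statement lists and
   try/finally wrappers to find that final assignment, then asks
   whether its right-hand side is non-negative. */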
14738 while (1)
14740 if (TREE_CODE (t) == BIND_EXPR)
14741 t = expr_last (BIND_EXPR_BODY (t));
14742 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14743 || TREE_CODE (t) == TRY_CATCH_EXPR)
14744 t = expr_last (TREE_OPERAND (t, 0));
14745 else if (TREE_CODE (t) == STATEMENT_LIST)
14746 t = expr_last (t);
14747 else
14748 break;
14750 if (TREE_CODE (t) == MODIFY_EXPR
14751 && TREE_OPERAND (t, 0) == temp)
14752 return RECURSE (TREE_OPERAND (t, 1));
14754 return false;
14757 case CALL_EXPR:
14759 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14760 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14762 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14763 get_call_combined_fn (t),
14764 arg0,
14765 arg1,
14766 strict_overflow_p, depth);
14768 case COMPOUND_EXPR:
14769 case MODIFY_EXPR:
14770 return RECURSE (TREE_OPERAND (t, 1));
14772 case BIND_EXPR:
14773 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
14775 case SAVE_EXPR:
14776 return RECURSE (TREE_OPERAND (t, 0));
14778 default:
14779 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14783 #undef RECURSE
14784 #undef tree_expr_nonnegative_warnv_p
14786 /* Return true if T is known to be non-negative. If the return
14787 value is based on the assumption that signed overflow is undefined,
14788 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14789 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
14791 bool
14792 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14794 enum tree_code code;
14795 if (t == error_mark_node)
14796 return false;
14798 code = TREE_CODE (t);
14799 switch (TREE_CODE_CLASS (code))
14801 case tcc_binary:
14802 case tcc_comparison:
14803 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14804 TREE_TYPE (t),
14805 TREE_OPERAND (t, 0),
14806 TREE_OPERAND (t, 1),
14807 strict_overflow_p, depth);
14809 case tcc_unary:
14810 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14811 TREE_TYPE (t),
14812 TREE_OPERAND (t, 0),
14813 strict_overflow_p, depth);
14815 case tcc_constant:
14816 case tcc_declaration:
14817 case tcc_reference:
14818 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14820 default:
14821 break;
14824 switch (code)
14826 case TRUTH_AND_EXPR:
14827 case TRUTH_OR_EXPR:
14828 case TRUTH_XOR_EXPR:
14829 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14830 TREE_TYPE (t),
14831 TREE_OPERAND (t, 0),
14832 TREE_OPERAND (t, 1),
14833 strict_overflow_p, depth);
14834 case TRUTH_NOT_EXPR:
14835 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14836 TREE_TYPE (t),
14837 TREE_OPERAND (t, 0),
14838 strict_overflow_p, depth);
14840 case COND_EXPR:
14841 case CONSTRUCTOR:
14842 case OBJ_TYPE_REF:
14843 case ASSERT_EXPR:
14844 case ADDR_EXPR:
14845 case WITH_SIZE_EXPR:
14846 case SSA_NAME:
14847 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14849 default:
14850 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
14854 /* Return true if `t' is known to be non-negative. Handle warnings
14855 about undefined signed overflow. */
14857 bool
14858 tree_expr_nonnegative_p (tree t)
14860 bool ret, strict_overflow_p;
14862 strict_overflow_p = false;
14863 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14864 if (strict_overflow_p)
14865 fold_overflow_warning (("assuming signed overflow does not occur when "
14866 "determining that expression is always "
14867 "non-negative"),
14868 WARN_STRICT_OVERFLOW_MISC);
14869 return ret;
14873 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14874 For floating point we further ensure that T is not denormal.
14875 Similar logic is present in nonzero_address_p in rtlanal.c.
14877 If the return value is based on the assumption that signed overflow
14878 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14879 change *STRICT_OVERFLOW_P. */
14881 bool
14882 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14883 bool *strict_overflow_p)
14885 switch (code)
14887 case ABS_EXPR:
14888 return tree_expr_nonzero_warnv_p (op0,
14889 strict_overflow_p);
14891 case NOP_EXPR:
14893 tree inner_type = TREE_TYPE (op0);
14894 tree outer_type = type;
14896 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14897 && tree_expr_nonzero_warnv_p (op0,
14898 strict_overflow_p));
14900 break;
14902 case NON_LVALUE_EXPR:
14903 return tree_expr_nonzero_warnv_p (op0,
14904 strict_overflow_p);
14906 default:
14907 break;
14910 return false;
14913 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14914 For floating point we further ensure that T is not denormal.
14915 Similar logic is present in nonzero_address_p in rtlanal.c.
14917 If the return value is based on the assumption that signed overflow
14918 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14919 change *STRICT_OVERFLOW_P. */
14921 bool
14922 tree_binary_nonzero_warnv_p (enum tree_code code,
14923 tree type,
14924 tree op0,
14925 tree op1, bool *strict_overflow_p)
14927 bool sub_strict_overflow_p;
14928 switch (code)
14930 case POINTER_PLUS_EXPR:
14931 case PLUS_EXPR:
14932 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
14934 /* In the presence of negative values it is hard
14935 to say anything definite. */
14936 sub_strict_overflow_p = false;
14937 if (!tree_expr_nonnegative_warnv_p (op0,
14938 &sub_strict_overflow_p)
14939 || !tree_expr_nonnegative_warnv_p (op1,
14940 &sub_strict_overflow_p))
14941 return false;
14942 /* One of the operands must be positive and the other non-negative. */
14943 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14944 overflows, on a twos-complement machine the sum of two
14945 nonnegative numbers can never be zero. */
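/* Concretely: two non-negative n-bit signed values are each at most
   2^(n-1) - 1, so their sum is at most 2^n - 2; if at least one of
   them is nonzero the sum lies in [1, 2^n - 2] and cannot wrap to
   zero. */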
14946 return (tree_expr_nonzero_warnv_p (op0,
14947 strict_overflow_p)
14948 || tree_expr_nonzero_warnv_p (op1,
14949 strict_overflow_p));
14951 break;
14953 case MULT_EXPR:
14954 if (TYPE_OVERFLOW_UNDEFINED (type))
14956 if (tree_expr_nonzero_warnv_p (op0,
14957 strict_overflow_p)
14958 && tree_expr_nonzero_warnv_p (op1,
14959 strict_overflow_p))
14961 *strict_overflow_p = true;
14962 return true;
14965 break;
14967 case MIN_EXPR:
14968 sub_strict_overflow_p = false;
14969 if (tree_expr_nonzero_warnv_p (op0,
14970 &sub_strict_overflow_p)
14971 && tree_expr_nonzero_warnv_p (op1,
14972 &sub_strict_overflow_p))
14974 if (sub_strict_overflow_p)
14975 *strict_overflow_p = true;
14977 break;
14979 case MAX_EXPR:
14980 sub_strict_overflow_p = false;
14981 if (tree_expr_nonzero_warnv_p (op0,
14982 &sub_strict_overflow_p))
14984 if (sub_strict_overflow_p)
14985 *strict_overflow_p = true;
14987 /* When both operands are nonzero, then MAX must be too. */
14988 if (tree_expr_nonzero_warnv_p (op1,
14989 strict_overflow_p))
14990 return true;
14992 /* MAX where operand 0 is positive is positive. */
14993 return tree_expr_nonnegative_warnv_p (op0,
14994 strict_overflow_p);
14996 /* MAX where operand 1 is positive is positive. */
14997 else if (tree_expr_nonzero_warnv_p (op1,
14998 &sub_strict_overflow_p)
14999 && tree_expr_nonnegative_warnv_p (op1,
15000 &sub_strict_overflow_p))
15002 if (sub_strict_overflow_p)
15003 *strict_overflow_p = true;
15004 return true;
15006 break;
15008 case BIT_IOR_EXPR:
15009 return (tree_expr_nonzero_warnv_p (op1,
15010 strict_overflow_p)
15011 || tree_expr_nonzero_warnv_p (op0,
15012 strict_overflow_p));
15014 default:
15015 break;
15018 return false;
15021 /* Return true when T is an address and is known to be nonzero.
15022 For floating point we further ensure that T is not denormal.
15023 Similar logic is present in nonzero_address_p in rtlanal.c.
15025 If the return value is based on the assumption that signed overflow
15026 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15027 change *STRICT_OVERFLOW_P. */
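/* For example, the address of an ordinary defined global is known to
   be nonzero, but the address of a weak symbol may resolve to null,
   which is why maybe_nonzero_address is consulted for symbols below. */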
15029 bool
15030 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15032 bool sub_strict_overflow_p;
15033 switch (TREE_CODE (t))
15035 case INTEGER_CST:
15036 return !integer_zerop (t);
15038 case ADDR_EXPR:
15040 tree base = TREE_OPERAND (t, 0);
15042 if (!DECL_P (base))
15043 base = get_base_address (base);
15045 if (base && TREE_CODE (base) == TARGET_EXPR)
15046 base = TARGET_EXPR_SLOT (base);
15048 if (!base)
15049 return false;
15051 /* For objects in symbol table check if we know they are non-zero.
15052 Don't do anything for variables and functions before symtab is built;
15053 it is quite possible that they will be declared weak later. */
15054 int nonzero_addr = maybe_nonzero_address (base);
15055 if (nonzero_addr >= 0)
15056 return nonzero_addr;
15058 /* Constants are never weak. */
15059 if (CONSTANT_CLASS_P (base))
15060 return true;
15062 return false;
15065 case COND_EXPR:
15066 sub_strict_overflow_p = false;
15067 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15068 &sub_strict_overflow_p)
15069 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15070 &sub_strict_overflow_p))
15072 if (sub_strict_overflow_p)
15073 *strict_overflow_p = true;
15074 return true;
15076 break;
15078 case SSA_NAME:
15079 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
15080 break;
15081 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
15083 default:
15084 break;
15086 return false;
15089 #define integer_valued_real_p(X) \
15090 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
15092 #define RECURSE(X) \
15093 ((integer_valued_real_p) (X, depth + 1))
15095 /* Return true if the floating point result of (CODE OP0) has an
15096 integer value. We also allow +Inf, -Inf and NaN to be considered
15097 integer values. Return false for signaling NaN.
15099 DEPTH is the current nesting depth of the query. */
15101 bool
15102 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
15104 switch (code)
15106 case FLOAT_EXPR:
15107 return true;
15109 case ABS_EXPR:
15110 return RECURSE (op0);
15112 CASE_CONVERT:
15114 tree type = TREE_TYPE (op0);
15115 if (TREE_CODE (type) == INTEGER_TYPE)
15116 return true;
15117 if (TREE_CODE (type) == REAL_TYPE)
15118 return RECURSE (op0);
15119 break;
15122 default:
15123 break;
15125 return false;
15128 /* Return true if the floating point result of (CODE OP0 OP1) has an
15129 integer value. We also allow +Inf, -Inf and NaN to be considered
15130 integer values. Return false for signaling NaN.
15132 DEPTH is the current nesting depth of the query. */
15134 bool
15135 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
15137 switch (code)
15139 case PLUS_EXPR:
15140 case MINUS_EXPR:
15141 case MULT_EXPR:
15142 case MIN_EXPR:
15143 case MAX_EXPR:
15144 return RECURSE (op0) && RECURSE (op1);
15146 default:
15147 break;
15149 return false;
15152 /* Return true if the floating point result of calling FN with arguments
15153 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
15154 considered integer values. Return false for signaling NaN. If FN
15155 takes fewer than 2 arguments, the remaining ARGn are null.
15157 DEPTH is the current nesting depth of the query. */
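/* For example, ceil (x) and trunc (x) are integer-valued by
   definition, while fmin (x, y) is only known integer-valued when
   both of its arguments are. */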
15159 bool
15160 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
15162 switch (fn)
15164 CASE_CFN_CEIL:
15165 CASE_CFN_CEIL_FN:
15166 CASE_CFN_FLOOR:
15167 CASE_CFN_FLOOR_FN:
15168 CASE_CFN_NEARBYINT:
15169 CASE_CFN_NEARBYINT_FN:
15170 CASE_CFN_RINT:
15171 CASE_CFN_RINT_FN:
15172 CASE_CFN_ROUND:
15173 CASE_CFN_ROUND_FN:
15174 CASE_CFN_ROUNDEVEN:
15175 CASE_CFN_ROUNDEVEN_FN:
15176 CASE_CFN_TRUNC:
15177 CASE_CFN_TRUNC_FN:
15178 return true;
15180 CASE_CFN_FMIN:
15181 CASE_CFN_FMIN_FN:
15182 CASE_CFN_FMAX:
15183 CASE_CFN_FMAX_FN:
15184 return RECURSE (arg0) && RECURSE (arg1);
15186 default:
15187 break;
15189 return false;
15192 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
15193 has an integer value. We also allow +Inf, -Inf and NaN to be
15194 considered integer values. Return false for signaling NaN.
15196 DEPTH is the current nesting depth of the query. */
15198 bool
15199 integer_valued_real_single_p (tree t, int depth)
15201 switch (TREE_CODE (t))
15203 case REAL_CST:
15204 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
15206 case COND_EXPR:
15207 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
15209 case SSA_NAME:
15210 /* Limit the depth of recursion to avoid quadratic behavior.
15211 This is expected to catch almost all occurrences in practice.
15212 If this code misses important cases that unbounded recursion
15213 would not, passes that need this information could be revised
15214 to provide it through dataflow propagation. */
15215 return (!name_registered_for_update_p (t)
15216 && depth < param_max_ssa_name_query_depth
15217 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
15218 depth));
15220 default:
15221 break;
15223 return false;
15226 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
15227 has an integer value. We also allow +Inf, -Inf and NaN to be
15228 considered integer values. Return false for signaling NaN.
15230 DEPTH is the current nesting depth of the query. */
15232 static bool
15233 integer_valued_real_invalid_p (tree t, int depth)
15235 switch (TREE_CODE (t))
15237 case COMPOUND_EXPR:
15238 case MODIFY_EXPR:
15239 case BIND_EXPR:
15240 return RECURSE (TREE_OPERAND (t, 1));
15242 case SAVE_EXPR:
15243 return RECURSE (TREE_OPERAND (t, 0));
15245 default:
15246 break;
15248 return false;
15251 #undef RECURSE
15252 #undef integer_valued_real_p
15254 /* Return true if the floating point expression T has an integer value.
15255 We also allow +Inf, -Inf and NaN to be considered integer values.
15256 Return false for signaling NaN.
15258 DEPTH is the current nesting depth of the query. */
15260 bool
15261 integer_valued_real_p (tree t, int depth)
15263 if (t == error_mark_node)
15264 return false;
15266 STRIP_ANY_LOCATION_WRAPPER (t);
15268 tree_code code = TREE_CODE (t);
15269 switch (TREE_CODE_CLASS (code))
15271 case tcc_binary:
15272 case tcc_comparison:
15273 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
15274 TREE_OPERAND (t, 1), depth);
15276 case tcc_unary:
15277 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
15279 case tcc_constant:
15280 case tcc_declaration:
15281 case tcc_reference:
15282 return integer_valued_real_single_p (t, depth);
15284 default:
15285 break;
15288 switch (code)
15290 case COND_EXPR:
15291 case SSA_NAME:
15292 return integer_valued_real_single_p (t, depth);
15294 case CALL_EXPR:
15296 tree arg0 = (call_expr_nargs (t) > 0
15297 ? CALL_EXPR_ARG (t, 0)
15298 : NULL_TREE);
15299 tree arg1 = (call_expr_nargs (t) > 1
15300 ? CALL_EXPR_ARG (t, 1)
15301 : NULL_TREE);
15302 return integer_valued_real_call_p (get_call_combined_fn (t),
15303 arg0, arg1, depth);
15306 default:
15307 return integer_valued_real_invalid_p (t, depth);
15311 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15312 attempt to fold the expression to a constant without modifying TYPE,
15313 OP0 or OP1.
15315 If the expression could be simplified to a constant, then return
15316 the constant. If the expression would not be simplified to a
15317 constant, then return NULL_TREE. */
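/* For example, folding PLUS_EXPR on two INTEGER_CSTs 2 and 3 yields
   the INTEGER_CST 5, whereas PLUS_EXPR on a variable and a constant
   does not simplify to a constant, so NULL_TREE is returned. */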
15319 tree
15320 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15322 tree tem = fold_binary (code, type, op0, op1);
15323 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15326 /* Given the components of a unary expression CODE, TYPE and OP0,
15327 attempt to fold the expression to a constant without modifying
15328 TYPE or OP0.
15330 If the expression could be simplified to a constant, then return
15331 the constant. If the expression would not be simplified to a
15332 constant, then return NULL_TREE. */
15334 tree
15335 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15337 tree tem = fold_unary (code, type, op0);
15338 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15341 /* If EXP represents referencing an element in a constant string
15342 (either via pointer arithmetic or array indexing), return the
15343 tree representing the value accessed, otherwise return NULL. */
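/* For example, given the constant reference "abc"[1], this returns
   an INTEGER_CST holding 'b'; for a non-constant index or a
   non-constant string it returns NULL. */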
15345 tree
15346 fold_read_from_constant_string (tree exp)
15348 if ((TREE_CODE (exp) == INDIRECT_REF
15349 || TREE_CODE (exp) == ARRAY_REF)
15350 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15352 tree exp1 = TREE_OPERAND (exp, 0);
15353 tree index;
15354 tree string;
15355 location_t loc = EXPR_LOCATION (exp);
15357 if (TREE_CODE (exp) == INDIRECT_REF)
15358 string = string_constant (exp1, &index, NULL, NULL);
15359 else
15361 tree low_bound = array_ref_low_bound (exp);
15362 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15364 /* Optimize the special-case of a zero lower bound.
15366 We convert the low_bound to sizetype to avoid some problems
15367 with constant folding. (E.g. suppose the lower bound is 1,
15368 and its mode is QI. Without the conversion, (ARRAY
15369 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15370 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15371 if (! integer_zerop (low_bound))
15372 index = size_diffop_loc (loc, index,
15373 fold_convert_loc (loc, sizetype, low_bound));
15375 string = exp1;
15378 scalar_int_mode char_mode;
15379 if (string
15380 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15381 && TREE_CODE (string) == STRING_CST
15382 && TREE_CODE (index) == INTEGER_CST
15383 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15384 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
15385 &char_mode)
15386 && GET_MODE_SIZE (char_mode) == 1)
15387 return build_int_cst_type (TREE_TYPE (exp),
15388 (TREE_STRING_POINTER (string)
15389 [TREE_INT_CST_LOW (index)]));
15391 return NULL;
15394 /* Folds a read from vector element at IDX of vector ARG. */
15396 tree
15397 fold_read_from_vector (tree arg, poly_uint64 idx)
15399 unsigned HOST_WIDE_INT i;
15400 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
15401 && known_ge (idx, 0u)
15402 && idx.is_constant (&i))
15404 if (TREE_CODE (arg) == VECTOR_CST)
15405 return VECTOR_CST_ELT (arg, i);
15406 else if (TREE_CODE (arg) == CONSTRUCTOR)
15408 if (i >= CONSTRUCTOR_NELTS (arg))
15409 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
15410 return CONSTRUCTOR_ELT (arg, i)->value;
15413 return NULL_TREE;
15416 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15417 an integer constant, real, or fixed-point constant.
15419 TYPE is the type of the result. */
15421 static tree
15422 fold_negate_const (tree arg0, tree type)
15424 tree t = NULL_TREE;
15426 switch (TREE_CODE (arg0))
15428 case REAL_CST:
15429 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15430 break;
15432 case FIXED_CST:
15434 FIXED_VALUE_TYPE f;
15435 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15436 &(TREE_FIXED_CST (arg0)), NULL,
15437 TYPE_SATURATING (type));
15438 t = build_fixed (type, f);
15439 /* Propagate overflow flags. */
15440 if (overflow_p | TREE_OVERFLOW (arg0))
15441 TREE_OVERFLOW (t) = 1;
15442 break;
15445 default:
15446 if (poly_int_tree_p (arg0))
15448 wi::overflow_type overflow;
15449 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
15450 t = force_fit_type (type, res, 1,
15451 (overflow && ! TYPE_UNSIGNED (type))
15452 || TREE_OVERFLOW (arg0));
15453 break;
15456 gcc_unreachable ();
15459 return t;
15462 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15463 an integer constant or real constant.
15465 TYPE is the type of the result. */
15467 tree
15468 fold_abs_const (tree arg0, tree type)
15470 tree t = NULL_TREE;
15472 switch (TREE_CODE (arg0))
15474 case INTEGER_CST:
15476 /* If the value is unsigned or non-negative, then the absolute value
15477 is the same as the ordinary value. */
15478 wide_int val = wi::to_wide (arg0);
15479 wi::overflow_type overflow = wi::OVF_NONE;
15480 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
15481 ;
15483 /* If the value is negative, then the absolute value is
15484 its negation. */
15485 else
15486 val = wi::neg (val, &overflow);
15488 /* Force to the destination type, set TREE_OVERFLOW for signed
15489 TYPE only. */
15490 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
15492 break;
15494 case REAL_CST:
15495 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15496 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15497 else
15498 t = arg0;
15499 break;
15501 default:
15502 gcc_unreachable ();
15505 return t;
15508 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15509 constant. TYPE is the type of the result. */
15511 static tree
15512 fold_not_const (const_tree arg0, tree type)
15514 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15516 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
15519 /* Given CODE, a relational operator, the target type, TYPE and two
15520 constant operands OP0 and OP1, return the result of the
15521 relational operation. If the result is not a compile time
15522 constant, then return NULL_TREE. */
15524 static tree
15525 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15527 int result, invert;
15529 /* From here on, the only cases we handle are when the result is
15530 known to be a constant. */
15532 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15534 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15535 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15537 /* Handle the cases where either operand is a NaN. */
15538 if (real_isnan (c0) || real_isnan (c1))
15540 switch (code)
15542 case EQ_EXPR:
15543 case ORDERED_EXPR:
15544 result = 0;
15545 break;
15547 case NE_EXPR:
15548 case UNORDERED_EXPR:
15549 case UNLT_EXPR:
15550 case UNLE_EXPR:
15551 case UNGT_EXPR:
15552 case UNGE_EXPR:
15553 case UNEQ_EXPR:
15554 result = 1;
15555 break;
15557 case LT_EXPR:
15558 case LE_EXPR:
15559 case GT_EXPR:
15560 case GE_EXPR:
15561 case LTGT_EXPR:
15562 if (flag_trapping_math)
15563 return NULL_TREE;
15564 result = 0;
15565 break;
15567 default:
15568 gcc_unreachable ();
15571 return constant_boolean_node (result, type);
15574 return constant_boolean_node (real_compare (code, c0, c1), type);
15577 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15579 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15580 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15581 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15584 /* Handle equality/inequality of complex constants. */
15585 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15587 tree rcond = fold_relational_const (code, type,
15588 TREE_REALPART (op0),
15589 TREE_REALPART (op1));
15590 tree icond = fold_relational_const (code, type,
15591 TREE_IMAGPART (op0),
15592 TREE_IMAGPART (op1));
15593 if (code == EQ_EXPR)
15594 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15595 else if (code == NE_EXPR)
15596 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15597 else
15598 return NULL_TREE;
15601 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15603 if (!VECTOR_TYPE_P (type))
15605 /* Have vector comparison with scalar boolean result. */
15606 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
15607 && known_eq (VECTOR_CST_NELTS (op0),
15608 VECTOR_CST_NELTS (op1)));
15609 unsigned HOST_WIDE_INT nunits;
15610 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
15611 return NULL_TREE;
15612 for (unsigned i = 0; i < nunits; i++)
15614 tree elem0 = VECTOR_CST_ELT (op0, i);
15615 tree elem1 = VECTOR_CST_ELT (op1, i);
15616 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
15617 if (tmp == NULL_TREE)
15618 return NULL_TREE;
15619 if (integer_zerop (tmp))
15620 return constant_boolean_node (code == NE_EXPR, type);
15622 return constant_boolean_node (code == EQ_EXPR, type);
15624 tree_vector_builder elts;
15625 if (!elts.new_binary_operation (type, op0, op1, false))
15626 return NULL_TREE;
15627 unsigned int count = elts.encoded_nelts ();
15628 for (unsigned i = 0; i < count; i++)
15630 tree elem_type = TREE_TYPE (type);
15631 tree elem0 = VECTOR_CST_ELT (op0, i);
15632 tree elem1 = VECTOR_CST_ELT (op1, i);
15634 tree tem = fold_relational_const (code, elem_type,
15635 elem0, elem1);
15637 if (tem == NULL_TREE)
15638 return NULL_TREE;
15640 elts.quick_push (build_int_cst (elem_type,
15641 integer_zerop (tem) ? 0 : -1));
15644 return elts.build ();
15647 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15649 To compute GT, swap the arguments and do LT.
15650 To compute GE, do LT and invert the result.
15651 To compute LE, swap the arguments, do LT and invert the result.
15652 To compute NE, do EQ and invert the result.
15654 Therefore, the code below must handle only EQ and LT. */
15656 if (code == LE_EXPR || code == GT_EXPR)
15658 std::swap (op0, op1);
15659 code = swap_tree_comparison (code);
15662 /* Note that it is safe to invert for real values here because we
15663 have already handled the one case that it matters. */
15665 invert = 0;
15666 if (code == NE_EXPR || code == GE_EXPR)
15668 invert = 1;
15669 code = invert_tree_comparison (code, false);
15672 /* Compute a result for LT or EQ if args permit;
15673 otherwise return NULL_TREE. */
15674 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15676 if (code == EQ_EXPR)
15677 result = tree_int_cst_equal (op0, op1);
15678 else
15679 result = tree_int_cst_lt (op0, op1);
15681 else
15682 return NULL_TREE;
15684 if (invert)
15685 result ^= 1;
15686 return constant_boolean_node (result, type);
15689 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15690 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15691 itself. */
15693 tree
15694 fold_build_cleanup_point_expr (tree type, tree expr)
15696 /* If the expression does not have side effects then we don't have to wrap
15697 it with a cleanup point expression. */
15698 if (!TREE_SIDE_EFFECTS (expr))
15699 return expr;
15701 /* If the expression is a return, check the expression inside the
15702 return and the right hand side of the modify expression it contains.
15703 If either has no side effects, we don't need to
15704 wrap the expression in a cleanup point expression. Note we don't check the
15705 left hand side of the modify because it should always be a return decl. */
15706 if (TREE_CODE (expr) == RETURN_EXPR)
15708 tree op = TREE_OPERAND (expr, 0);
15709 if (!op || !TREE_SIDE_EFFECTS (op))
15710 return expr;
15711 op = TREE_OPERAND (op, 1);
15712 if (!TREE_SIDE_EFFECTS (op))
15713 return expr;
15716 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
15719 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15720 of an indirection through OP0, or NULL_TREE if no simplification is
15721 possible. */
15723 tree
15724 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15726 tree sub = op0;
15727 tree subtype;
15728 poly_uint64 const_op01;
15730 STRIP_NOPS (sub);
15731 subtype = TREE_TYPE (sub);
15732 if (!POINTER_TYPE_P (subtype)
15733 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
15734 return NULL_TREE;
15736 if (TREE_CODE (sub) == ADDR_EXPR)
15738 tree op = TREE_OPERAND (sub, 0);
15739 tree optype = TREE_TYPE (op);
15741 /* *&CONST_DECL -> to the value of the const decl. */
15742 if (TREE_CODE (op) == CONST_DECL)
15743 return DECL_INITIAL (op);
15744 /* *&p => p; make sure to handle *&"str"[cst] here. */
15745 if (type == optype)
15747 tree fop = fold_read_from_constant_string (op);
15748 if (fop)
15749 return fop;
15750 else
15751 return op;
15753 /* *(foo *)&fooarray => fooarray[0] */
15754 else if (TREE_CODE (optype) == ARRAY_TYPE
15755 && type == TREE_TYPE (optype)
15756 && (!in_gimple_form
15757 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15759 tree type_domain = TYPE_DOMAIN (optype);
15760 tree min_val = size_zero_node;
15761 if (type_domain && TYPE_MIN_VALUE (type_domain))
15762 min_val = TYPE_MIN_VALUE (type_domain);
15763 if (in_gimple_form
15764 && TREE_CODE (min_val) != INTEGER_CST)
15765 return NULL_TREE;
15766 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15767 NULL_TREE, NULL_TREE);
15769 /* *(foo *)&complexfoo => __real__ complexfoo */
15770 else if (TREE_CODE (optype) == COMPLEX_TYPE
15771 && type == TREE_TYPE (optype))
15772 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15773 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15774 else if (VECTOR_TYPE_P (optype)
15775 && type == TREE_TYPE (optype))
15777 tree part_width = TYPE_SIZE (type);
15778 tree index = bitsize_int (0);
15779 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
15780 index);
15784 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15785 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
15787 tree op00 = TREE_OPERAND (sub, 0);
15788 tree op01 = TREE_OPERAND (sub, 1);
15790 STRIP_NOPS (op00);
15791 if (TREE_CODE (op00) == ADDR_EXPR)
15793 tree op00type;
15794 op00 = TREE_OPERAND (op00, 0);
15795 op00type = TREE_TYPE (op00);
15797 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15798 if (VECTOR_TYPE_P (op00type)
15799 && type == TREE_TYPE (op00type)
15800 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
15801 but we want to treat offsets with MSB set as negative.
15802 For the code below negative offsets are invalid and
15803 TYPE_SIZE of the element is something unsigned, so
15804 check whether op01 fits into poly_int64, which implies
15805 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
15806 then just use poly_uint64 because we want to treat the
15807 value as unsigned. */
15808 && tree_fits_poly_int64_p (op01))
15810 tree part_width = TYPE_SIZE (type);
15811 poly_uint64 max_offset
15812 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
15813 * TYPE_VECTOR_SUBPARTS (op00type));
15814 if (known_lt (const_op01, max_offset))
15816 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
15817 return fold_build3_loc (loc,
15818 BIT_FIELD_REF, type, op00,
15819 part_width, index);
15822 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15823 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15824 && type == TREE_TYPE (op00type))
15826 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
15827 const_op01))
15828 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15830 /* ((foo *)&fooarray)[1] => fooarray[1] */
15831 else if (TREE_CODE (op00type) == ARRAY_TYPE
15832 && type == TREE_TYPE (op00type))
15834 tree type_domain = TYPE_DOMAIN (op00type);
15835 tree min_val = size_zero_node;
15836 if (type_domain && TYPE_MIN_VALUE (type_domain))
15837 min_val = TYPE_MIN_VALUE (type_domain);
15838 poly_uint64 type_size, index;
15839 if (poly_int_tree_p (min_val)
15840 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
15841 && multiple_p (const_op01, type_size, &index))
15843 poly_offset_int off = index + wi::to_poly_offset (min_val);
15844 op01 = wide_int_to_tree (sizetype, off);
15845 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15846 NULL_TREE, NULL_TREE);
15852 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15853 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15854 && type == TREE_TYPE (TREE_TYPE (subtype))
15855 && (!in_gimple_form
15856 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15858 tree type_domain;
15859 tree min_val = size_zero_node;
15860 sub = build_fold_indirect_ref_loc (loc, sub);
15861 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15862 if (type_domain && TYPE_MIN_VALUE (type_domain))
15863 min_val = TYPE_MIN_VALUE (type_domain);
15864 if (in_gimple_form
15865 && TREE_CODE (min_val) != INTEGER_CST)
15866 return NULL_TREE;
15867 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15868 NULL_TREE);
15871 return NULL_TREE;
15874 /* Builds an expression for an indirection through T, simplifying some
15875 cases. */
15877 tree
15878 build_fold_indirect_ref_loc (location_t loc, tree t)
15880 tree type = TREE_TYPE (TREE_TYPE (t));
15881 tree sub = fold_indirect_ref_1 (loc, type, t);
15883 if (sub)
15884 return sub;
15886 return build1_loc (loc, INDIRECT_REF, type, t);
15889 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15891 tree
15892 fold_indirect_ref_loc (location_t loc, tree t)
15894 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15896 if (sub)
15897 return sub;
15898 else
15899 return t;
15902 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15903 whose result is ignored. The type of the returned tree need not be
15904 the same as the original expression. */
15906 tree
15907 fold_ignored_result (tree t)
15909 if (!TREE_SIDE_EFFECTS (t))
15910 return integer_zero_node;
15912 for (;;)
15913 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15915 case tcc_unary:
15916 t = TREE_OPERAND (t, 0);
15917 break;
15919 case tcc_binary:
15920 case tcc_comparison:
15921 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15922 t = TREE_OPERAND (t, 0);
15923 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15924 t = TREE_OPERAND (t, 1);
15925 else
15926 return t;
15927 break;
15929 case tcc_expression:
15930 switch (TREE_CODE (t))
15932 case COMPOUND_EXPR:
15933 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15934 return t;
15935 t = TREE_OPERAND (t, 0);
15936 break;
15938 case COND_EXPR:
15939 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15940 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15941 return t;
15942 t = TREE_OPERAND (t, 0);
15943 break;
15945 default:
15946 return t;
15948 break;
15950 default:
15951 return t;
15955 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
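/* For a power-of-two divisor this reduces to bit manipulation:
   rounding VALUE up to a multiple of 8 computes (VALUE + 7) & -8, as
   in the pow2_or_zerop branch below; other divisors fall back to a
   CEIL_DIV_EXPR followed by a MULT_EXPR. */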
15957 tree
15958 round_up_loc (location_t loc, tree value, unsigned int divisor)
15960 tree div = NULL_TREE;
15962 if (divisor == 1)
15963 return value;
15965 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15966 have to do anything. Only do this when we are not given a const,
15967 because in that case, this check is more expensive than just
15968 doing it. */
15969 if (TREE_CODE (value) != INTEGER_CST)
15971 div = build_int_cst (TREE_TYPE (value), divisor);
15973 if (multiple_of_p (TREE_TYPE (value), value, div))
15974 return value;
15977 /* If divisor is a power of two, simplify this to bit manipulation. */
15978 if (pow2_or_zerop (divisor))
15980 if (TREE_CODE (value) == INTEGER_CST)
15982 wide_int val = wi::to_wide (value);
15983 bool overflow_p;
15985 if ((val & (divisor - 1)) == 0)
15986 return value;
15988 overflow_p = TREE_OVERFLOW (value);
15989 val += divisor - 1;
15990 val &= (int) -divisor;
15991 if (val == 0)
15992 overflow_p = true;
15994 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
15996 else
15998 tree t;
16000 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16001 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16002 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16003 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16006 else
16008 if (!div)
16009 div = build_int_cst (TREE_TYPE (value), divisor);
16010 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16011 value = size_binop_loc (loc, MULT_EXPR, value, div);
16014 return value;
16017 /* Likewise, but round down. */
16019 tree
16020 round_down_loc (location_t loc, tree value, int divisor)
16022 tree div = NULL_TREE;
16024 gcc_assert (divisor > 0);
16025 if (divisor == 1)
16026 return value;
16028 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16029 have to do anything. Only do this when we are not given a const,
16030 because in that case, this check is more expensive than just
16031 doing it. */
16032 if (TREE_CODE (value) != INTEGER_CST)
16034 div = build_int_cst (TREE_TYPE (value), divisor);
16036 if (multiple_of_p (TREE_TYPE (value), value, div))
16037 return value;
16040 /* If divisor is a power of two, simplify this to bit manipulation. */
16041 if (pow2_or_zerop (divisor))
16043 tree t;
16045 t = build_int_cst (TREE_TYPE (value), -divisor);
16046 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16048 else
16050 if (!div)
16051 div = build_int_cst (TREE_TYPE (value), divisor);
16052 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16053 value = size_binop_loc (loc, MULT_EXPR, value, div);
16056 return value;
16059 /* Returns the pointer to the base of the object addressed by EXP and
16060 extracts the information about the offset of the access, storing it
16061 to PBITPOS and POFFSET. */
16063 static tree
16064 split_address_to_core_and_offset (tree exp,
16065 poly_int64_pod *pbitpos, tree *poffset)
16067 tree core;
16068 machine_mode mode;
16069 int unsignedp, reversep, volatilep;
16070 poly_int64 bitsize;
16071 location_t loc = EXPR_LOCATION (exp);
16073 if (TREE_CODE (exp) == ADDR_EXPR)
16075 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16076 poffset, &mode, &unsignedp, &reversep,
16077 &volatilep);
16078 core = build_fold_addr_expr_loc (loc, core);
16080 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
16082 core = TREE_OPERAND (exp, 0);
16083 STRIP_NOPS (core);
16084 *pbitpos = 0;
16085 *poffset = TREE_OPERAND (exp, 1);
16086 if (poly_int_tree_p (*poffset))
16088 poly_offset_int tem
16089 = wi::sext (wi::to_poly_offset (*poffset),
16090 TYPE_PRECISION (TREE_TYPE (*poffset)));
16091 tem <<= LOG2_BITS_PER_UNIT;
16092 if (tem.to_shwi (pbitpos))
16093 *poffset = NULL_TREE;
16096 else
16098 core = exp;
16099 *pbitpos = 0;
16100 *poffset = NULL_TREE;
16103 return core;
16106 /* Returns true if addresses of E1 and E2 differ by a constant, false
16107 otherwise. If they do, E1 - E2 is stored in *DIFF. */
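/* For example, &a[3] and &a[1] share the core &a and differ by
   2 * sizeof (*a) bytes, so that constant is stored in *DIFF and
   true is returned; &a[i] and &b[0] have different cores, so the
   function returns false. */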
16109 bool
16110 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
16112 tree core1, core2;
16113 poly_int64 bitpos1, bitpos2;
16114 tree toffset1, toffset2, tdiff, type;
16116 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16117 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16119 poly_int64 bytepos1, bytepos2;
16120 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
16121 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
16122 || !operand_equal_p (core1, core2, 0))
16123 return false;
16125 if (toffset1 && toffset2)
16127 type = TREE_TYPE (toffset1);
16128 if (type != TREE_TYPE (toffset2))
16129 toffset2 = fold_convert (type, toffset2);
16131 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16132 if (!cst_and_fits_in_hwi (tdiff))
16133 return false;
16135 *diff = int_cst_value (tdiff);
16137 else if (toffset1 || toffset2)
16139 /* If only one of the offsets is non-constant, the difference cannot
16140 be a constant. */
16141 return false;
16143 else
16144 *diff = 0;
16146 *diff += bytepos1 - bytepos2;
16147 return true;
16150 /* Return OFF converted to a pointer offset type suitable as offset for
16151 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16152 tree
16153 convert_to_ptrofftype_loc (location_t loc, tree off)
16155 if (ptrofftype_p (TREE_TYPE (off)))
16156 return off;
16157 return fold_convert_loc (loc, sizetype, off);
16160 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16161 tree
16162 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16164 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16165 ptr, convert_to_ptrofftype_loc (loc, off));
16168 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16169 tree
16170 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16172 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16173 ptr, size_int (off));
16176 /* Return a pointer to a NUL-terminated string containing the sequence
16177 of bytes corresponding to the representation of the object referred to
16178 by SRC (or a subsequence of such bytes within it if SRC is a reference
16179 to an initialized constant array plus some constant offset).
16180 Set *STRSIZE the number of bytes in the constant sequence including
16181 the terminating NUL byte. *STRSIZE is equal to sizeof(A) - OFFSET
16182 where A is the array that stores the constant sequence that SRC points
16183 to and OFFSET is the byte offset of SRC from the beginning of A. SRC
16184 need not point to a string or even an array of characters but may point
16185 to an object of any type. */
16187 const char *
16188 getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
16190 /* The offset into the array A storing the string, and A's byte size. */
16191 tree offset_node;
16192 tree mem_size;
16194 if (strsize)
16195 *strsize = 0;
16197 if (strsize)
16198 src = byte_representation (src, &offset_node, &mem_size, NULL);
16199 else
16200 src = string_constant (src, &offset_node, &mem_size, NULL);
16201 if (!src)
16202 return NULL;
16204 unsigned HOST_WIDE_INT offset = 0;
16205 if (offset_node != NULL_TREE)
16207 if (!tree_fits_uhwi_p (offset_node))
16208 return NULL;
16209 else
16210 offset = tree_to_uhwi (offset_node);
16213 if (!tree_fits_uhwi_p (mem_size))
16214 return NULL;
16216 /* ARRAY_SIZE is the byte size of the array the constant sequence
16217 is stored in and equal to sizeof A. INIT_BYTES is the number
16218 of bytes in the constant sequence used to initialize the array,
16219 including any embedded NULs as well as the terminating NUL (for
16220 strings), but not including any trailing zeros/NULs past
16221 the terminating one appended implicitly to a string literal to
16222 zero out the remainder of the array it's stored in. For example,
16223 given:
16224 const char a[7] = "abc\0d";
16225 n = strlen (a + 1);
16226 ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1. For a valid
16227 (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
16228 is equal to strlen (A) + 1. */
16229 const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
16230 unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
16231 const char *string = TREE_STRING_POINTER (src);
16233 /* Ideally this would turn into a gcc_checking_assert over time. */
16234 if (init_bytes > array_size)
16235 init_bytes = array_size;
16237 if (init_bytes == 0 || offset >= array_size)
16238 return NULL;
16240 if (strsize)
16242 /* Compute and store the number of characters from the beginning
16243 of the substring at OFFSET to the end, including the terminating
16244 nul. Offsets past the initial length refer to null strings. */
16245 if (offset < init_bytes)
16246 *strsize = init_bytes - offset;
16247 else
16248 *strsize = 1;
16250 else
16252 tree eltype = TREE_TYPE (TREE_TYPE (src));
16253 /* Support only properly NUL-terminated single byte strings. */
16254 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
16255 return NULL;
16256 if (string[init_bytes - 1] != '\0')
16257 return NULL;
16260 return offset < init_bytes ? string + offset : "";
16263 /* Return a pointer to a NUL-terminated string corresponding to
16264 the expression STR referencing a constant string, possibly
16265 involving a constant offset. Return null if STR either doesn't
16266 reference a constant string or if it involves a nonconstant
16267 offset. */
16269 const char *
16270 c_getstr (tree str)
16272 return getbyterep (str, NULL);
16275 /* Given a tree T, compute which bits in T may be nonzero. */
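/* For example, for x & 0xff only the low eight bits may be nonzero,
   and for (x & 0xff) << 4 the possibly-nonzero mask becomes 0xff0;
   for expressions it cannot analyze it returns an all-ones mask. */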
16277 wide_int
16278 tree_nonzero_bits (const_tree t)
16280 switch (TREE_CODE (t))
16282 case INTEGER_CST:
16283 return wi::to_wide (t);
16284 case SSA_NAME:
16285 return get_nonzero_bits (t);
16286 case NON_LVALUE_EXPR:
16287 case SAVE_EXPR:
16288 return tree_nonzero_bits (TREE_OPERAND (t, 0));
16289 case BIT_AND_EXPR:
16290 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16291 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16292 case BIT_IOR_EXPR:
16293 case BIT_XOR_EXPR:
16294 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16295 tree_nonzero_bits (TREE_OPERAND (t, 1)));
16296 case COND_EXPR:
16297 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
16298 tree_nonzero_bits (TREE_OPERAND (t, 2)));
16299 CASE_CONVERT:
16300 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
16301 TYPE_PRECISION (TREE_TYPE (t)),
16302 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
16303 case PLUS_EXPR:
16304 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	  wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
	  if (wi::bit_and (nzbits1, nzbits2) == 0)
	    return wi::bit_or (nzbits1, nzbits2);
	}
      break;
    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
				       TYPE_PRECISION (type));
	  return wi::neg_p (arg1)
		 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
		 : wi::lshift (nzbits, arg1);
	}
      break;
    case RSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
				       TYPE_PRECISION (type));
	  return wi::neg_p (arg1)
		 ? wi::lshift (nzbits, -arg1)
		 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
	}
      break;
    default:
      break;
    }

  return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
}
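
/* Worked example (an illustrative note, not in the original source):
   if X is known to have nonzero bits 0x0f and Y nonzero bits 0xf0,
   then for X + Y the PLUS_EXPR case fires -- the masks are disjoint,
   so no carries are possible and the result's nonzero bits are 0xff,
   just as for X | Y.  For X << 4 the LSHIFT_EXPR case shifts the
   mask, giving 0xf0.  Anything unhandled falls through to the
   all-bits-set mask returned above.  */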

#if CHECKING_P

namespace selftest {

/* Helper functions for writing tests of folding trees.  */

/* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */

static void
assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
			     tree constant)
{
  ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
}
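
/* Note (added for clarity): ASSERT_EQ compares tree pointers, which
   suffices here because small INTEGER_CSTs are cached and shared, so a
   successful fold returns the very node passed in as CONSTANT.  */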

/* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
   wrapping WRAPPED_EXPR.  */

static void
assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
				 tree wrapped_expr)
{
  tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
  ASSERT_NE (wrapped_expr, result);
  ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
  ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
}

/* Verify that various arithmetic binary operations are folded
   correctly.  */

static void
test_arithmetic_folding ()
{
  tree type = integer_type_node;
  tree x = create_tmp_var_raw (type, "x");
  tree zero = build_zero_cst (type);
  tree one = build_int_cst (type, 1);

  /* Addition.  */
  /* 1 <-- (0 + 1) */
  assert_binop_folds_to_const (zero, PLUS_EXPR, one,
			       one);
  assert_binop_folds_to_const (one, PLUS_EXPR, zero,
			       one);

  /* (nonlvalue)x <-- (x + 0) */
  assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
				   x);

  /* Subtraction.  */
  /* 0 <-- (x - x) */
  assert_binop_folds_to_const (x, MINUS_EXPR, x,
			       zero);
  assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
				   x);

  /* Multiplication.  */
  /* 0 <-- (x * 0) */
  assert_binop_folds_to_const (x, MULT_EXPR, zero,
			       zero);

  /* (nonlvalue)x <-- (x * 1) */
  assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
				   x);
}

/* Verify that various binary operations on vectors are folded
   correctly.  */

static void
test_vector_folding ()
{
  tree inner_type = integer_type_node;
  tree type = build_vector_type (inner_type, 4);
  tree zero = build_zero_cst (type);
  tree one = build_one_cst (type);
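  /* An index vector with base 0 and step 1, i.e. {0, 1, 2, 3} for this
     four-element type (descriptive comment added for clarity).  */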
  tree index = build_index_vector (type, 0, 1);

  /* Verify equality tests that return a scalar boolean result.  */
  tree res_type = boolean_type_node;
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
					       index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
					       index, index)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
					      index, index)));
}

/* Verify folding of VEC_DUPLICATE_EXPRs.  */

static void
test_vec_duplicate_folding ()
{
  scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
  machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
  /* This will be 1 if VEC_MODE isn't a vector mode.  */
  poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);

  tree type = build_vector_type (ssizetype, nunits);
  tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
  tree dup5_cst = build_vector_from_val (type, ssize_int (5));
  ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
}

/* Run all of the selftests within this file.  */

void
fold_const_c_tests ()
{
  test_arithmetic_folding ();
  test_vector_folding ();
  test_vec_duplicate_folding ();
}

} // namespace selftest

#endif /* CHECKING_P */
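
/* Context note (added, not in the original source): these selftests are
   compiled in only when CHECKING_P and run under GCC's self-test
   framework, e.g. via "make selftest" in the gcc build directory, which
   invokes the compiler with the -fself-test= developer option.  */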