/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "attribs.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
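
/* The encoding uses one bit per outcome: 1 for "less", 2 for "equal",
   4 for "greater" and 8 for "unordered", so for example COMPCODE_LE
   == (COMPCODE_LT | COMPCODE_EQ) and COMPCODE_NE
   == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD).  OR-ing (AND-ing)
   two codes therefore yields the code of the disjunction (conjunction)
   of the two comparisons.  */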

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static bool twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static bool simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
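
/* Example: div_if_zero_remainder on the integer constants 12 and 4
   returns the constant 3; on 12 and 5 it returns NULL_TREE because the
   division is not exact.  */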

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
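
/* A sketch of the intended pairing (the loop iteration estimators
   mentioned above follow this pattern):

     fold_defer_overflow_warnings ();
     tree folded = fold_binary (cond_code, boolean_type_node, op0, op1);
     ...decide whether FOLDED is actually used...
     fold_undefer_overflow_warnings (folded_is_used, stmt, 0);

   Passing CODE == 0 to the undefer routine means "use whatever level
   the deferred warning itself carried".  */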

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
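
/* For example, sin (-x) == -sin (x), so the sine cases appear above,
   while cos (-x) == cos (x) makes cosine even rather than odd, so it
   does not.  The rint/nearbyint family is odd only when -frounding-math
   is off: under a dynamic rounding mode such as FE_UPWARD, rint (-x)
   need not equal -rint (x).  */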

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, while negating one operand does
	 if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
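
/* Example: with -fwrapv (so TYPE_OVERFLOW_WRAPS holds for signed int),
   negate_expr rewrites -(a + 10) as (-10) - a through the PLUS_EXPR
   case above: the constant operand is trivially negatable, so
   negate_expr_p approves and fold_negate_expr_1 builds the MINUS_EXPR.  */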

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except if it is a
   literal, for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
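
/* Example: splitting IN = x + 3 with CODE == PLUS_EXPR stores the
   INTEGER_CST 3 in *LITP and returns x as the variable part, leaving
   the other outputs null; for IN = x - 3 the literal goes to
   *MINUS_LITP instead, recording that it was subtracted.  */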

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
   a new constant in RES.  Return FALSE if we don't know how to
   evaluate CODE at compile-time.  */

bool
wide_int_binop (wide_int &res,
		enum tree_code code, const wide_int &arg1, const wide_int &arg2,
		signop sign, wi::overflow_type *overflow)
{
  wide_int tmp;
  *overflow = wi::OVF_NONE;
  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}
      else
	tmp = arg2;

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, tmp, sign);
      else
	res = wi::lshift (arg1, tmp);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}
      else
	tmp = arg2;

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, tmp);
      else
	res = wi::lrotate (arg1, tmp);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_round (arg1, arg2, sign, overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_round (arg1, arg2, sign, overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return false;
    }
  return true;
}
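
/* Note the normalization above: a shift or rotate by a negative amount
   is folded as the opposite operation on the absolute value, so
   ARG1 << -3 is evaluated as ARG1 >> 3.  The division and modulus
   cases refuse to fold (return false) for a zero divisor, leaving any
   runtime trap in place.  */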

/* Combine two poly int's ARG1 and ARG2 under operation CODE to
   produce a new constant in RES.  Return FALSE if we don't know how
   to evaluate CODE at compile-time.  */

static bool
poly_int_binop (poly_wide_int &res, enum tree_code code,
		const_tree arg1, const_tree arg2,
		signop sign, wi::overflow_type *overflow)
{
  gcc_assert (NUM_POLY_INT_COEFFS != 1);
  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
  switch (code)
    {
    case PLUS_EXPR:
      res = wi::add (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MULT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg1),
		       wi::to_wide (arg2), sign, overflow);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg2),
		       wi::to_wide (arg1), sign, overflow);
      else
	return false;
      break;

    case LSHIFT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
      else
	return false;
      break;

    case BIT_IOR_EXPR:
      if (TREE_CODE (arg2) != INTEGER_CST
	  || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			 &res))
	return false;
      break;

    default:
      return false;
    }
  return true;
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new constant.  Return NULL_TREE if we don't know how to
   evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
		 int overflowable)
{
  poly_wide_int poly_res;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      wide_int warg1 = wi::to_wide (arg1), res;
      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
      if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
	return NULL_TREE;
      poly_res = res;
    }
  else if (!poly_int_tree_p (arg1)
	   || !poly_int_tree_p (arg2)
	   || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
    return NULL_TREE;
  return force_fit_type (type, poly_res, overflowable,
			 (((sign == SIGNED || overflowable == -1)
			   && overflow)
			  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
}
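
/* Example: for two 32-bit signed INTEGER_CSTs, int_const_binop
   (PLUS_EXPR, c1, c2) adds them at the type's precision via
   wide_int_binop; if the signed sum wraps, the constant is still
   produced but force_fit_type marks it with TREE_OVERFLOW, so callers
   can tell the fold was not value-preserving.  */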

/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
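
/* LSHIFT_EXPR only distributes in operand 1: modulo wrapping,
   (a + b) << c == (a << c) + (b << c), but c << (a + b) has no such
   decomposition, hence the OPNO == 1 test above.  */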

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make the resulting NaN a qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make the resulting NaN a qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	     a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
	   implies
	     (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	     a3 - a2 == a2 - a1
	   implies:
	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
      else
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}
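
/* A typical path through const_binop: folding 1.5 + 2.5 dispatches to
   real_arithmetic, which computes 4.0, and build_real wraps the result
   as a REAL_CST.  The early NULL_TREE returns above deliberately refuse
   to fold when folding would hide a runtime effect: an inexact result
   under -frounding-math, a division by zero, or an overflow to infinity
   under -ftrapping-math.  */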

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case VEC_SERIES_EXPR:
      if (CONSTANT_CLASS_P (arg1)
	  && CONSTANT_CLASS_P (arg2))
	return build_vec_series (type, arg1, arg2);
      return NULL_TREE;

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
	{
	  poly_offset_int res = (wi::to_poly_offset (arg1)
				 - wi::to_poly_offset (arg2));
	  return force_fit_type (type, res, 1,
				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
	}
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;

	out_nelts = in_nelts * 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      ? NOP_EXPR
				      : code == VEC_PACK_FLOAT_EXPR
				      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	tree_vector_builder elts (type, out_nelts, 1);
	for (out = 0; out < out_nelts; out++)
	  {
	    unsigned int in = (out << scale) + ofs;
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}

/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR
      && code != ABSU_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
    case ABSU_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (POLY_INT_CST_P (arg0))
	return wide_int_to_tree (type, -poly_int_cst_value (arg0));
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elem;

	  /* This can cope with stepped encodings because ~x == -1 - x.  */
	  tree_vector_builder elements;
	  elements.new_unary_operation (type, arg0, true);
	  unsigned int i, count = elements.encoded_nelts ();
	  for (i = 0; i < count; ++i)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements.quick_push (elem);
	    }
	  if (i == count)
	    return elements.build ();
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	unsigned int offset = 0;
	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR
				   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
	  offset = out_nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else if (code == VEC_UNPACK_FLOAT_LO_EXPR
		 || code == VEC_UNPACK_FLOAT_HI_EXPR)
	  subcode = FLOAT_EXPR;
	else
	  subcode = FIX_TRUNC_EXPR;

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
					   VECTOR_CST_ELT (arg0, i + offset));
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_DUPLICATE_EXPR:
      if (CONSTANT_CLASS_P (arg0))
	return build_vector_from_val (type, arg0);
      return NULL_TREE;

    default:
      break;
    }

  return NULL_TREE;
}
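
/* Example: const_unop (BIT_NOT_EXPR, type, v) on a stepped VECTOR_CST
   such as { 0, 1, 2, ... } can work directly on the encoded elements,
   since ~x == -1 - x maps an arithmetic series to another arithmetic
   series (see the comment in the VECTOR_CST arm above).  */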

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (poly_int64 number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two poly_int constants faster.  */
  if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
	    return arg1;
	  if (integer_zerop (arg1)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0)
	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      tree res = int_const_binop (code, arg0, arg1, -1);
      if (res != NULL_TREE)
	return res;
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
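
/* Example: size_binop (PLUS_EXPR, size_int (4), size_int (8)) folds
   immediately to the sizetype constant 12, while adding size_zero_node
   to any operand short-circuits above and returns the other operand
   unchanged.  */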
1946 /* Given two values, either both of sizetype or both of bitsizetype,
1947 compute the difference between the two values. Return the value
1948 in signed type corresponding to the type of the operands. */
1950 tree
1951 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1953 tree type = TREE_TYPE (arg0);
1954 tree ctype;
1956 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1957 TREE_TYPE (arg1)));
1959 /* If the type is already signed, just do the simple thing. */
1960 if (!TYPE_UNSIGNED (type))
1961 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1963 if (type == sizetype)
1964 ctype = ssizetype;
1965 else if (type == bitsizetype)
1966 ctype = sbitsizetype;
1967 else
1968 ctype = signed_type_for (type);
1970 /* If either operand is not a constant, do the conversions to the signed
1971 type and subtract. The hardware will do the right thing with any
1972 overflow in the subtraction. */
1973 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1974 return size_binop_loc (loc, MINUS_EXPR,
1975 fold_convert_loc (loc, ctype, arg0),
1976 fold_convert_loc (loc, ctype, arg1));
1978 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1979 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1980 overflow) and negate (which can't either). Special-case a result
1981 of zero while we're here. */
1982 if (tree_int_cst_equal (arg0, arg1))
1983 return build_int_cst (ctype, 0);
1984 else if (tree_int_cst_lt (arg1, arg0))
1985 return fold_convert_loc (loc, ctype,
1986 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1987 else
1988 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1989 fold_convert_loc (loc, ctype,
1990 size_binop_loc (loc,
1991 MINUS_EXPR,
1992 arg1, arg0)));
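/* Worked example: with arg0 = (sizetype) 4 and arg1 = (sizetype) 12 the
   result type is ssizetype; since arg1 > arg0 we compute arg1 - arg0 = 8
   in sizetype, convert that to ssizetype and subtract it from zero,
   giving the ssizetype constant -8 with no overflow at any step.  */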
1995 /* A subroutine of fold_convert_const handling conversions of an
1996 INTEGER_CST to another integer type. */
1998 static tree
1999 fold_convert_const_int_from_int (tree type, const_tree arg1)
2001 /* Given an integer constant, make a new constant with the new type,
2002 appropriately sign-extended or truncated. Use widest_int
2003 so that any extension is done according to ARG1's type. */
2004 return force_fit_type (type, wi::to_widest (arg1),
2005 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2006 TREE_OVERFLOW (arg1));
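/* For instance, converting the unsigned char constant 255 to signed char
   yields -1, and converting the signed char constant -1 to unsigned short
   yields 65535; in both cases the value is first widened according to the
   signedness of ARG1's type and then refitted to TYPE.  */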
2009 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2010 to an integer type. */
2012 static tree
2013 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2015 bool overflow = false;
2016 tree t;
2018 /* The following code implements the floating point to integer
2019 conversion rules required by the Java Language Specification,
2020 that IEEE NaNs are mapped to zero and values that overflow
2021 the target precision saturate, i.e. values greater than
2022 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2023 are mapped to INT_MIN. These semantics are allowed by the
2024 C and C++ standards that simply state that the behavior of
2025 FP-to-integer conversion is unspecified upon overflow. */
2027 wide_int val;
2028 REAL_VALUE_TYPE r;
2029 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2031 switch (code)
2033 case FIX_TRUNC_EXPR:
2034 real_trunc (&r, VOIDmode, &x);
2035 break;
2037 default:
2038 gcc_unreachable ();
2041 /* If R is NaN, return zero and show we have an overflow. */
2042 if (REAL_VALUE_ISNAN (r))
2044 overflow = true;
2045 val = wi::zero (TYPE_PRECISION (type));
2048 /* See if R is less than the lower bound or greater than the
2049 upper bound. */
2051 if (! overflow)
2053 tree lt = TYPE_MIN_VALUE (type);
2054 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2055 if (real_less (&r, &l))
2057 overflow = true;
2058 val = wi::to_wide (lt);
2062 if (! overflow)
2064 tree ut = TYPE_MAX_VALUE (type);
2065 if (ut)
2067 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2068 if (real_less (&u, &r))
2070 overflow = true;
2071 val = wi::to_wide (ut);
2076 if (! overflow)
2077 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2079 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2080 return t;
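/* Sketch of the saturating behavior above for a 32-bit signed TYPE
   (values are illustrative):

     3.75  => 3             truncated toward zero
     1e12  => 2147483647    clamped to TYPE_MAX_VALUE, TREE_OVERFLOW set
     NaN   => 0             TREE_OVERFLOW set  */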
2083 /* A subroutine of fold_convert_const handling conversions of a
2084 FIXED_CST to an integer type. */
2086 static tree
2087 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2089 tree t;
2090 double_int temp, temp_trunc;
2091 scalar_mode mode;
2093 /* Right shift FIXED_CST to temp by fbit. */
2094 temp = TREE_FIXED_CST (arg1).data;
2095 mode = TREE_FIXED_CST (arg1).mode;
2096 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2098 temp = temp.rshift (GET_MODE_FBIT (mode),
2099 HOST_BITS_PER_DOUBLE_INT,
2100 SIGNED_FIXED_POINT_MODE_P (mode));
2102 /* Left shift temp to temp_trunc by fbit. */
2103 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2104 HOST_BITS_PER_DOUBLE_INT,
2105 SIGNED_FIXED_POINT_MODE_P (mode));
2107 else
2109 temp = double_int_zero;
2110 temp_trunc = double_int_zero;
2113 /* If FIXED_CST is negative, we need to round the value toward 0.
2114 We do this by adding 1 to TEMP when the discarded fractional bits are nonzero. */
2115 if (SIGNED_FIXED_POINT_MODE_P (mode)
2116 && temp_trunc.is_negative ()
2117 && TREE_FIXED_CST (arg1).data != temp_trunc)
2118 temp += double_int_one;
2120 /* Given a fixed-point constant, make a new constant with the new type,
2121 appropriately sign-extended or truncated. */
2122 t = force_fit_type (type, temp, -1,
2123 (temp.is_negative ()
2124 && (TYPE_UNSIGNED (type)
2125 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2126 | TREE_OVERFLOW (arg1));
2128 return t;
2131 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2132 to another floating-point type. */
2134 static tree
2135 fold_convert_const_real_from_real (tree type, const_tree arg1)
2137 REAL_VALUE_TYPE value;
2138 tree t;
2140 /* Don't perform the operation if flag_signaling_nans is on
2141 and the operand is a signaling NaN. */
2142 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2143 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2144 return NULL_TREE;
2146 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2147 t = build_real (type, value);
2149 /* If converting an infinity or NAN to a representation that doesn't
2150 have one, set the overflow bit so that we can produce some kind of
2151 error message at the appropriate point if necessary. It's not the
2152 most user-friendly message, but it's better than nothing. */
2153 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2154 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2155 TREE_OVERFLOW (t) = 1;
2156 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2157 && !MODE_HAS_NANS (TYPE_MODE (type)))
2158 TREE_OVERFLOW (t) = 1;
2159 /* Regular overflow, conversion produced an infinity in a mode that
2160 can't represent them. */
2161 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2162 && REAL_VALUE_ISINF (value)
2163 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2164 TREE_OVERFLOW (t) = 1;
2165 else
2166 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2167 return t;
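/* Two consequences worth noting (a sketch, assuming IEEE single and double):
   converting DBL_MAX to float rounds to +Inf without setting TREE_OVERFLOW,
   because the float mode has infinities; and with -fsignaling-nans a
   signaling-NaN constant is deliberately not folded, so the conversion
   survives to run time where it can raise the invalid-operation
   exception.  */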
2170 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2171 to a floating-point type. */
2173 static tree
2174 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2176 REAL_VALUE_TYPE value;
2177 tree t;
2179 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2180 &TREE_FIXED_CST (arg1));
2181 t = build_real (type, value);
2183 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2184 return t;
2187 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2188 to another fixed-point type. */
2190 static tree
2191 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2193 FIXED_VALUE_TYPE value;
2194 tree t;
2195 bool overflow_p;
2197 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2198 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2199 t = build_fixed (type, value);
2201 /* Propagate overflow flags. */
2202 if (overflow_p | TREE_OVERFLOW (arg1))
2203 TREE_OVERFLOW (t) = 1;
2204 return t;
2207 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2208 to a fixed-point type. */
2210 static tree
2211 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2213 FIXED_VALUE_TYPE value;
2214 tree t;
2215 bool overflow_p;
2216 double_int di;
2218 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2220 di.low = TREE_INT_CST_ELT (arg1, 0);
2221 if (TREE_INT_CST_NUNITS (arg1) == 1)
2222 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2223 else
2224 di.high = TREE_INT_CST_ELT (arg1, 1);
2226 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2227 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2228 TYPE_SATURATING (type));
2229 t = build_fixed (type, value);
2231 /* Propagate overflow flags. */
2232 if (overflow_p | TREE_OVERFLOW (arg1))
2233 TREE_OVERFLOW (t) = 1;
2234 return t;
2237 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2238 to a fixed-point type. */
2240 static tree
2241 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2243 FIXED_VALUE_TYPE value;
2244 tree t;
2245 bool overflow_p;
2247 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2248 &TREE_REAL_CST (arg1),
2249 TYPE_SATURATING (type));
2250 t = build_fixed (type, value);
2252 /* Propagate overflow flags. */
2253 if (overflow_p | TREE_OVERFLOW (arg1))
2254 TREE_OVERFLOW (t) = 1;
2255 return t;
2258 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2259 type TYPE. If no simplification can be done return NULL_TREE. */
2261 static tree
2262 fold_convert_const (enum tree_code code, tree type, tree arg1)
2264 tree arg_type = TREE_TYPE (arg1);
2265 if (arg_type == type)
2266 return arg1;
2268 /* We can't widen types, since the runtime value could overflow the
2269 original type before being extended to the new type. */
2270 if (POLY_INT_CST_P (arg1)
2271 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2272 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2273 return build_poly_int_cst (type,
2274 poly_wide_int::from (poly_int_cst_value (arg1),
2275 TYPE_PRECISION (type),
2276 TYPE_SIGN (arg_type)));
2278 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2279 || TREE_CODE (type) == OFFSET_TYPE)
2281 if (TREE_CODE (arg1) == INTEGER_CST)
2282 return fold_convert_const_int_from_int (type, arg1);
2283 else if (TREE_CODE (arg1) == REAL_CST)
2284 return fold_convert_const_int_from_real (code, type, arg1);
2285 else if (TREE_CODE (arg1) == FIXED_CST)
2286 return fold_convert_const_int_from_fixed (type, arg1);
2288 else if (TREE_CODE (type) == REAL_TYPE)
2290 if (TREE_CODE (arg1) == INTEGER_CST)
2291 return build_real_from_int_cst (type, arg1);
2292 else if (TREE_CODE (arg1) == REAL_CST)
2293 return fold_convert_const_real_from_real (type, arg1);
2294 else if (TREE_CODE (arg1) == FIXED_CST)
2295 return fold_convert_const_real_from_fixed (type, arg1);
2297 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2299 if (TREE_CODE (arg1) == FIXED_CST)
2300 return fold_convert_const_fixed_from_fixed (type, arg1);
2301 else if (TREE_CODE (arg1) == INTEGER_CST)
2302 return fold_convert_const_fixed_from_int (type, arg1);
2303 else if (TREE_CODE (arg1) == REAL_CST)
2304 return fold_convert_const_fixed_from_real (type, arg1);
2306 else if (TREE_CODE (type) == VECTOR_TYPE)
2308 if (TREE_CODE (arg1) == VECTOR_CST
2309 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2311 tree elttype = TREE_TYPE (type);
2312 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2313 /* We can't handle steps directly when extending, since the
2314 values need to wrap at the original precision first. */
2315 bool step_ok_p
2316 = (INTEGRAL_TYPE_P (elttype)
2317 && INTEGRAL_TYPE_P (arg1_elttype)
2318 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2319 tree_vector_builder v;
2320 if (!v.new_unary_operation (type, arg1, step_ok_p))
2321 return NULL_TREE;
2322 unsigned int len = v.encoded_nelts ();
2323 for (unsigned int i = 0; i < len; ++i)
2325 tree elt = VECTOR_CST_ELT (arg1, i);
2326 tree cvt = fold_convert_const (code, elttype, elt);
2327 if (cvt == NULL_TREE)
2328 return NULL_TREE;
2329 v.quick_push (cvt);
2331 return v.build ();
2334 return NULL_TREE;
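/* Dispatch examples (a sketch of typical invocations):

     fold_convert_const (NOP_EXPR, long_integer_type_node, int_cst)
       -> fold_convert_const_int_from_int
     fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, real_cst)
       -> fold_convert_const_int_from_real
     fold_convert_const (NOP_EXPR, float_type_node, real_cst)
       -> fold_convert_const_real_from_real

   Conversions this function does not handle, e.g. to a COMPLEX_TYPE,
   return NULL_TREE and are left to the caller.  */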
2337 /* Construct a vector of zero elements of vector type TYPE. */
2339 static tree
2340 build_zero_vector (tree type)
2342 tree t;
2344 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2345 return build_vector_from_val (type, t);
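/* E.g. for a vector type with four int elements this returns the
   VECTOR_CST { 0, 0, 0, 0 }: integer_zero_node is converted to the
   element type and broadcast by build_vector_from_val.  */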
2348 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2350 bool
2351 fold_convertible_p (const_tree type, const_tree arg)
2353 tree orig = TREE_TYPE (arg);
2355 if (type == orig)
2356 return true;
2358 if (TREE_CODE (arg) == ERROR_MARK
2359 || TREE_CODE (type) == ERROR_MARK
2360 || TREE_CODE (orig) == ERROR_MARK)
2361 return false;
2363 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2364 return true;
2366 switch (TREE_CODE (type))
2368 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2369 case POINTER_TYPE: case REFERENCE_TYPE:
2370 case OFFSET_TYPE:
2371 return (INTEGRAL_TYPE_P (orig)
2372 || (POINTER_TYPE_P (orig)
2373 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2374 || TREE_CODE (orig) == OFFSET_TYPE);
2376 case REAL_TYPE:
2377 case FIXED_POINT_TYPE:
2378 case VOID_TYPE:
2379 return TREE_CODE (type) == TREE_CODE (orig);
2381 case VECTOR_TYPE:
2382 return (VECTOR_TYPE_P (orig)
2383 && known_eq (TYPE_VECTOR_SUBPARTS (type),
2384 TYPE_VECTOR_SUBPARTS (orig))
2385 && fold_convertible_p (TREE_TYPE (type), TREE_TYPE (orig)));
2387 default:
2388 return false;
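/* Sample verdicts (illustrative):

     integer type  <-  wider integer expression    true
     integer type  <-  pointer of >= precision     true
     double        <-  integer expression          false (needs FLOAT_EXPR)
     V4SI          <-  V4SF of equal subparts      false (element types differ)

   i.e. only conversions expressible as a plain NOP_EXPR qualify.  */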
2392 /* Convert expression ARG to type TYPE. Used by the middle-end for
2393 simple conversions in preference to calling the front-end's convert. */
2395 tree
2396 fold_convert_loc (location_t loc, tree type, tree arg)
2398 tree orig = TREE_TYPE (arg);
2399 tree tem;
2401 if (type == orig)
2402 return arg;
2404 if (TREE_CODE (arg) == ERROR_MARK
2405 || TREE_CODE (type) == ERROR_MARK
2406 || TREE_CODE (orig) == ERROR_MARK)
2407 return error_mark_node;
2409 switch (TREE_CODE (type))
2411 case POINTER_TYPE:
2412 case REFERENCE_TYPE:
2413 /* Handle conversions between pointers to different address spaces. */
2414 if (POINTER_TYPE_P (orig)
2415 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2416 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2417 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2418 /* fall through */
2420 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2421 case OFFSET_TYPE:
2422 if (TREE_CODE (arg) == INTEGER_CST)
2424 tem = fold_convert_const (NOP_EXPR, type, arg);
2425 if (tem != NULL_TREE)
2426 return tem;
2428 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2429 || TREE_CODE (orig) == OFFSET_TYPE)
2430 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2431 if (TREE_CODE (orig) == COMPLEX_TYPE)
2432 return fold_convert_loc (loc, type,
2433 fold_build1_loc (loc, REALPART_EXPR,
2434 TREE_TYPE (orig), arg));
2435 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2436 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2437 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2439 case REAL_TYPE:
2440 if (TREE_CODE (arg) == INTEGER_CST)
2442 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2443 if (tem != NULL_TREE)
2444 return tem;
2446 else if (TREE_CODE (arg) == REAL_CST)
2448 tem = fold_convert_const (NOP_EXPR, type, arg);
2449 if (tem != NULL_TREE)
2450 return tem;
2452 else if (TREE_CODE (arg) == FIXED_CST)
2454 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2455 if (tem != NULL_TREE)
2456 return tem;
2459 switch (TREE_CODE (orig))
2461 case INTEGER_TYPE:
2462 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2463 case POINTER_TYPE: case REFERENCE_TYPE:
2464 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2466 case REAL_TYPE:
2467 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2469 case FIXED_POINT_TYPE:
2470 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2472 case COMPLEX_TYPE:
2473 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2474 return fold_convert_loc (loc, type, tem);
2476 default:
2477 gcc_unreachable ();
2480 case FIXED_POINT_TYPE:
2481 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2482 || TREE_CODE (arg) == REAL_CST)
2484 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2485 if (tem != NULL_TREE)
2486 goto fold_convert_exit;
2489 switch (TREE_CODE (orig))
2491 case FIXED_POINT_TYPE:
2492 case INTEGER_TYPE:
2493 case ENUMERAL_TYPE:
2494 case BOOLEAN_TYPE:
2495 case REAL_TYPE:
2496 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2498 case COMPLEX_TYPE:
2499 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2500 return fold_convert_loc (loc, type, tem);
2502 default:
2503 gcc_unreachable ();
2506 case COMPLEX_TYPE:
2507 switch (TREE_CODE (orig))
2509 case INTEGER_TYPE:
2510 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2511 case POINTER_TYPE: case REFERENCE_TYPE:
2512 case REAL_TYPE:
2513 case FIXED_POINT_TYPE:
2514 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2515 fold_convert_loc (loc, TREE_TYPE (type), arg),
2516 fold_convert_loc (loc, TREE_TYPE (type),
2517 integer_zero_node));
2518 case COMPLEX_TYPE:
2520 tree rpart, ipart;
2522 if (TREE_CODE (arg) == COMPLEX_EXPR)
2524 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2525 TREE_OPERAND (arg, 0));
2526 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2527 TREE_OPERAND (arg, 1));
2528 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2531 arg = save_expr (arg);
2532 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2533 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2534 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2535 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2536 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2539 default:
2540 gcc_unreachable ();
2543 case VECTOR_TYPE:
2544 if (integer_zerop (arg))
2545 return build_zero_vector (type);
2546 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2547 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2548 || TREE_CODE (orig) == VECTOR_TYPE);
2549 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2551 case VOID_TYPE:
2552 tem = fold_ignored_result (arg);
2553 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2555 default:
2556 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2557 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2558 gcc_unreachable ();
2560 fold_convert_exit:
2561 protected_set_expr_location_unshare (tem, loc);
2562 return tem;
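/* A few illustrative folds:

     fold_convert (double_type_node, integer constant 3)  => REAL_CST 3.0
     fold_convert (integer type, complex-typed value)     => conversion of
                                        the REALPART_EXPR of the operand
     fold_convert (void_type_node, arg)  => NOP_EXPR of fold_ignored_result

   fold_convert is the UNKNOWN_LOCATION wrapper around this function.  */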
2565 /* Return false if expr can be assumed not to be an lvalue, true
2566 otherwise. */
2568 static bool
2569 maybe_lvalue_p (const_tree x)
2571 /* We only need to wrap lvalue tree codes. */
2572 switch (TREE_CODE (x))
2574 case VAR_DECL:
2575 case PARM_DECL:
2576 case RESULT_DECL:
2577 case LABEL_DECL:
2578 case FUNCTION_DECL:
2579 case SSA_NAME:
2581 case COMPONENT_REF:
2582 case MEM_REF:
2583 case INDIRECT_REF:
2584 case ARRAY_REF:
2585 case ARRAY_RANGE_REF:
2586 case BIT_FIELD_REF:
2587 case OBJ_TYPE_REF:
2589 case REALPART_EXPR:
2590 case IMAGPART_EXPR:
2591 case PREINCREMENT_EXPR:
2592 case PREDECREMENT_EXPR:
2593 case SAVE_EXPR:
2594 case TRY_CATCH_EXPR:
2595 case WITH_CLEANUP_EXPR:
2596 case COMPOUND_EXPR:
2597 case MODIFY_EXPR:
2598 case TARGET_EXPR:
2599 case COND_EXPR:
2600 case BIND_EXPR:
2601 case VIEW_CONVERT_EXPR:
2602 break;
2604 default:
2605 /* Assume the worst for front-end tree codes. */
2606 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2607 break;
2608 return false;
2611 return true;
2614 /* Return an expr equal to X but certainly not valid as an lvalue. */
2616 tree
2617 non_lvalue_loc (location_t loc, tree x)
2619 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2620 us. */
2621 if (in_gimple_form)
2622 return x;
2624 if (! maybe_lvalue_p (x))
2625 return x;
2626 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2629 /* When pedantic, return an expr equal to X but certainly not valid as a
2630 pedantic lvalue. Otherwise, return X. */
2632 static tree
2633 pedantic_non_lvalue_loc (location_t loc, tree x)
2635 return protected_set_expr_location_unshare (x, loc);
2638 /* Given a tree comparison code, return the code that is the logical inverse.
2639 It is generally not safe to do this for floating-point comparisons, except
2640 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2641 ERROR_MARK in this case. */
2643 enum tree_code
2644 invert_tree_comparison (enum tree_code code, bool honor_nans)
2646 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2647 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2648 return ERROR_MARK;
2650 switch (code)
2652 case EQ_EXPR:
2653 return NE_EXPR;
2654 case NE_EXPR:
2655 return EQ_EXPR;
2656 case GT_EXPR:
2657 return honor_nans ? UNLE_EXPR : LE_EXPR;
2658 case GE_EXPR:
2659 return honor_nans ? UNLT_EXPR : LT_EXPR;
2660 case LT_EXPR:
2661 return honor_nans ? UNGE_EXPR : GE_EXPR;
2662 case LE_EXPR:
2663 return honor_nans ? UNGT_EXPR : GT_EXPR;
2664 case LTGT_EXPR:
2665 return UNEQ_EXPR;
2666 case UNEQ_EXPR:
2667 return LTGT_EXPR;
2668 case UNGT_EXPR:
2669 return LE_EXPR;
2670 case UNGE_EXPR:
2671 return LT_EXPR;
2672 case UNLT_EXPR:
2673 return GE_EXPR;
2674 case UNLE_EXPR:
2675 return GT_EXPR;
2676 case ORDERED_EXPR:
2677 return UNORDERED_EXPR;
2678 case UNORDERED_EXPR:
2679 return ORDERED_EXPR;
2680 default:
2681 gcc_unreachable ();
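/* Examples: with HONOR_NANS false, LT_EXPR inverts to GE_EXPR; with
   HONOR_NANS true it inverts to UNGE_EXPR so that a NaN operand still
   satisfies exactly one of the two comparisons.  If additionally
   flag_trapping_math is set, inverting LT_EXPR would change which
   operands trap, so ERROR_MARK is returned instead.  */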
2685 /* Similar, but return the comparison that results if the operands are
2686 swapped. This is safe for floating-point. */
2688 enum tree_code
2689 swap_tree_comparison (enum tree_code code)
2691 switch (code)
2693 case EQ_EXPR:
2694 case NE_EXPR:
2695 case ORDERED_EXPR:
2696 case UNORDERED_EXPR:
2697 case LTGT_EXPR:
2698 case UNEQ_EXPR:
2699 return code;
2700 case GT_EXPR:
2701 return LT_EXPR;
2702 case GE_EXPR:
2703 return LE_EXPR;
2704 case LT_EXPR:
2705 return GT_EXPR;
2706 case LE_EXPR:
2707 return GE_EXPR;
2708 case UNGT_EXPR:
2709 return UNLT_EXPR;
2710 case UNGE_EXPR:
2711 return UNLE_EXPR;
2712 case UNLT_EXPR:
2713 return UNGT_EXPR;
2714 case UNLE_EXPR:
2715 return UNGE_EXPR;
2716 default:
2717 gcc_unreachable ();
2722 /* Convert a comparison tree code from an enum tree_code representation
2723 into a compcode bit-based encoding. This function is the inverse of
2724 compcode_to_comparison. */
2726 static enum comparison_code
2727 comparison_to_compcode (enum tree_code code)
2729 switch (code)
2731 case LT_EXPR:
2732 return COMPCODE_LT;
2733 case EQ_EXPR:
2734 return COMPCODE_EQ;
2735 case LE_EXPR:
2736 return COMPCODE_LE;
2737 case GT_EXPR:
2738 return COMPCODE_GT;
2739 case NE_EXPR:
2740 return COMPCODE_NE;
2741 case GE_EXPR:
2742 return COMPCODE_GE;
2743 case ORDERED_EXPR:
2744 return COMPCODE_ORD;
2745 case UNORDERED_EXPR:
2746 return COMPCODE_UNORD;
2747 case UNLT_EXPR:
2748 return COMPCODE_UNLT;
2749 case UNEQ_EXPR:
2750 return COMPCODE_UNEQ;
2751 case UNLE_EXPR:
2752 return COMPCODE_UNLE;
2753 case UNGT_EXPR:
2754 return COMPCODE_UNGT;
2755 case LTGT_EXPR:
2756 return COMPCODE_LTGT;
2757 case UNGE_EXPR:
2758 return COMPCODE_UNGE;
2759 default:
2760 gcc_unreachable ();
2764 /* Convert a compcode bit-based encoding of a comparison operator back
2765 to GCC's enum tree_code representation. This function is the
2766 inverse of comparison_to_compcode. */
2768 static enum tree_code
2769 compcode_to_comparison (enum comparison_code code)
2771 switch (code)
2773 case COMPCODE_LT:
2774 return LT_EXPR;
2775 case COMPCODE_EQ:
2776 return EQ_EXPR;
2777 case COMPCODE_LE:
2778 return LE_EXPR;
2779 case COMPCODE_GT:
2780 return GT_EXPR;
2781 case COMPCODE_NE:
2782 return NE_EXPR;
2783 case COMPCODE_GE:
2784 return GE_EXPR;
2785 case COMPCODE_ORD:
2786 return ORDERED_EXPR;
2787 case COMPCODE_UNORD:
2788 return UNORDERED_EXPR;
2789 case COMPCODE_UNLT:
2790 return UNLT_EXPR;
2791 case COMPCODE_UNEQ:
2792 return UNEQ_EXPR;
2793 case COMPCODE_UNLE:
2794 return UNLE_EXPR;
2795 case COMPCODE_UNGT:
2796 return UNGT_EXPR;
2797 case COMPCODE_LTGT:
2798 return LTGT_EXPR;
2799 case COMPCODE_UNGE:
2800 return UNGE_EXPR;
2801 default:
2802 gcc_unreachable ();
2806 /* Return true if COND1 tests the opposite condition of COND2. */
2808 bool
2809 inverse_conditions_p (const_tree cond1, const_tree cond2)
2811 return (COMPARISON_CLASS_P (cond1)
2812 && COMPARISON_CLASS_P (cond2)
2813 && (invert_tree_comparison
2814 (TREE_CODE (cond1),
2815 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2816 && operand_equal_p (TREE_OPERAND (cond1, 0),
2817 TREE_OPERAND (cond2, 0), 0)
2818 && operand_equal_p (TREE_OPERAND (cond1, 1),
2819 TREE_OPERAND (cond2, 1), 0));
2822 /* Return a tree for the comparison which is the combination of
2823 doing the AND or OR (depending on CODE) of the two operations LCODE
2824 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2825 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2826 if this makes the transformation invalid. */
2828 tree
2829 combine_comparisons (location_t loc,
2830 enum tree_code code, enum tree_code lcode,
2831 enum tree_code rcode, tree truth_type,
2832 tree ll_arg, tree lr_arg)
2834 bool honor_nans = HONOR_NANS (ll_arg);
2835 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2836 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2837 int compcode;
2839 switch (code)
2841 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2842 compcode = lcompcode & rcompcode;
2843 break;
2845 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2846 compcode = lcompcode | rcompcode;
2847 break;
2849 default:
2850 return NULL_TREE;
2853 if (!honor_nans)
2855 /* Eliminate unordered comparisons, as well as LTGT and ORD
2856 which are not used unless the mode has NaNs. */
2857 compcode &= ~COMPCODE_UNORD;
2858 if (compcode == COMPCODE_LTGT)
2859 compcode = COMPCODE_NE;
2860 else if (compcode == COMPCODE_ORD)
2861 compcode = COMPCODE_TRUE;
2863 else if (flag_trapping_math)
2865 /* Check that the original operation and the optimized ones will trap
2866 under the same condition. */
2867 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2868 && (lcompcode != COMPCODE_EQ)
2869 && (lcompcode != COMPCODE_ORD);
2870 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2871 && (rcompcode != COMPCODE_EQ)
2872 && (rcompcode != COMPCODE_ORD);
2873 bool trap = (compcode & COMPCODE_UNORD) == 0
2874 && (compcode != COMPCODE_EQ)
2875 && (compcode != COMPCODE_ORD);
2877 /* In a short-circuited boolean expression the LHS might be
2878 such that the RHS, if evaluated, will never trap. For
2879 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2880 if neither x nor y is NaN. (This is a mixed blessing: for
2881 example, the expression above will never trap, hence
2882 optimizing it to x < y would be invalid). */
2883 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2884 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2885 rtrap = false;
2887 /* If the comparison was short-circuited, and only the RHS
2888 trapped, we may now generate a spurious trap. */
2889 if (rtrap && !ltrap
2890 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2891 return NULL_TREE;
2893 /* If we changed the conditions that cause a trap, we lose. */
2894 if ((ltrap || rtrap) != trap)
2895 return NULL_TREE;
2898 if (compcode == COMPCODE_TRUE)
2899 return constant_boolean_node (true, truth_type);
2900 else if (compcode == COMPCODE_FALSE)
2901 return constant_boolean_node (false, truth_type);
2902 else
2904 enum tree_code tcode;
2906 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2907 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
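/* For instance, (x < y) || (x == y) combines COMPCODE_LT | COMPCODE_EQ
   into COMPCODE_LE and folds to x <= y, while (x < y) && (x == y)
   combines to COMPCODE_FALSE and folds to a constant false, provided
   the NaN and trapping checks above do not return NULL_TREE first.  */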
2911 /* Return true if two operands (typically of the same tree node)
2912 are necessarily equal. FLAGS modifies behavior as follows:
2914 If OEP_ONLY_CONST is set, only return true for constants.
2915 This function tests whether the operands are indistinguishable;
2916 it does not test whether they are equal using C's == operation.
2917 The distinction is important for IEEE floating point, because
2918 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2919 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2921 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2922 even though it may hold multiple values during a function.
2923 This is because a GCC tree node guarantees that nothing else is
2924 executed between the evaluation of its "operands" (which may often
2925 be evaluated in arbitrary order). Hence if the operands themselves
2926 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2927 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2928 unset means assuming isochronic (or instantaneous) tree equivalence.
2929 Unless comparing arbitrary expression trees, such as from different
2930 statements, this flag can usually be left unset.
2932 If OEP_PURE_SAME is set, then pure functions with identical arguments
2933 are considered the same. It is used when the caller has other ways
2934 to ensure that global memory is unchanged in between.
2936 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2937 not values of expressions.
2939 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2940 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2942 If OEP_BITWISE is set, then require the values to be bitwise identical
2943 rather than simply numerically equal. Do not take advantage of things
2944 like math-related flags or undefined behavior; only return true for
2945 values that are provably bitwise identical in all circumstances.
2947 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2948 any operand with side effects. This is unnecessarily conservative in the
2949 case we know that arg0 and arg1 are in disjoint code paths (such as in
2950 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2951 addresses with TREE_CONSTANT flag set so we know that &var == &var
2952 even if var is volatile. */
2954 bool
2955 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
2956 unsigned int flags)
2958 bool r;
2959 if (verify_hash_value (arg0, arg1, flags, &r))
2960 return r;
2962 STRIP_ANY_LOCATION_WRAPPER (arg0);
2963 STRIP_ANY_LOCATION_WRAPPER (arg1);
2965 /* If either is ERROR_MARK, they aren't equal. */
2966 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2967 || TREE_TYPE (arg0) == error_mark_node
2968 || TREE_TYPE (arg1) == error_mark_node)
2969 return false;
2971 /* Similarly, if either does not have a type (like a template id),
2972 they aren't equal. */
2973 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2974 return false;
2976 /* Bitwise identity makes no sense if the values have different layouts. */
2977 if ((flags & OEP_BITWISE)
2978 && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
2979 return false;
2981 /* We cannot consider pointers to different address space equal. */
2982 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2983 && POINTER_TYPE_P (TREE_TYPE (arg1))
2984 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2985 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2986 return false;
2988 /* Check equality of integer constants before bailing out due to
2989 precision differences. */
2990 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2992 /* Address of INTEGER_CST is not defined; check that we did not forget
2993 to drop the OEP_ADDRESS_OF flag. */
2994 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2995 return tree_int_cst_equal (arg0, arg1);
2998 if (!(flags & OEP_ADDRESS_OF))
3000 /* If the two types don't have the same signedness, then we can't consider
3001 them equal. We must check this before the STRIP_NOPS calls
3002 because they may change the signedness of the arguments. As pointers
3003 strictly don't have a signedness, require either two pointers or
3004 two non-pointers as well. */
3005 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3006 || POINTER_TYPE_P (TREE_TYPE (arg0))
3007 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3008 return false;
3010 /* If the two types don't have the same precision, then it is not safe
3011 to strip NOPs. */
3012 if (element_precision (TREE_TYPE (arg0))
3013 != element_precision (TREE_TYPE (arg1)))
3014 return false;
3016 STRIP_NOPS (arg0);
3017 STRIP_NOPS (arg1);
3019 #if 0
3020 /* FIXME: the Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
3021 sanity check once the issue is solved. */
3022 else
3023 /* Addresses of conversions and SSA_NAMEs (and many other things)
3024 are not defined. Check that we did not forget to drop the
3025 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3026 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3027 && TREE_CODE (arg0) != SSA_NAME);
3028 #endif
3030 /* In case both args are comparisons but with different comparison
3031 code, try to swap the comparison operands of one arg to produce
3032 a match and compare that variant. */
3033 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3034 && COMPARISON_CLASS_P (arg0)
3035 && COMPARISON_CLASS_P (arg1))
3037 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3039 if (TREE_CODE (arg0) == swap_code)
3040 return operand_equal_p (TREE_OPERAND (arg0, 0),
3041 TREE_OPERAND (arg1, 1), flags)
3042 && operand_equal_p (TREE_OPERAND (arg0, 1),
3043 TREE_OPERAND (arg1, 0), flags);
3046 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3048 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3049 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3051 else if (flags & OEP_ADDRESS_OF)
3053 /* If we are interested in comparing addresses ignore
3054 MEM_REF wrappings of the base that can appear just for
3055 TBAA reasons. */
3056 if (TREE_CODE (arg0) == MEM_REF
3057 && DECL_P (arg1)
3058 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3059 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3060 && integer_zerop (TREE_OPERAND (arg0, 1)))
3061 return true;
3062 else if (TREE_CODE (arg1) == MEM_REF
3063 && DECL_P (arg0)
3064 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3065 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3066 && integer_zerop (TREE_OPERAND (arg1, 1)))
3067 return true;
3068 return false;
3070 else
3071 return false;
3074 /* When not checking addresses, this is needed for conversions and for
3075 COMPONENT_REF. Might as well play it safe and always test this. */
3076 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3077 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3078 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3079 && !(flags & OEP_ADDRESS_OF)))
3080 return false;
3082 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3083 We don't care about side effects in that case because the SAVE_EXPR
3084 takes care of that for us. In all other cases, two expressions are
3085 equal if they have no side effects. If we have two identical
3086 expressions with side effects that should be treated the same due
3087 to the only side effects being identical SAVE_EXPR's, that will
3088 be detected in the recursive calls below.
3089 If we are taking an invariant address of two identical objects
3090 they are necessarily equal as well. */
3091 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3092 && (TREE_CODE (arg0) == SAVE_EXPR
3093 || (flags & OEP_MATCH_SIDE_EFFECTS)
3094 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3095 return true;
3097 /* Next handle constant cases, those for which we can return true even
3098 if OEP_ONLY_CONST is set. */
3099 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3100 switch (TREE_CODE (arg0))
3102 case INTEGER_CST:
3103 return tree_int_cst_equal (arg0, arg1);
3105 case FIXED_CST:
3106 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3107 TREE_FIXED_CST (arg1));
3109 case REAL_CST:
3110 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3111 return true;
3113 if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3115 /* If we do not distinguish between signed and unsigned zero,
3116 consider them equal. */
3117 if (real_zerop (arg0) && real_zerop (arg1))
3118 return true;
3120 return false;
3122 case VECTOR_CST:
3124 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3125 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3126 return false;
3128 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3129 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3130 return false;
3132 unsigned int count = vector_cst_encoded_nelts (arg0);
3133 for (unsigned int i = 0; i < count; ++i)
3134 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3135 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3136 return false;
3137 return true;
3140 case COMPLEX_CST:
3141 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3142 flags)
3143 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3144 flags));
3146 case STRING_CST:
3147 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3148 && ! memcmp (TREE_STRING_POINTER (arg0),
3149 TREE_STRING_POINTER (arg1),
3150 TREE_STRING_LENGTH (arg0)));
3152 case ADDR_EXPR:
3153 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3154 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3155 flags | OEP_ADDRESS_OF
3156 | OEP_MATCH_SIDE_EFFECTS);
3157 case CONSTRUCTOR:
3158 /* In GIMPLE empty constructors are allowed in initializers of
3159 aggregates. */
3160 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3161 default:
3162 break;
3165 /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3166 two instances of undefined behavior will give identical results. */
3167 if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3168 return false;
3170 /* Define macros to test an operand from arg0 and arg1 for equality and a
3171 variant that allows null and views null as being different from any
3172 non-null value. In the latter case, if either is null, they both
3173 must be; otherwise, do the normal comparison. */
3174 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3175 TREE_OPERAND (arg1, N), flags)
3177 #define OP_SAME_WITH_NULL(N) \
3178 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3179 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3181 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3183 case tcc_unary:
3184 /* Two conversions are equal only if signedness and modes match. */
3185 switch (TREE_CODE (arg0))
3187 CASE_CONVERT:
3188 case FIX_TRUNC_EXPR:
3189 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3190 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3191 return false;
3192 break;
3193 default:
3194 break;
3197 return OP_SAME (0);
3200 case tcc_comparison:
3201 case tcc_binary:
3202 if (OP_SAME (0) && OP_SAME (1))
3203 return true;
3205 /* For commutative ops, allow the other order. */
3206 return (commutative_tree_code (TREE_CODE (arg0))
3207 && operand_equal_p (TREE_OPERAND (arg0, 0),
3208 TREE_OPERAND (arg1, 1), flags)
3209 && operand_equal_p (TREE_OPERAND (arg0, 1),
3210 TREE_OPERAND (arg1, 0), flags));
3212 case tcc_reference:
3213 /* If either of the pointer (or reference) expressions we are
3214 dereferencing contain a side effect, these cannot be equal,
3215 but their addresses can be. */
3216 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3217 && (TREE_SIDE_EFFECTS (arg0)
3218 || TREE_SIDE_EFFECTS (arg1)))
3219 return false;
3221 switch (TREE_CODE (arg0))
3223 case INDIRECT_REF:
3224 if (!(flags & OEP_ADDRESS_OF))
3226 if (TYPE_ALIGN (TREE_TYPE (arg0))
3227 != TYPE_ALIGN (TREE_TYPE (arg1)))
3228 return false;
3229 /* Verify that the access types are compatible. */
3230 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3231 != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3232 return false;
3234 flags &= ~OEP_ADDRESS_OF;
3235 return OP_SAME (0);
3237 case IMAGPART_EXPR:
3238 /* Require the same offset. */
3239 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3240 TYPE_SIZE (TREE_TYPE (arg1)),
3241 flags & ~OEP_ADDRESS_OF))
3242 return false;
3244 /* Fallthru. */
3245 case REALPART_EXPR:
3246 case VIEW_CONVERT_EXPR:
3247 return OP_SAME (0);
3249 case TARGET_MEM_REF:
3250 case MEM_REF:
3251 if (!(flags & OEP_ADDRESS_OF))
3253 /* Require equal access sizes */
3254 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3255 && (!TYPE_SIZE (TREE_TYPE (arg0))
3256 || !TYPE_SIZE (TREE_TYPE (arg1))
3257 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3258 TYPE_SIZE (TREE_TYPE (arg1)),
3259 flags)))
3260 return false;
3261 /* Verify that access happens in similar types. */
3262 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3263 return false;
3264 /* Verify that accesses are TBAA compatible. */
3265 if (!alias_ptr_types_compatible_p
3266 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3267 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3268 || (MR_DEPENDENCE_CLIQUE (arg0)
3269 != MR_DEPENDENCE_CLIQUE (arg1))
3270 || (MR_DEPENDENCE_BASE (arg0)
3271 != MR_DEPENDENCE_BASE (arg1)))
3272 return false;
3273 /* Verify that alignment is compatible. */
3274 if (TYPE_ALIGN (TREE_TYPE (arg0))
3275 != TYPE_ALIGN (TREE_TYPE (arg1)))
3276 return false;
3278 flags &= ~OEP_ADDRESS_OF;
3279 return (OP_SAME (0) && OP_SAME (1)
3281 /* TARGET_MEM_REFs require equal extra operands. */
3281 && (TREE_CODE (arg0) != TARGET_MEM_REF
3282 || (OP_SAME_WITH_NULL (2)
3283 && OP_SAME_WITH_NULL (3)
3284 && OP_SAME_WITH_NULL (4))));
3286 case ARRAY_REF:
3287 case ARRAY_RANGE_REF:
3288 if (!OP_SAME (0))
3289 return false;
3290 flags &= ~OEP_ADDRESS_OF;
3291 /* First compare the array index by value if it is constant, as the
3292 indexes may have different types but the same value here. */
3293 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3294 TREE_OPERAND (arg1, 1))
3295 || OP_SAME (1))
3296 && OP_SAME_WITH_NULL (2)
3297 && OP_SAME_WITH_NULL (3)
3298 /* Compare low bound and element size as with OEP_ADDRESS_OF
3299 we have to account for the offset of the ref. */
3300 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3301 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3302 || (operand_equal_p (array_ref_low_bound
3303 (CONST_CAST_TREE (arg0)),
3304 array_ref_low_bound
3305 (CONST_CAST_TREE (arg1)), flags)
3306 && operand_equal_p (array_ref_element_size
3307 (CONST_CAST_TREE (arg0)),
3308 array_ref_element_size
3309 (CONST_CAST_TREE (arg1)),
3310 flags))));
3312 case COMPONENT_REF:
3313 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3314 may be NULL when we're called to compare MEM_EXPRs. */
3315 if (!OP_SAME_WITH_NULL (0)
3316 || !OP_SAME (1))
3317 return false;
3318 flags &= ~OEP_ADDRESS_OF;
3319 return OP_SAME_WITH_NULL (2);
3321 case BIT_FIELD_REF:
3322 if (!OP_SAME (0))
3323 return false;
3324 flags &= ~OEP_ADDRESS_OF;
3325 return OP_SAME (1) && OP_SAME (2);
3327 /* Virtual table call. */
3328 case OBJ_TYPE_REF:
3330 if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3331 OBJ_TYPE_REF_EXPR (arg1), flags))
3332 return false;
3333 if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3334 != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3335 return false;
3336 if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3337 OBJ_TYPE_REF_OBJECT (arg1), flags))
3338 return false;
3339 if (!types_same_for_odr (obj_type_ref_class (arg0),
3340 obj_type_ref_class (arg1)))
3341 return false;
3342 return true;
3345 default:
3346 return false;
3349 case tcc_expression:
3350 switch (TREE_CODE (arg0))
3352 case ADDR_EXPR:
3353 /* Be sure we pass the right ADDRESS_OF flag. */
3354 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3355 return operand_equal_p (TREE_OPERAND (arg0, 0),
3356 TREE_OPERAND (arg1, 0),
3357 flags | OEP_ADDRESS_OF);
3359 case TRUTH_NOT_EXPR:
3360 return OP_SAME (0);
3362 case TRUTH_ANDIF_EXPR:
3363 case TRUTH_ORIF_EXPR:
3364 return OP_SAME (0) && OP_SAME (1);
3366 case WIDEN_MULT_PLUS_EXPR:
3367 case WIDEN_MULT_MINUS_EXPR:
3368 if (!OP_SAME (2))
3369 return false;
3370 /* The multiplication operands are commutative. */
3371 /* FALLTHRU */
3373 case TRUTH_AND_EXPR:
3374 case TRUTH_OR_EXPR:
3375 case TRUTH_XOR_EXPR:
3376 if (OP_SAME (0) && OP_SAME (1))
3377 return true;
3379 /* Otherwise take into account this is a commutative operation. */
3380 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3381 TREE_OPERAND (arg1, 1), flags)
3382 && operand_equal_p (TREE_OPERAND (arg0, 1),
3383 TREE_OPERAND (arg1, 0), flags));
3385 case COND_EXPR:
3386 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3387 return false;
3388 flags &= ~OEP_ADDRESS_OF;
3389 return OP_SAME (0);
3391 case BIT_INSERT_EXPR:
3392 /* BIT_INSERT_EXPR has an implicit operand in the type precision
3393 of op1. We need to check that the two precisions are the same. */
3394 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3395 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3396 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3397 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3398 return false;
3399 /* FALLTHRU */
3401 case VEC_COND_EXPR:
3402 case DOT_PROD_EXPR:
3403 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3405 case MODIFY_EXPR:
3406 case INIT_EXPR:
3407 case COMPOUND_EXPR:
3408 case PREDECREMENT_EXPR:
3409 case PREINCREMENT_EXPR:
3410 case POSTDECREMENT_EXPR:
3411 case POSTINCREMENT_EXPR:
3412 if (flags & OEP_LEXICOGRAPHIC)
3413 return OP_SAME (0) && OP_SAME (1);
3414 return false;
3416 case CLEANUP_POINT_EXPR:
3417 case EXPR_STMT:
3418 case SAVE_EXPR:
3419 if (flags & OEP_LEXICOGRAPHIC)
3420 return OP_SAME (0);
3421 return false;
3423 default:
3424 return false;
3427 case tcc_vl_exp:
3428 switch (TREE_CODE (arg0))
3430 case CALL_EXPR:
3431 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3432 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3433 /* If the two CALL_EXPRs are not both internal or both normal
3434 function calls, then they are not equal. */
3435 return false;
3436 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3438 /* If the CALL_EXPRs call different internal functions, then they
3439 are not equal. */
3440 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3441 return false;
3443 else
3445 /* If the CALL_EXPRs call different functions, then they are not
3446 equal. */
3447 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3448 flags))
3449 return false;
3452 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3454 unsigned int cef = call_expr_flags (arg0);
3455 if (flags & OEP_PURE_SAME)
3456 cef &= ECF_CONST | ECF_PURE;
3457 else
3458 cef &= ECF_CONST;
3459 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3460 return false;
3463 /* Now see if all the arguments are the same. */
3465 const_call_expr_arg_iterator iter0, iter1;
3466 const_tree a0, a1;
3467 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3468 a1 = first_const_call_expr_arg (arg1, &iter1);
3469 a0 && a1;
3470 a0 = next_const_call_expr_arg (&iter0),
3471 a1 = next_const_call_expr_arg (&iter1))
3472 if (! operand_equal_p (a0, a1, flags))
3473 return false;
3475 /* If we get here and both argument lists are exhausted
3476 then the CALL_EXPRs are equal. */
3477 return ! (a0 || a1);
3479 default:
3480 return false;
3483 case tcc_declaration:
3484 /* Consider __builtin_sqrt equal to sqrt. */
3485 return (TREE_CODE (arg0) == FUNCTION_DECL
3486 && fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3487 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3488 && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3489 == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
3491 case tcc_exceptional:
3492 if (TREE_CODE (arg0) == CONSTRUCTOR)
3494 if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3495 return false;
3497 /* In GIMPLE constructors are used only to build vectors from
3498 elements. Individual elements in the constructor must be
3499 indexed in increasing order and form an initial sequence.
3501 We make no effort to compare constructors in GENERIC.
3502 (see sem_variable::equals in ipa-icf which can do so for
3503 constants). */
3504 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3505 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3506 return false;
3508 /* Be sure that vectors constructed have the same representation.
3509 So far we have only tested that element precisions and modes match.
3510 Vectors may be BLKmode, so also check that the numbers of
3511 parts match. */
3512 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3513 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3514 return false;
3516 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3517 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3518 unsigned int len = vec_safe_length (v0);
3520 if (len != vec_safe_length (v1))
3521 return false;
3523 for (unsigned int i = 0; i < len; i++)
3525 constructor_elt *c0 = &(*v0)[i];
3526 constructor_elt *c1 = &(*v1)[i];
3528 if (!operand_equal_p (c0->value, c1->value, flags)
3529 /* In GIMPLE the indexes can be either NULL or matching i.
3530 Double check this so we won't get false
3531 positives for GENERIC. */
3532 || (c0->index
3533 && (TREE_CODE (c0->index) != INTEGER_CST
3534 || compare_tree_int (c0->index, i)))
3535 || (c1->index
3536 && (TREE_CODE (c1->index) != INTEGER_CST
3537 || compare_tree_int (c1->index, i))))
3538 return false;
3540 return true;
3542 else if (TREE_CODE (arg0) == STATEMENT_LIST
3543 && (flags & OEP_LEXICOGRAPHIC))
3545 /* Compare the STATEMENT_LISTs. */
3546 tree_stmt_iterator tsi1, tsi2;
3547 tree body1 = CONST_CAST_TREE (arg0);
3548 tree body2 = CONST_CAST_TREE (arg1);
3549 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3550 tsi_next (&tsi1), tsi_next (&tsi2))
3552 /* The lists don't have the same number of statements. */
3553 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3554 return false;
3555 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3556 return true;
3557 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3558 flags & (OEP_LEXICOGRAPHIC
3559 | OEP_NO_HASH_CHECK)))
3560 return false;
3563 return false;
3565 case tcc_statement:
3566 switch (TREE_CODE (arg0))
3568 case RETURN_EXPR:
3569 if (flags & OEP_LEXICOGRAPHIC)
3570 return OP_SAME_WITH_NULL (0);
3571 return false;
3572 case DEBUG_BEGIN_STMT:
3573 if (flags & OEP_LEXICOGRAPHIC)
3574 return true;
3575 return false;
3576 default:
3577 return false;
3580 default:
3581 return false;
3584 #undef OP_SAME
3585 #undef OP_SAME_WITH_NULL
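/* Typical call patterns (a sketch):

     operand_equal_p (a, b, 0)               values provably equal, no
                                             side effects tolerated
     operand_equal_p (a, b, OEP_ONLY_CONST)  equal constants only
     operand_equal_p (a, b, OEP_ADDRESS_OF)  A and B denote the same
                                             object, as for &a == &b

   The flags can be OR'ed together where the semantics allow it.  */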
3588 /* Generate a hash value for an expression. This can be used iteratively
3589 by passing a previous result as the HSTATE argument. */
3591 void
3592 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3593 unsigned int flags)
3595 int i;
3596 enum tree_code code;
3597 enum tree_code_class tclass;
3599 if (t == NULL_TREE || t == error_mark_node)
3601 hstate.merge_hash (0);
3602 return;
3605 STRIP_ANY_LOCATION_WRAPPER (t);
3607 if (!(flags & OEP_ADDRESS_OF))
3608 STRIP_NOPS (t);
3610 code = TREE_CODE (t);
3612 switch (code)
3614 /* Alas, constants aren't shared, so we can't rely on pointer
3615 identity. */
3616 case VOID_CST:
3617 hstate.merge_hash (0);
3618 return;
3619 case INTEGER_CST:
3620 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3621 for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3622 hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3623 return;
3624 case REAL_CST:
3626 unsigned int val2;
3627 if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3628 val2 = rvc_zero;
3629 else
3630 val2 = real_hash (TREE_REAL_CST_PTR (t));
3631 hstate.merge_hash (val2);
3632 return;
3634 case FIXED_CST:
3636 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3637 hstate.merge_hash (val2);
3638 return;
3640 case STRING_CST:
3641 hstate.add ((const void *) TREE_STRING_POINTER (t),
3642 TREE_STRING_LENGTH (t));
3643 return;
3644 case COMPLEX_CST:
3645 hash_operand (TREE_REALPART (t), hstate, flags);
3646 hash_operand (TREE_IMAGPART (t), hstate, flags);
3647 return;
3648 case VECTOR_CST:
3650 hstate.add_int (VECTOR_CST_NPATTERNS (t));
3651 hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3652 unsigned int count = vector_cst_encoded_nelts (t);
3653 for (unsigned int i = 0; i < count; ++i)
3654 hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3655 return;
3657 case SSA_NAME:
3658 /* We can just compare by pointer. */
3659 hstate.add_hwi (SSA_NAME_VERSION (t));
3660 return;
3661 case PLACEHOLDER_EXPR:
3662 /* The node itself doesn't matter. */
3663 return;
3664 case BLOCK:
3665 case OMP_CLAUSE:
3666 /* Ignore. */
3667 return;
3668 case TREE_LIST:
3669 /* A list of expressions, for a CALL_EXPR or as the elements of a
3670 VECTOR_CST. */
3671 for (; t; t = TREE_CHAIN (t))
3672 hash_operand (TREE_VALUE (t), hstate, flags);
3673 return;
3674 case CONSTRUCTOR:
3676 unsigned HOST_WIDE_INT idx;
3677 tree field, value;
3678 flags &= ~OEP_ADDRESS_OF;
3679 hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3680 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3682 /* In GIMPLE the indexes can be either NULL or matching i. */
3683 if (field == NULL_TREE)
3684 field = bitsize_int (idx);
3685 hash_operand (field, hstate, flags);
3686 hash_operand (value, hstate, flags);
3688 return;
3690 case STATEMENT_LIST:
3692 tree_stmt_iterator i;
3693 for (i = tsi_start (CONST_CAST_TREE (t));
3694 !tsi_end_p (i); tsi_next (&i))
3695 hash_operand (tsi_stmt (i), hstate, flags);
3696 return;
3698 case TREE_VEC:
3699 for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3700 hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3701 return;
3702 case IDENTIFIER_NODE:
3703 hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3704 return;
3705 case FUNCTION_DECL:
3706 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3707 Otherwise nodes that compare equal according to operand_equal_p might
3708 get different hash codes. However, don't do this for machine specific
3709 or front end builtins, since the function code is overloaded in those
3710 cases. */
3711 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3712 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3714 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3715 code = TREE_CODE (t);
3717 /* FALL THROUGH */
3718 default:
3719 if (POLY_INT_CST_P (t))
3721 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3722 hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3723 return;
3725 tclass = TREE_CODE_CLASS (code);
3727 if (tclass == tcc_declaration)
3729 /* DECLs have a unique ID. */
3730 hstate.add_hwi (DECL_UID (t));
3732 else if (tclass == tcc_comparison && !commutative_tree_code (code))
3734 /* For comparisons that can be swapped, use the lower
3735 tree code. */
3736 enum tree_code ccode = swap_tree_comparison (code);
3737 if (code < ccode)
3738 ccode = code;
3739 hstate.add_object (ccode);
3740 hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3741 hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3743 else if (CONVERT_EXPR_CODE_P (code))
3745 /* NOP_EXPR and CONVERT_EXPR are considered equal by
3746 operand_equal_p. */
3747 enum tree_code ccode = NOP_EXPR;
3748 hstate.add_object (ccode);
3750 /* Don't hash the type, as that can lead to nodes which
3751 compare equal according to operand_equal_p, but which
3752 have different hash codes. Make sure to include signedness
3753 in the hash computation. */
3754 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3755 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3757 /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl. */
3758 else if (code == MEM_REF
3759 && (flags & OEP_ADDRESS_OF) != 0
3760 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3761 && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3762 && integer_zerop (TREE_OPERAND (t, 1)))
3763 hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3764 hstate, flags);
3765 /* Don't ICE on FE specific trees, or their arguments etc.
3766 during operand_equal_p hash verification. */
3767 else if (!IS_EXPR_CODE_CLASS (tclass))
3768 gcc_assert (flags & OEP_HASH_CHECK);
3769 else
3771 unsigned int sflags = flags;
3773 hstate.add_object (code);
3775 switch (code)
3777 case ADDR_EXPR:
3778 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3779 flags |= OEP_ADDRESS_OF;
3780 sflags = flags;
3781 break;
3783 case INDIRECT_REF:
3784 case MEM_REF:
3785 case TARGET_MEM_REF:
3786 flags &= ~OEP_ADDRESS_OF;
3787 sflags = flags;
3788 break;
3790 case ARRAY_REF:
3791 case ARRAY_RANGE_REF:
3792 case COMPONENT_REF:
3793 case BIT_FIELD_REF:
3794 sflags &= ~OEP_ADDRESS_OF;
3795 break;
3797 case COND_EXPR:
3798 flags &= ~OEP_ADDRESS_OF;
3799 break;
3801 case WIDEN_MULT_PLUS_EXPR:
3802 case WIDEN_MULT_MINUS_EXPR:
3804 /* The multiplication operands are commutative. */
3805 inchash::hash one, two;
3806 hash_operand (TREE_OPERAND (t, 0), one, flags);
3807 hash_operand (TREE_OPERAND (t, 1), two, flags);
3808 hstate.add_commutative (one, two);
3809 hash_operand (TREE_OPERAND (t, 2), two, flags);
3810 return;
3813 case CALL_EXPR:
3814 if (CALL_EXPR_FN (t) == NULL_TREE)
3815 hstate.add_int (CALL_EXPR_IFN (t));
3816 break;
3818 case TARGET_EXPR:
3819 /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3820 Usually different TARGET_EXPRs should just use
3821 different temporaries in their slots. */
3822 hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3823 return;
3825 /* Virtual table call. */
3826 case OBJ_TYPE_REF:
3827 inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3828 inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3829 inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3830 return;
3831 default:
3832 break;
3835 /* Don't hash the type, as that can lead to nodes which
3836 compare equal according to operand_equal_p, but which
3837 have different hash codes. */
3838 if (code == NON_LVALUE_EXPR)
3840 /* Make sure to include signedness in the hash computation. */
3841 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3842 hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3845 else if (commutative_tree_code (code))
3847 /* It's a commutative expression. We want to hash it the same
3848 however it appears. We do this by first hashing both operands
3849 and then rehashing based on the order of their independent
3850 hashes. */
3851 inchash::hash one, two;
3852 hash_operand (TREE_OPERAND (t, 0), one, flags);
3853 hash_operand (TREE_OPERAND (t, 1), two, flags);
3854 hstate.add_commutative (one, two);
3856 else
3857 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
3858 hash_operand (TREE_OPERAND (t, i), hstate,
3859 i == 0 ? flags : sflags);
3861 return;
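/* A minimal standalone sketch of the order-independent combination used
   above for commutative codes, with a hypothetical mixing function
   standing in for inchash::hash::add_commutative: hashing each operand
   into its own state and then combining the two sub-hashes symmetrically
   makes the hash of a + b equal the hash of b + a.  */

#include <assert.h>
#include <stdint.h>

static uint32_t
combine_commutative (uint32_t h0, uint32_t h1)
{
  /* Order the sub-hashes first so the result cannot depend on operand
     order; the multiplier is an arbitrary odd mixing constant.  */
  uint32_t lo = h0 < h1 ? h0 : h1;
  uint32_t hi = h0 < h1 ? h1 : h0;
  return lo * 2654435761u ^ hi;
}

int
main (void)
{
  assert (combine_commutative (0xdeadbeefu, 0x12345678u)
          == combine_commutative (0x12345678u, 0xdeadbeefu));
  return 0;
}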
3865 bool
3866 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
3867 unsigned int flags, bool *ret)
3869 /* When checking, verify at the outermost operand_equal_p call that
3870 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
3871 hash value. */
3872 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
3874 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
3876 if (arg0 != arg1)
3878 inchash::hash hstate0 (0), hstate1 (0);
3879 hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
3880 hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
3881 hashval_t h0 = hstate0.end ();
3882 hashval_t h1 = hstate1.end ();
3883 gcc_assert (h0 == h1);
3885 *ret = true;
3887 else
3888 *ret = false;
3890 return true;
3893 return false;
3897 static operand_compare default_compare_instance;
3899 /* Convenience wrapper around the operand_compare class because usually we do
3900 not need to play with the valueizer. */
3902 bool
3903 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3905 return default_compare_instance.operand_equal_p (arg0, arg1, flags);
3908 namespace inchash
3911 /* Generate a hash value for an expression. This can be used iteratively
3912 by passing a previous result as the HSTATE argument.
3914 This function is intended to produce the same hash for expressions which
3915 would compare equal using operand_equal_p. */
3916 void
3917 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
3919 default_compare_instance.hash_operand (t, hstate, flags);
3924 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3925 with a different signedness or a narrower precision. */
3927 static bool
3928 operand_equal_for_comparison_p (tree arg0, tree arg1)
3930 if (operand_equal_p (arg0, arg1, 0))
3931 return true;
3933 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3934 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3935 return false;
3937 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3938 and see if the inner values are the same. This removes any
3939 signedness comparison, which doesn't matter here. */
3940 tree op0 = arg0;
3941 tree op1 = arg1;
3942 STRIP_NOPS (op0);
3943 STRIP_NOPS (op1);
3944 if (operand_equal_p (op0, op1, 0))
3945 return true;
3947 /* Discard a single widening conversion from ARG1 and see if the inner
3948 value is the same as ARG0. */
3949 if (CONVERT_EXPR_P (arg1)
3950 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3951 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3952 < TYPE_PRECISION (TREE_TYPE (arg1))
3953 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3954 return true;
3956 return false;
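/* The two variants accepted above, as a standalone check (illustrative
   only; assumes the usual 32-bit int and 64-bit long): a same-precision
   signedness change and a single widening conversion both preserve the
   value being compared.  */

#include <assert.h>

int
main (void)
{
  int i = -5;
  /* Same-precision signedness change: equality is unaffected.  */
  assert (((unsigned) i == (unsigned) (-5)) == (i == -5));
  /* A single widening conversion still denotes the value of i.  */
  assert ((long) i == i);
  return 0;
}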
3959 /* See if ARG is an expression that is either a comparison or is performing
3960 arithmetic on comparisons. The comparisons must only be comparing
3961 two different values, which will be stored in *CVAL1 and *CVAL2; if
3962 they are nonzero it means that some operands have already been found.
3963 No variables may be used anywhere else in the expression except in the
3964 comparisons.
3966 If this is true, return true. Otherwise, return false. */
3968 static bool
3969 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
3971 enum tree_code code = TREE_CODE (arg);
3972 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3974 /* We can handle some of the tcc_expression cases here. */
3975 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3976 tclass = tcc_unary;
3977 else if (tclass == tcc_expression
3978 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3979 || code == COMPOUND_EXPR))
3980 tclass = tcc_binary;
3982 switch (tclass)
3984 case tcc_unary:
3985 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
3987 case tcc_binary:
3988 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3989 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
3991 case tcc_constant:
3992 return true;
3994 case tcc_expression:
3995 if (code == COND_EXPR)
3996 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3997 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
3998 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
3999 return false;
4001 case tcc_comparison:
4002 /* First see if we can handle the first operand, then the second. For
4003 the second operand, we know *CVAL1 can't be zero. It must be that
4004 one side of the comparison is each of the values; test for the
4005 case where this isn't true by failing if the two operands
4006 are the same. */
4008 if (operand_equal_p (TREE_OPERAND (arg, 0),
4009 TREE_OPERAND (arg, 1), 0))
4010 return false;
4012 if (*cval1 == 0)
4013 *cval1 = TREE_OPERAND (arg, 0);
4014 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4016 else if (*cval2 == 0)
4017 *cval2 = TREE_OPERAND (arg, 0);
4018 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4020 else
4021 return false;
4023 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4025 else if (*cval2 == 0)
4026 *cval2 = TREE_OPERAND (arg, 1);
4027 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4029 else
4030 return false;
4032 return true;
4034 default:
4035 return false;
4039 /* ARG is a tree that is known to contain just arithmetic operations and
4040 comparisons. Evaluate the operations in the tree substituting NEW0 for
4041 any occurrence of OLD0 as an operand of a comparison and likewise for
4042 NEW1 and OLD1. */
4044 static tree
4045 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4046 tree old1, tree new1)
4048 tree type = TREE_TYPE (arg);
4049 enum tree_code code = TREE_CODE (arg);
4050 enum tree_code_class tclass = TREE_CODE_CLASS (code);
4052 /* We can handle some of the tcc_expression cases here. */
4053 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4054 tclass = tcc_unary;
4055 else if (tclass == tcc_expression
4056 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4057 tclass = tcc_binary;
4059 switch (tclass)
4061 case tcc_unary:
4062 return fold_build1_loc (loc, code, type,
4063 eval_subst (loc, TREE_OPERAND (arg, 0),
4064 old0, new0, old1, new1));
4066 case tcc_binary:
4067 return fold_build2_loc (loc, code, type,
4068 eval_subst (loc, TREE_OPERAND (arg, 0),
4069 old0, new0, old1, new1),
4070 eval_subst (loc, TREE_OPERAND (arg, 1),
4071 old0, new0, old1, new1));
4073 case tcc_expression:
4074 switch (code)
4076 case SAVE_EXPR:
4077 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4078 old1, new1);
4080 case COMPOUND_EXPR:
4081 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4082 old1, new1);
4084 case COND_EXPR:
4085 return fold_build3_loc (loc, code, type,
4086 eval_subst (loc, TREE_OPERAND (arg, 0),
4087 old0, new0, old1, new1),
4088 eval_subst (loc, TREE_OPERAND (arg, 1),
4089 old0, new0, old1, new1),
4090 eval_subst (loc, TREE_OPERAND (arg, 2),
4091 old0, new0, old1, new1));
4092 default:
4093 break;
4095 /* Fall through - ??? */
4097 case tcc_comparison:
4099 tree arg0 = TREE_OPERAND (arg, 0);
4100 tree arg1 = TREE_OPERAND (arg, 1);
4102 /* We need to check both for exact equality and tree equality. The
4103 former will be true if the operand has a side-effect. In that
4104 case, we know the operand occurred exactly once. */
4106 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4107 arg0 = new0;
4108 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4109 arg0 = new1;
4111 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4112 arg1 = new0;
4113 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4114 arg1 = new1;
4116 return fold_build2_loc (loc, code, type, arg0, arg1);
4119 default:
4120 return arg;
4124 /* Return a tree for the case when the result of an expression is RESULT
4125 converted to TYPE and OMITTED was previously an operand of the expression
4126 but is now not needed (e.g., we folded OMITTED * 0).
4128 If OMITTED has side effects, we must evaluate it. Otherwise, just do
4129 the conversion of RESULT to TYPE. */
4131 tree
4132 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4134 tree t = fold_convert_loc (loc, type, result);
4136 /* If the resulting operand is an empty statement, just return the omitted
4137 statement cast to void. */
4138 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4139 return build1_loc (loc, NOP_EXPR, void_type_node,
4140 fold_ignored_result (omitted));
4142 if (TREE_SIDE_EFFECTS (omitted))
4143 return build2_loc (loc, COMPOUND_EXPR, type,
4144 fold_ignored_result (omitted), t);
4146 return non_lvalue_loc (loc, t);
4149 /* Return a tree for the case when the result of an expression is RESULT
4150 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4151 of the expression but are now not needed.
4153 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4154 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4155 evaluated before OMITTED2. Otherwise, if neither has side effects,
4156 just do the conversion of RESULT to TYPE. */
4158 tree
4159 omit_two_operands_loc (location_t loc, tree type, tree result,
4160 tree omitted1, tree omitted2)
4162 tree t = fold_convert_loc (loc, type, result);
4164 if (TREE_SIDE_EFFECTS (omitted2))
4165 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4166 if (TREE_SIDE_EFFECTS (omitted1))
4167 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4169 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
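/* What these omit_*_operand folders build, written back out as C (an
   illustrative driver; f, g and the call-order encoding are made up):
   folding f () * 0 + g () * 0 down to the constant 0 must still
   evaluate f and then g, which is exactly a nest of COMPOUND_EXPRs,
   i.e. comma expressions.  */

#include <assert.h>

static int calls;

static int f (void) { calls = calls * 10 + 1; return 7; }
static int g (void) { calls = calls * 10 + 2; return 9; }

int
main (void)
{
  /* OMITTED1 is evaluated before OMITTED2, then the RESULT constant.  */
  int r = (f (), (g (), 0));
  assert (r == 0 && calls == 12);
  return 0;
}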
4173 /* Return a simplified tree node for the truth-negation of ARG. This
4174 never alters ARG itself. We assume that ARG is an operation that
4175 returns a truth value (0 or 1).
4177 FIXME: one would think we would fold the result, but it causes
4178 problems with the dominator optimizer. */
4180 static tree
4181 fold_truth_not_expr (location_t loc, tree arg)
4183 tree type = TREE_TYPE (arg);
4184 enum tree_code code = TREE_CODE (arg);
4185 location_t loc1, loc2;
4187 /* If this is a comparison, we can simply invert it, except for
4188 floating-point non-equality comparisons, in which case we just
4189 enclose a TRUTH_NOT_EXPR around what we have. */
4191 if (TREE_CODE_CLASS (code) == tcc_comparison)
4193 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4194 if (FLOAT_TYPE_P (op_type)
4195 && flag_trapping_math
4196 && code != ORDERED_EXPR && code != UNORDERED_EXPR
4197 && code != NE_EXPR && code != EQ_EXPR)
4198 return NULL_TREE;
4200 code = invert_tree_comparison (code, HONOR_NANS (op_type));
4201 if (code == ERROR_MARK)
4202 return NULL_TREE;
4204 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4205 TREE_OPERAND (arg, 1));
4206 if (TREE_NO_WARNING (arg))
4207 TREE_NO_WARNING (ret) = 1;
4208 return ret;
4211 switch (code)
4213 case INTEGER_CST:
4214 return constant_boolean_node (integer_zerop (arg), type);
4216 case TRUTH_AND_EXPR:
4217 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4218 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4219 return build2_loc (loc, TRUTH_OR_EXPR, type,
4220 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4221 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4223 case TRUTH_OR_EXPR:
4224 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4225 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4226 return build2_loc (loc, TRUTH_AND_EXPR, type,
4227 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4228 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4230 case TRUTH_XOR_EXPR:
4231 /* Here we can invert either operand. We invert the first operand
4232 unless the second operand is a TRUTH_NOT_EXPR in which case our
4233 result is the XOR of the first operand with the inside of the
4234 negation of the second operand. */
4236 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4237 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4238 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4239 else
4240 return build2_loc (loc, TRUTH_XOR_EXPR, type,
4241 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4242 TREE_OPERAND (arg, 1));
4244 case TRUTH_ANDIF_EXPR:
4245 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4246 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4247 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4248 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4249 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4251 case TRUTH_ORIF_EXPR:
4252 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4253 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4254 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4255 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4256 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4258 case TRUTH_NOT_EXPR:
4259 return TREE_OPERAND (arg, 0);
4261 case COND_EXPR:
4263 tree arg1 = TREE_OPERAND (arg, 1);
4264 tree arg2 = TREE_OPERAND (arg, 2);
4266 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4267 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4269 /* A COND_EXPR may have a throw as one operand, which
4270 then has void type. Just leave void operands
4271 as they are. */
4272 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4273 VOID_TYPE_P (TREE_TYPE (arg1))
4274 ? arg1 : invert_truthvalue_loc (loc1, arg1),
4275 VOID_TYPE_P (TREE_TYPE (arg2))
4276 ? arg2 : invert_truthvalue_loc (loc2, arg2));
4279 case COMPOUND_EXPR:
4280 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4281 return build2_loc (loc, COMPOUND_EXPR, type,
4282 TREE_OPERAND (arg, 0),
4283 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4285 case NON_LVALUE_EXPR:
4286 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4287 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4289 CASE_CONVERT:
4290 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4291 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4293 /* fall through */
4295 case FLOAT_EXPR:
4296 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4297 return build1_loc (loc, TREE_CODE (arg), type,
4298 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4300 case BIT_AND_EXPR:
4301 if (!integer_onep (TREE_OPERAND (arg, 1)))
4302 return NULL_TREE;
4303 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4305 case SAVE_EXPR:
4306 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4308 case CLEANUP_POINT_EXPR:
4309 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4310 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4311 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4313 default:
4314 return NULL_TREE;
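/* The inversions handled above, restated as source-level identities
   over 0/1 truth values (an illustrative exhaustive check):  */

#include <assert.h>

int
main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      {
        assert (!(a && b) == (!a || !b));  /* TRUTH_AND_EXPR -> TRUTH_OR_EXPR */
        assert (!(a || b) == (!a && !b));  /* TRUTH_OR_EXPR -> TRUTH_AND_EXPR */
        assert (!(a ^ b) == (a ^ !b));     /* TRUTH_XOR_EXPR: invert one side */
      }
  return 0;
}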
4318 /* Fold the truth-negation of ARG. This never alters ARG itself. We
4319 assume that ARG is an operation that returns a truth value (0 or 1
4320 for scalars, 0 or -1 for vectors). Return the folded expression if
4321 folding is successful. Otherwise, return NULL_TREE. */
4323 static tree
4324 fold_invert_truthvalue (location_t loc, tree arg)
4326 tree type = TREE_TYPE (arg);
4327 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4328 ? BIT_NOT_EXPR
4329 : TRUTH_NOT_EXPR,
4330 type, arg);
4333 /* Return a simplified tree node for the truth-negation of ARG. This
4334 never alters ARG itself. We assume that ARG is an operation that
4335 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
4337 tree
4338 invert_truthvalue_loc (location_t loc, tree arg)
4340 if (TREE_CODE (arg) == ERROR_MARK)
4341 return arg;
4343 tree type = TREE_TYPE (arg);
4344 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4345 ? BIT_NOT_EXPR
4346 : TRUTH_NOT_EXPR,
4347 type, arg);
4350 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4351 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
4352 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
4353 is the original memory reference used to preserve the alias set of
4354 the access. */
4356 static tree
4357 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4358 HOST_WIDE_INT bitsize, poly_int64 bitpos,
4359 int unsignedp, int reversep)
4361 tree result, bftype;
4363 /* Attempt not to lose the access path if possible. */
4364 if (TREE_CODE (orig_inner) == COMPONENT_REF)
4366 tree ninner = TREE_OPERAND (orig_inner, 0);
4367 machine_mode nmode;
4368 poly_int64 nbitsize, nbitpos;
4369 tree noffset;
4370 int nunsignedp, nreversep, nvolatilep = 0;
4371 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4372 &noffset, &nmode, &nunsignedp,
4373 &nreversep, &nvolatilep);
4374 if (base == inner
4375 && noffset == NULL_TREE
4376 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4377 && !reversep
4378 && !nreversep
4379 && !nvolatilep)
4381 inner = ninner;
4382 bitpos -= nbitpos;
4386 alias_set_type iset = get_alias_set (orig_inner);
4387 if (iset == 0 && get_alias_set (inner) != iset)
4388 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4389 build_fold_addr_expr (inner),
4390 build_int_cst (ptr_type_node, 0));
4392 if (known_eq (bitpos, 0) && !reversep)
4394 tree size = TYPE_SIZE (TREE_TYPE (inner));
4395 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4396 || POINTER_TYPE_P (TREE_TYPE (inner)))
4397 && tree_fits_shwi_p (size)
4398 && tree_to_shwi (size) == bitsize)
4399 return fold_convert_loc (loc, type, inner);
4402 bftype = type;
4403 if (TYPE_PRECISION (bftype) != bitsize
4404 || TYPE_UNSIGNED (bftype) == !unsignedp)
4405 bftype = build_nonstandard_integer_type (bitsize, 0);
4407 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4408 bitsize_int (bitsize), bitsize_int (bitpos));
4409 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4411 if (bftype != type)
4412 result = fold_convert_loc (loc, type, result);
4414 return result;
4417 /* Optimize a bit-field compare.
4419 There are two cases: First is a compare against a constant and the
4420 second is a comparison of two items where the fields are at the same
4421 bit position relative to the start of a chunk (byte, halfword, word)
4422 large enough to contain it. In these cases we can avoid the shift
4423 implicit in bitfield extractions.
4425 For constants, we emit a compare of the shifted constant with the
4426 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4427 compared. For two fields at the same position, we do the ANDs with the
4428 similar mask and compare the result of the ANDs.
4430 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4431 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4432 are the left and right operands of the comparison, respectively.
4434 If the optimization described above can be done, we return the resulting
4435 tree. Otherwise we return zero. */
4437 static tree
4438 optimize_bit_field_compare (location_t loc, enum tree_code code,
4439 tree compare_type, tree lhs, tree rhs)
4441 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4442 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4443 tree type = TREE_TYPE (lhs);
4444 tree unsigned_type;
4445 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4446 machine_mode lmode, rmode;
4447 scalar_int_mode nmode;
4448 int lunsignedp, runsignedp;
4449 int lreversep, rreversep;
4450 int lvolatilep = 0, rvolatilep = 0;
4451 tree linner, rinner = NULL_TREE;
4452 tree mask;
4453 tree offset;
4455 /* Get all the information about the extractions being done. If the bit size
4456 is the same as the size of the underlying object, we aren't doing an
4457 extraction at all and so can do nothing. We also don't want to
4458 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4459 then will no longer be able to replace it. */
4460 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4461 &lunsignedp, &lreversep, &lvolatilep);
4462 if (linner == lhs
4463 || !known_size_p (plbitsize)
4464 || !plbitsize.is_constant (&lbitsize)
4465 || !plbitpos.is_constant (&lbitpos)
4466 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4467 || offset != 0
4468 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4469 || lvolatilep)
4470 return 0;
4472 if (const_p)
4473 rreversep = lreversep;
4474 else
4476 /* If this is not a constant, we can only do something if bit positions,
4477 sizes, signedness and storage order are the same. */
4478 rinner
4479 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4480 &runsignedp, &rreversep, &rvolatilep);
4482 if (rinner == rhs
4483 || maybe_ne (lbitpos, rbitpos)
4484 || maybe_ne (lbitsize, rbitsize)
4485 || lunsignedp != runsignedp
4486 || lreversep != rreversep
4487 || offset != 0
4488 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4489 || rvolatilep)
4490 return 0;
4493 /* Honor the C++ memory model and mimic what RTL expansion does. */
4494 poly_uint64 bitstart = 0;
4495 poly_uint64 bitend = 0;
4496 if (TREE_CODE (lhs) == COMPONENT_REF)
4498 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4499 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4500 return 0;
4503 /* See if we can find a mode to refer to this field. We should be able to,
4504 but fail if we can't. */
4505 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4506 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4507 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4508 TYPE_ALIGN (TREE_TYPE (rinner))),
4509 BITS_PER_WORD, false, &nmode))
4510 return 0;
4512 /* Set signed and unsigned types of the precision of this mode for the
4513 shifts below. */
4514 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4516 /* Compute the bit position and size for the new reference and our offset
4517 within it. If the new reference is the same size as the original, we
4518 won't optimize anything, so return zero. */
4519 nbitsize = GET_MODE_BITSIZE (nmode);
4520 nbitpos = lbitpos & ~ (nbitsize - 1);
4521 lbitpos -= nbitpos;
4522 if (nbitsize == lbitsize)
4523 return 0;
4525 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4526 lbitpos = nbitsize - lbitsize - lbitpos;
4528 /* Make the mask to be used against the extracted field. */
4529 mask = build_int_cst_type (unsigned_type, -1);
4530 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4531 mask = const_binop (RSHIFT_EXPR, mask,
4532 size_int (nbitsize - lbitsize - lbitpos));
4534 if (! const_p)
4536 if (nbitpos < 0)
4537 return 0;
4539 /* If not comparing with constant, just rework the comparison
4540 and return. */
4541 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4542 nbitsize, nbitpos, 1, lreversep);
4543 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4544 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4545 nbitsize, nbitpos, 1, rreversep);
4546 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4547 return fold_build2_loc (loc, code, compare_type, t1, t2);
4550 /* Otherwise, we are handling the constant case. See if the constant is too
4551 big for the field. Warn and return a tree for 0 (false) if so. We do
4552 this not only for its own sake, but to avoid having to test for this
4553 error case below. If we didn't, we might generate wrong code.
4555 For unsigned fields, the constant shifted right by the field length should
4556 be all zero. For signed fields, the high-order bits should agree with
4557 the sign bit. */
4559 if (lunsignedp)
4561 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4563 warning (0, "comparison is always %d due to width of bit-field",
4564 code == NE_EXPR);
4565 return constant_boolean_node (code == NE_EXPR, compare_type);
4568 else
4570 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4571 if (tem != 0 && tem != -1)
4573 warning (0, "comparison is always %d due to width of bit-field",
4574 code == NE_EXPR);
4575 return constant_boolean_node (code == NE_EXPR, compare_type);
4579 if (nbitpos < 0)
4580 return 0;
4582 /* Single-bit compares should always be against zero. */
4583 if (lbitsize == 1 && ! integer_zerop (rhs))
4585 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4586 rhs = build_int_cst (type, 0);
4589 /* Make a new bitfield reference, shift the constant over the
4590 appropriate number of bits and mask it with the computed mask
4591 (in case this was a signed field). If we changed it, make a new one. */
4592 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4593 nbitsize, nbitpos, 1, lreversep);
4595 rhs = const_binop (BIT_AND_EXPR,
4596 const_binop (LSHIFT_EXPR,
4597 fold_convert_loc (loc, unsigned_type, rhs),
4598 size_int (lbitpos)),
4599 mask);
4601 lhs = build2_loc (loc, code, compare_type,
4602 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4603 return lhs;
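/* The shape of the constant case above, written back out as C (an
   illustrative sketch only: the struct, the LSB-first bit-field layout
   and the 8-bit containing unit are assumptions, since bit-field layout
   is implementation-defined):  */

#include <assert.h>
#include <string.h>

struct S { unsigned char a : 3, b : 5; };

int
main (void)
{
  struct S s = { 5, 4 };
  unsigned char word;
  memcpy (&word, &s, 1);
  /* Test s.b == 4 without extracting the field: mask the byte that
     contains it and compare against the constant shifted into place.  */
  assert ((s.b == 4) == ((word & 0xf8) == (4 << 3)));
  return 0;
}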
4606 /* Subroutine for fold_truth_andor_1: decode a field reference.
4608 If EXP is a comparison reference, we return the innermost reference.
4610 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4611 set to the starting bit number.
4613 If the innermost field can be completely contained in a mode-sized
4614 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4616 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4617 otherwise it is not changed.
4619 *PUNSIGNEDP is set to the signedness of the field.
4621 *PREVERSEP is set to the storage order of the field.
4623 *PMASK is set to the mask used. This is either contained in a
4624 BIT_AND_EXPR or derived from the width of the field.
4626 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4628 Return 0 if this is not a component reference or is one that we can't
4629 do anything with. */
4631 static tree
4632 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4633 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4634 int *punsignedp, int *preversep, int *pvolatilep,
4635 tree *pmask, tree *pand_mask)
4637 tree exp = *exp_;
4638 tree outer_type = 0;
4639 tree and_mask = 0;
4640 tree mask, inner, offset;
4641 tree unsigned_type;
4642 unsigned int precision;
4644 /* All the optimizations using this function assume integer fields.
4645 There are problems with FP fields since the type_for_size call
4646 below can fail for, e.g., XFmode. */
4647 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4648 return NULL_TREE;
4650 /* We are interested in the bare arrangement of bits, so strip everything
4651 that doesn't affect the machine mode. However, record the type of the
4652 outermost expression if it may matter below. */
4653 if (CONVERT_EXPR_P (exp)
4654 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4655 outer_type = TREE_TYPE (exp);
4656 STRIP_NOPS (exp);
4658 if (TREE_CODE (exp) == BIT_AND_EXPR)
4660 and_mask = TREE_OPERAND (exp, 1);
4661 exp = TREE_OPERAND (exp, 0);
4662 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4663 if (TREE_CODE (and_mask) != INTEGER_CST)
4664 return NULL_TREE;
4667 poly_int64 poly_bitsize, poly_bitpos;
4668 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4669 pmode, punsignedp, preversep, pvolatilep);
4670 if ((inner == exp && and_mask == 0)
4671 || !poly_bitsize.is_constant (pbitsize)
4672 || !poly_bitpos.is_constant (pbitpos)
4673 || *pbitsize < 0
4674 || offset != 0
4675 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4676 /* Reject out-of-bound accesses (PR79731). */
4677 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4678 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4679 *pbitpos + *pbitsize) < 0))
4680 return NULL_TREE;
4682 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4683 if (unsigned_type == NULL_TREE)
4684 return NULL_TREE;
4686 *exp_ = exp;
4688 /* If the number of bits in the reference is the same as the bitsize of
4689 the outer type, then the outer type gives the signedness. Otherwise
4690 (in case of a small bitfield) the signedness is unchanged. */
4691 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4692 *punsignedp = TYPE_UNSIGNED (outer_type);
4694 /* Compute the mask to access the bitfield. */
4695 precision = TYPE_PRECISION (unsigned_type);
4697 mask = build_int_cst_type (unsigned_type, -1);
4699 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4700 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4702 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4703 if (and_mask != 0)
4704 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4705 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4707 *pmask = mask;
4708 *pand_mask = and_mask;
4709 return inner;
4712 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4713 bit positions and the type of MASK is signed. */
4715 static bool
4716 all_ones_mask_p (const_tree mask, unsigned int size)
4718 tree type = TREE_TYPE (mask);
4719 unsigned int precision = TYPE_PRECISION (type);
4721 /* If this function returns true when the type of the mask is
4722 UNSIGNED, then there will be errors. In particular see
4723 gcc.c-torture/execute/990326-1.c. There does not appear to be
4724 any documentation paper trail as to why this is so. But the
4725 pre-wide-int code worked with that restriction and it has been preserved
4726 here. */
4727 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4728 return false;
4730 return wi::mask (size, false, precision) == wi::to_wide (mask);
4733 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4734 represents the sign bit of EXP's type. If EXP represents a sign
4735 or zero extension, also test VAL against the unextended type.
4736 The return value is the (sub)expression whose sign bit is VAL,
4737 or NULL_TREE otherwise. */
4739 tree
4740 sign_bit_p (tree exp, const_tree val)
4742 int width;
4743 tree t;
4745 /* Tree EXP must have an integral type. */
4746 t = TREE_TYPE (exp);
4747 if (! INTEGRAL_TYPE_P (t))
4748 return NULL_TREE;
4750 /* Tree VAL must be an integer constant. */
4751 if (TREE_CODE (val) != INTEGER_CST
4752 || TREE_OVERFLOW (val))
4753 return NULL_TREE;
4755 width = TYPE_PRECISION (t);
4756 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4757 return exp;
4759 /* Handle extension from a narrower type. */
4760 if (TREE_CODE (exp) == NOP_EXPR
4761 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4762 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4764 return NULL_TREE;
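/* The property sign_bit_p detects, in source terms (illustrative;
   assumes 32-bit two's complement int): the only constant equal to
   exactly the sign bit is 1 << 31, so masking with it is a sign test.  */

#include <assert.h>
#include <stdint.h>

static int
sign_test (int32_t x)
{
  return (x & INT32_MIN) != 0;  /* equivalent to x < 0 */
}

int
main (void)
{
  assert (sign_test (-1) && sign_test (INT32_MIN));
  assert (!sign_test (0) && !sign_test (INT32_MAX));
  return 0;
}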
4767 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4768 to be evaluated unconditionally. */
4770 static bool
4771 simple_operand_p (const_tree exp)
4773 /* Strip any conversions that don't change the machine mode. */
4774 STRIP_NOPS (exp);
4776 return (CONSTANT_CLASS_P (exp)
4777 || TREE_CODE (exp) == SSA_NAME
4778 || (DECL_P (exp)
4779 && ! TREE_ADDRESSABLE (exp)
4780 && ! TREE_THIS_VOLATILE (exp)
4781 && ! DECL_NONLOCAL (exp)
4782 /* Don't regard global variables as simple. They may be
4783 allocated in ways unknown to the compiler (shared memory,
4784 #pragma weak, etc). */
4785 && ! TREE_PUBLIC (exp)
4786 && ! DECL_EXTERNAL (exp)
4787 /* Weakrefs are not safe to be read, since they can be NULL.
4788 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4789 have DECL_WEAK flag set. */
4790 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4791 /* Loading a static variable is unduly expensive, but global
4792 registers aren't expensive. */
4793 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4796 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4797 to be evaluated unconditionally.
4798 In addition to simple_operand_p, we assume that comparisons, conversions,
4799 and logic-not operations are simple if their operands are simple, too. */
4801 static bool
4802 simple_operand_p_2 (tree exp)
4804 enum tree_code code;
4806 if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4807 return false;
4809 while (CONVERT_EXPR_P (exp))
4810 exp = TREE_OPERAND (exp, 0);
4812 code = TREE_CODE (exp);
4814 if (TREE_CODE_CLASS (code) == tcc_comparison)
4815 return (simple_operand_p (TREE_OPERAND (exp, 0))
4816 && simple_operand_p (TREE_OPERAND (exp, 1)));
4818 if (code == TRUTH_NOT_EXPR)
4819 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4821 return simple_operand_p (exp);
4825 /* The following functions are subroutines to fold_range_test and allow it to
4826 try to change a logical combination of comparisons into a range test.
4828 For example, both
4829 X == 2 || X == 3 || X == 4 || X == 5
4830 and
4831 X >= 2 && X <= 5
4832 are converted to
4833 (unsigned) (X - 2) <= 3
4835 We describe each set of comparisons as being either inside or outside
4836 a range, using a variable named like IN_P, and then describe the
4837 range with a lower and upper bound. If one of the bounds is omitted,
4838 it represents either the highest or lowest value of the type.
4840 In the comments below, we represent a range by two numbers in brackets
4841 preceded by a "+" to designate being inside that range, or a "-" to
4842 designate being outside that range, so the condition can be inverted by
4843 flipping the prefix. An omitted bound is represented by a "-". For
4844 example, "- [-, 10]" means being outside the range starting at the lowest
4845 possible value and ending at 10, in other words, being greater than 10.
4846 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4847 always false.
4849 We set up things so that the missing bounds are handled in a consistent
4850 manner so neither a missing bound nor "true" and "false" need to be
4851 handled using a special case. */
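/* The example above as a standalone check (illustrative driver; the
   folded form subtracts in unsigned arithmetic, which is how the
   generated range test sidesteps signed overflow at the boundaries):  */

#include <assert.h>

static int
orig (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
folded (int x)
{
  return (unsigned) x - 2u <= 3u;  /* the range test + [2, 5] */
}

int
main (void)
{
  for (int x = -10; x <= 10; x++)
    assert (orig (x) == folded (x));
  return 0;
}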
4853 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4854 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4855 and UPPER1_P are nonzero if the respective argument is an upper bound
4856 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4857 must be specified for a comparison. ARG1 will be converted to ARG0's
4858 type if both are specified. */
4860 static tree
4861 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4862 tree arg1, int upper1_p)
4864 tree tem;
4865 int result;
4866 int sgn0, sgn1;
4868 /* If neither arg represents infinity, do the normal operation.
4869 Else, if not a comparison, return infinity. Else handle the special
4870 comparison rules. Note that most of the cases below won't occur, but
4871 are handled for consistency. */
4873 if (arg0 != 0 && arg1 != 0)
4875 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4876 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4877 STRIP_NOPS (tem);
4878 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4881 if (TREE_CODE_CLASS (code) != tcc_comparison)
4882 return 0;
4884 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4885 for neither. In real maths, we cannot assume open ended ranges are
4886 the same. But, this is computer arithmetic, where numbers are finite.
4887 We can therefore make the transformation of any unbounded range with
4888 the value Z, Z being greater than any representable number. This permits
4889 us to treat unbounded ranges as equal. */
4890 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4891 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4892 switch (code)
4894 case EQ_EXPR:
4895 result = sgn0 == sgn1;
4896 break;
4897 case NE_EXPR:
4898 result = sgn0 != sgn1;
4899 break;
4900 case LT_EXPR:
4901 result = sgn0 < sgn1;
4902 break;
4903 case LE_EXPR:
4904 result = sgn0 <= sgn1;
4905 break;
4906 case GT_EXPR:
4907 result = sgn0 > sgn1;
4908 break;
4909 case GE_EXPR:
4910 result = sgn0 >= sgn1;
4911 break;
4912 default:
4913 gcc_unreachable ();
4916 return constant_boolean_node (result, type);
4919 /* Helper routine for make_range. Perform one step for it, return
4920 new expression if the loop should continue or NULL_TREE if it should
4921 stop. */
4923 tree
4924 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4925 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4926 bool *strict_overflow_p)
4928 tree arg0_type = TREE_TYPE (arg0);
4929 tree n_low, n_high, low = *p_low, high = *p_high;
4930 int in_p = *p_in_p, n_in_p;
4932 switch (code)
4934 case TRUTH_NOT_EXPR:
4935 /* We can only do something if the range is testing for zero. */
4936 if (low == NULL_TREE || high == NULL_TREE
4937 || ! integer_zerop (low) || ! integer_zerop (high))
4938 return NULL_TREE;
4939 *p_in_p = ! in_p;
4940 return arg0;
4942 case EQ_EXPR: case NE_EXPR:
4943 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4944 /* We can only do something if the range is testing for zero
4945 and if the second operand is an integer constant. Note that
4946 saying something is "in" the range we make is done by
4947 complementing IN_P since it will set in the initial case of
4948 being not equal to zero; "out" is leaving it alone. */
4949 if (low == NULL_TREE || high == NULL_TREE
4950 || ! integer_zerop (low) || ! integer_zerop (high)
4951 || TREE_CODE (arg1) != INTEGER_CST)
4952 return NULL_TREE;
4954 switch (code)
4956 case NE_EXPR: /* - [c, c] */
4957 low = high = arg1;
4958 break;
4959 case EQ_EXPR: /* + [c, c] */
4960 in_p = ! in_p, low = high = arg1;
4961 break;
4962 case GT_EXPR: /* - [-, c] */
4963 low = 0, high = arg1;
4964 break;
4965 case GE_EXPR: /* + [c, -] */
4966 in_p = ! in_p, low = arg1, high = 0;
4967 break;
4968 case LT_EXPR: /* - [c, -] */
4969 low = arg1, high = 0;
4970 break;
4971 case LE_EXPR: /* + [-, c] */
4972 in_p = ! in_p, low = 0, high = arg1;
4973 break;
4974 default:
4975 gcc_unreachable ();
4978 /* If this is an unsigned comparison, we also know that EXP is
4979 greater than or equal to zero. We base the range tests we make
4980 on that fact, so we record it here so we can parse existing
4981 range tests. We test arg0_type since often the return type
4982 of, e.g. EQ_EXPR, is boolean. */
4983 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4985 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4986 in_p, low, high, 1,
4987 build_int_cst (arg0_type, 0),
4988 NULL_TREE))
4989 return NULL_TREE;
4991 in_p = n_in_p, low = n_low, high = n_high;
4993 /* If the high bound is missing, but we have a nonzero low
4994 bound, reverse the range so it goes from zero to the low bound
4995 minus 1. */
4996 if (high == 0 && low && ! integer_zerop (low))
4998 in_p = ! in_p;
4999 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5000 build_int_cst (TREE_TYPE (low), 1), 0);
5001 low = build_int_cst (arg0_type, 0);
5005 *p_low = low;
5006 *p_high = high;
5007 *p_in_p = in_p;
5008 return arg0;
5010 case NEGATE_EXPR:
5011 /* If flag_wrapv and ARG0_TYPE is signed, make sure
5012 low and high are non-NULL, then normalize will DTRT. */
5013 if (!TYPE_UNSIGNED (arg0_type)
5014 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5016 if (low == NULL_TREE)
5017 low = TYPE_MIN_VALUE (arg0_type);
5018 if (high == NULL_TREE)
5019 high = TYPE_MAX_VALUE (arg0_type);
5022 /* (-x) IN [a,b] -> x in [-b, -a] */
5023 n_low = range_binop (MINUS_EXPR, exp_type,
5024 build_int_cst (exp_type, 0),
5025 0, high, 1);
5026 n_high = range_binop (MINUS_EXPR, exp_type,
5027 build_int_cst (exp_type, 0),
5028 0, low, 0);
5029 if (n_high != 0 && TREE_OVERFLOW (n_high))
5030 return NULL_TREE;
5031 goto normalize;
5033 case BIT_NOT_EXPR:
5034 /* ~ X -> -X - 1 */
5035 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5036 build_int_cst (exp_type, 1));
5038 case PLUS_EXPR:
5039 case MINUS_EXPR:
5040 if (TREE_CODE (arg1) != INTEGER_CST)
5041 return NULL_TREE;
5043 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5044 move a constant to the other side. */
5045 if (!TYPE_UNSIGNED (arg0_type)
5046 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5047 return NULL_TREE;
5049 /* If EXP is signed, any overflow in the computation is undefined,
5050 so we don't worry about it so long as our computations on
5051 the bounds don't overflow. For unsigned, overflow is defined
5052 and this is exactly the right thing. */
5053 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5054 arg0_type, low, 0, arg1, 0);
5055 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5056 arg0_type, high, 1, arg1, 0);
5057 if ((n_low != 0 && TREE_OVERFLOW (n_low))
5058 || (n_high != 0 && TREE_OVERFLOW (n_high)))
5059 return NULL_TREE;
5061 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5062 *strict_overflow_p = true;
5064 normalize:
5065 /* Check for an unsigned range which has wrapped around the maximum
5066 value thus making n_high < n_low, and normalize it. */
5067 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5069 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5070 build_int_cst (TREE_TYPE (n_high), 1), 0);
5071 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5072 build_int_cst (TREE_TYPE (n_low), 1), 0);
5074 /* If the range is of the form +/- [ x+1, x ], we won't
5075 be able to normalize it. But then, it represents the
5076 whole range or the empty set, so make it
5077 +/- [ -, - ]. */
5078 if (tree_int_cst_equal (n_low, low)
5079 && tree_int_cst_equal (n_high, high))
5080 low = high = 0;
5081 else
5082 in_p = ! in_p;
5084 else
5085 low = n_low, high = n_high;
5087 *p_low = low;
5088 *p_high = high;
5089 *p_in_p = in_p;
5090 return arg0;
5092 CASE_CONVERT:
5093 case NON_LVALUE_EXPR:
5094 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5095 return NULL_TREE;
5097 if (! INTEGRAL_TYPE_P (arg0_type)
5098 || (low != 0 && ! int_fits_type_p (low, arg0_type))
5099 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5100 return NULL_TREE;
5102 n_low = low, n_high = high;
5104 if (n_low != 0)
5105 n_low = fold_convert_loc (loc, arg0_type, n_low);
5107 if (n_high != 0)
5108 n_high = fold_convert_loc (loc, arg0_type, n_high);
5110 /* If we're converting arg0 from an unsigned type to exp's
5111 signed type, we will be doing the comparison as unsigned.
5112 The tests above have already verified that LOW and HIGH
5113 are both positive.
5115 So we have to ensure that we will handle large unsigned
5116 values the same way that the current signed bounds treat
5117 negative values. */
5119 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5121 tree high_positive;
5122 tree equiv_type;
5123 /* For fixed-point modes, we need to pass the saturating flag
5124 as the 2nd parameter. */
5125 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5126 equiv_type
5127 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5128 TYPE_SATURATING (arg0_type));
5129 else
5130 equiv_type
5131 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5133 /* A range without an upper bound is, naturally, unbounded.
5134 Since convert would have cropped a very large value, use
5135 the max value for the destination type. */
5136 high_positive
5137 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5138 : TYPE_MAX_VALUE (arg0_type);
5140 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5141 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5142 fold_convert_loc (loc, arg0_type,
5143 high_positive),
5144 build_int_cst (arg0_type, 1));
5146 /* If the low bound is specified, "and" the range with the
5147 range for which the original unsigned value will be
5148 positive. */
5149 if (low != 0)
5151 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5152 1, fold_convert_loc (loc, arg0_type,
5153 integer_zero_node),
5154 high_positive))
5155 return NULL_TREE;
5157 in_p = (n_in_p == in_p);
5159 else
5161 /* Otherwise, "or" the range with the range of the input
5162 that will be interpreted as negative. */
5163 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5164 1, fold_convert_loc (loc, arg0_type,
5165 integer_zero_node),
5166 high_positive))
5167 return NULL_TREE;
5169 in_p = (in_p != n_in_p);
5173 *p_low = n_low;
5174 *p_high = n_high;
5175 *p_in_p = in_p;
5176 return arg0;
5178 default:
5179 return NULL_TREE;
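/* Two of the steps above restated as source-level identities (an
   illustrative check; the domain is small enough that no intermediate
   computation overflows):  */

#include <assert.h>

int
main (void)
{
  for (int x = -100; x <= 100; x++)
    {
      /* PLUS_EXPR: x + 3 in [0, 10]  <=>  x in [-3, 7].  */
      assert ((x + 3 >= 0 && x + 3 <= 10) == (x >= -3 && x <= 7));
      /* NEGATE_EXPR: -x in [2, 5]  <=>  x in [-5, -2].  */
      assert ((-x >= 2 && -x <= 5) == (x >= -5 && x <= -2));
    }
  return 0;
}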
5183 /* Given EXP, a logical expression, set the range it is testing into
5184 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
5185 actually being tested. *PLOW and *PHIGH will be made of the same
5186 type as the returned expression. If EXP is not a comparison, we
5187 will most likely not be returning a useful value and range. Set
5188 *STRICT_OVERFLOW_P to true if the return value is only valid
5189 because signed overflow is undefined; otherwise, do not change
5190 *STRICT_OVERFLOW_P. */
5192 tree
5193 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5194 bool *strict_overflow_p)
5196 enum tree_code code;
5197 tree arg0, arg1 = NULL_TREE;
5198 tree exp_type, nexp;
5199 int in_p;
5200 tree low, high;
5201 location_t loc = EXPR_LOCATION (exp);
5203 /* Start with simply saying "EXP != 0" and then look at the code of EXP
5204 and see if we can refine the range. Some of the cases below may not
5205 happen, but it doesn't seem worth worrying about this. We "continue"
5206 the outer loop when we've changed something; otherwise we "break"
5207 the switch, which will "break" the while. */
5209 in_p = 0;
5210 low = high = build_int_cst (TREE_TYPE (exp), 0);
5212 while (1)
5214 code = TREE_CODE (exp);
5215 exp_type = TREE_TYPE (exp);
5216 arg0 = NULL_TREE;
5218 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5220 if (TREE_OPERAND_LENGTH (exp) > 0)
5221 arg0 = TREE_OPERAND (exp, 0);
5222 if (TREE_CODE_CLASS (code) == tcc_binary
5223 || TREE_CODE_CLASS (code) == tcc_comparison
5224 || (TREE_CODE_CLASS (code) == tcc_expression
5225 && TREE_OPERAND_LENGTH (exp) > 1))
5226 arg1 = TREE_OPERAND (exp, 1);
5228 if (arg0 == NULL_TREE)
5229 break;
5231 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5232 &high, &in_p, strict_overflow_p);
5233 if (nexp == NULL_TREE)
5234 break;
5235 exp = nexp;
5238 /* If EXP is a constant, we can evaluate whether this is true or false. */
5239 if (TREE_CODE (exp) == INTEGER_CST)
5241 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5242 exp, 0, low, 0))
5243 && integer_onep (range_binop (LE_EXPR, integer_type_node,
5244 exp, 1, high, 1)));
5245 low = high = 0;
5246 exp = 0;
5249 *pin_p = in_p, *plow = low, *phigh = high;
5250 return exp;
5253 /* Returns TRUE if the [LOW, HIGH] range check can be optimized to
5254 a bitwise check, i.e. when
5255 LOW == 0xXX...X00...0
5256 HIGH == 0xXX...X11...1
5257 Return the corresponding mask in MASK and stem in VALUE. */
5259 static bool
5260 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5261 tree *value)
5263 if (TREE_CODE (low) != INTEGER_CST
5264 || TREE_CODE (high) != INTEGER_CST)
5265 return false;
5267 unsigned prec = TYPE_PRECISION (type);
5268 wide_int lo = wi::to_wide (low, prec);
5269 wide_int hi = wi::to_wide (high, prec);
5271 wide_int end_mask = lo ^ hi;
5272 if ((end_mask & (end_mask + 1)) != 0
5273 || (lo & end_mask) != 0)
5274 return false;
5276 wide_int stem_mask = ~end_mask;
5277 wide_int stem = lo & stem_mask;
5278 if (stem != (hi & stem_mask))
5279 return false;
5281 *mask = wide_int_to_tree (type, stem_mask);
5282 *value = wide_int_to_tree (type, stem);
5284 return true;
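/* A worked instance of the LOW/HIGH shape required above (illustrative
   driver): with LOW == 0x20 and HIGH == 0x3f the end mask is 0x1f, the
   stem mask is ~0x1f and the stem is 0x20, so the range check collapses
   to one masked equality.  */

#include <assert.h>

int
main (void)
{
  for (unsigned x = 0; x < 256u; x++)
    assert (((x & ~0x1fu) == 0x20u) == (x >= 0x20u && x <= 0x3fu));
  return 0;
}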
5287 /* Helper routine for build_range_check and match.pd. Return the type to
5288 perform the check or NULL if it shouldn't be optimized. */
5290 tree
5291 range_check_type (tree etype)
5293 /* First make sure that arithmetic in this type is valid, then make sure
5294 that it wraps around. */
5295 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5296 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5298 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5300 tree utype, minv, maxv;
5302 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5303 for the type in question, as we rely on this here. */
5304 utype = unsigned_type_for (etype);
5305 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5306 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5307 build_int_cst (TREE_TYPE (maxv), 1), 1);
5308 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5310 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5311 minv, 1, maxv, 1)))
5312 etype = utype;
5313 else
5314 return NULL_TREE;
5316 else if (POINTER_TYPE_P (etype))
5317 etype = unsigned_type_for (etype);
5318 return etype;
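/* The wrap-around property this function relies on, in source form
   (illustrative; assumes the usual two's complement representation):  */

#include <assert.h>
#include <limits.h>

int
main (void)
{
  /* In the unsigned counterpart of the type, MAX + 1 must wrap to
     (unsigned) MIN for the biased range check to be valid.  */
  assert ((unsigned) INT_MAX + 1u == (unsigned) INT_MIN);
  return 0;
}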
5321 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5322 type, TYPE, return an expression to test if EXP is in (or out of, depending
5323 on IN_P) the range. Return 0 if the test couldn't be created. */
5325 tree
5326 build_range_check (location_t loc, tree type, tree exp, int in_p,
5327 tree low, tree high)
5329 tree etype = TREE_TYPE (exp), mask, value;
5331 /* Disable this optimization for function pointer expressions
5332 on targets that require function pointer canonicalization. */
5333 if (targetm.have_canonicalize_funcptr_for_compare ()
5334 && POINTER_TYPE_P (etype)
5335 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5336 return NULL_TREE;
5338 if (! in_p)
5340 value = build_range_check (loc, type, exp, 1, low, high);
5341 if (value != 0)
5342 return invert_truthvalue_loc (loc, value);
5344 return 0;
5347 if (low == 0 && high == 0)
5348 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5350 if (low == 0)
5351 return fold_build2_loc (loc, LE_EXPR, type, exp,
5352 fold_convert_loc (loc, etype, high));
5354 if (high == 0)
5355 return fold_build2_loc (loc, GE_EXPR, type, exp,
5356 fold_convert_loc (loc, etype, low));
5358 if (operand_equal_p (low, high, 0))
5359 return fold_build2_loc (loc, EQ_EXPR, type, exp,
5360 fold_convert_loc (loc, etype, low));
5362 if (TREE_CODE (exp) == BIT_AND_EXPR
5363 && maskable_range_p (low, high, etype, &mask, &value))
5364 return fold_build2_loc (loc, EQ_EXPR, type,
5365 fold_build2_loc (loc, BIT_AND_EXPR, etype,
5366 exp, mask),
5367 value);
5369 if (integer_zerop (low))
5371 if (! TYPE_UNSIGNED (etype))
5373 etype = unsigned_type_for (etype);
5374 high = fold_convert_loc (loc, etype, high);
5375 exp = fold_convert_loc (loc, etype, exp);
5377 return build_range_check (loc, type, exp, 1, 0, high);
5380 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5381 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5383 int prec = TYPE_PRECISION (etype);
5385 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5387 if (TYPE_UNSIGNED (etype))
5389 tree signed_etype = signed_type_for (etype);
5390 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5391 etype
5392 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5393 else
5394 etype = signed_etype;
5395 exp = fold_convert_loc (loc, etype, exp);
5397 return fold_build2_loc (loc, GT_EXPR, type, exp,
5398 build_int_cst (etype, 0));
5402 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5403 This requires wrap-around arithmetic for the type of the expression. */
5404 etype = range_check_type (etype);
5405 if (etype == NULL_TREE)
5406 return NULL_TREE;
5408 high = fold_convert_loc (loc, etype, high);
5409 low = fold_convert_loc (loc, etype, low);
5410 exp = fold_convert_loc (loc, etype, exp);
5412 value = const_binop (MINUS_EXPR, high, low);
5414 if (value != 0 && !TREE_OVERFLOW (value))
5415 return build_range_check (loc, type,
5416 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5417 1, build_int_cst (etype, 0), value);
5419 return 0;
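/* The central transformation above written back out as C (illustrative
   sketch: unsigned operands stand in for the wrap-around type chosen by
   range_check_type, and low <= high is assumed):  */

#include <assert.h>

static int
in_range_orig (unsigned c, unsigned low, unsigned high)
{
  return c >= low && c <= high;
}

static int
in_range_folded (unsigned c, unsigned low, unsigned high)
{
  /* One comparison: values below LOW wrap around past HIGH - LOW.  */
  return c - low <= high - low;
}

int
main (void)
{
  for (unsigned c = 0; c < 300u; c++)
    assert (in_range_orig (c, 10u, 20u) == in_range_folded (c, 10u, 20u));
  return 0;
}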
5422 /* Return the predecessor of VAL in its type, handling the infinite case. */
5424 static tree
5425 range_predecessor (tree val)
5427 tree type = TREE_TYPE (val);
5429 if (INTEGRAL_TYPE_P (type)
5430 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5431 return 0;
5432 else
5433 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5434 build_int_cst (TREE_TYPE (val), 1), 0);
5437 /* Return the successor of VAL in its type, handling the infinite case. */
5439 static tree
5440 range_successor (tree val)
5442 tree type = TREE_TYPE (val);
5444 if (INTEGRAL_TYPE_P (type)
5445 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5446 return 0;
5447 else
5448 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5449 build_int_cst (TREE_TYPE (val), 1), 0);
5452 /* Given two ranges, see if we can merge them into one. Return 1 if we
5453 can, 0 if we can't. Set the output range into the specified parameters. */
5455 bool
5456 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5457 tree high0, int in1_p, tree low1, tree high1)
5459 int no_overlap;
5460 int subset;
5461 int temp;
5462 tree tem;
5463 int in_p;
5464 tree low, high;
5465 int lowequal = ((low0 == 0 && low1 == 0)
5466 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5467 low0, 0, low1, 0)));
5468 int highequal = ((high0 == 0 && high1 == 0)
5469 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5470 high0, 1, high1, 1)));
5472 /* Make range 0 be the range that starts first, or ends last if they
5473 start at the same value. Swap them if it isn't. */
5474 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5475 low0, 0, low1, 0))
5476 || (lowequal
5477 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5478 high1, 1, high0, 1))))
5480 temp = in0_p, in0_p = in1_p, in1_p = temp;
5481 tem = low0, low0 = low1, low1 = tem;
5482 tem = high0, high0 = high1, high1 = tem;
5485 /* If the second range is != high1 where high1 is the maximum value
5486 of the type, try first merging with the < high1 range. */
5487 if (low1
5488 && high1
5489 && TREE_CODE (low1) == INTEGER_CST
5490 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5491 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5492 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5493 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5494 && operand_equal_p (low1, high1, 0))
5496 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5497 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5498 !in1_p, NULL_TREE, range_predecessor (low1)))
5499 return true;
5500 /* Similarly, if the second range is != low1 where low1 is the minimum
5501 value of the type, try first merging with the > low1 range. */
5502 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5503 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5504 !in1_p, range_successor (low1), NULL_TREE))
5505 return true;
5508 /* Now flag two cases, whether the ranges are disjoint or whether the
5509 second range is totally subsumed in the first. Note that the tests
5510 below are simplified by the ones above. */
5511 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5512 high0, 1, low1, 0));
5513 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5514 high1, 1, high0, 1));
5516 /* We now have four cases, depending on whether we are including or
5517 excluding the two ranges. */
5518 if (in0_p && in1_p)
5520 /* If they don't overlap, the result is false. If the second range
5521 is a subset it is the result. Otherwise, the range is from the start
5522 of the second to the end of the first. */
5523 if (no_overlap)
5524 in_p = 0, low = high = 0;
5525 else if (subset)
5526 in_p = 1, low = low1, high = high1;
5527 else
5528 in_p = 1, low = low1, high = high0;
5531 else if (in0_p && ! in1_p)
5533 /* If they don't overlap, the result is the first range. If they are
5534 equal, the result is false. If the second range is a subset of the
5535 first, and the ranges begin at the same place, we go from just after
5536 the end of the second range to the end of the first. If the second
5537 range is not a subset of the first, or if it is a subset and both
5538 ranges end at the same place, the range starts at the start of the
5539 first range and ends just before the second range.
5540 Otherwise, we can't describe this as a single range. */
5541 if (no_overlap)
5542 in_p = 1, low = low0, high = high0;
5543 else if (lowequal && highequal)
5544 in_p = 0, low = high = 0;
5545 else if (subset && lowequal)
5547 low = range_successor (high1);
5548 high = high0;
5549 in_p = 1;
5550 if (low == 0)
5552 /* We are in the weird situation where high0 > high1 but
5553 high1 has no successor. Punt. */
5554 return 0;
5557 else if (! subset || highequal)
5559 low = low0;
5560 high = range_predecessor (low1);
5561 in_p = 1;
5562 if (high == 0)
5564 /* low0 < low1 but low1 has no predecessor. Punt. */
5565 return 0;
5568 else
5569 return 0;
5572 else if (! in0_p && in1_p)
5574 /* If they don't overlap, the result is the second range. If the second
5575 is a subset of the first, the result is false. Otherwise,
5576 the range starts just after the first range and ends at the
5577 end of the second. */
5578 if (no_overlap)
5579 in_p = 1, low = low1, high = high1;
5580 else if (subset || highequal)
5581 in_p = 0, low = high = 0;
5582 else
5584 low = range_successor (high0);
5585 high = high1;
5586 in_p = 1;
5587 if (low == 0)
5589 /* high1 > high0 but high0 has no successor. Punt. */
5590 return 0;
5595 else
5597 /* The case where we are excluding both ranges. Here the complex case
5598 is if they don't overlap. In that case, the only time we have a
5599 range is if they are adjacent. If the second is a subset of the
5600 first, the result is the first. Otherwise, the range to exclude
5601 starts at the beginning of the first range and ends at the end of the
5602 second. */
5603 if (no_overlap)
5605 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5606 range_successor (high0),
5607 1, low1, 0)))
5608 in_p = 0, low = low0, high = high1;
5609 else
5611 /* Canonicalize - [min, x] into - [-, x]. */
5612 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5613 switch (TREE_CODE (TREE_TYPE (low0)))
5615 case ENUMERAL_TYPE:
5616 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5617 GET_MODE_BITSIZE
5618 (TYPE_MODE (TREE_TYPE (low0)))))
5619 break;
5620 /* FALLTHROUGH */
5621 case INTEGER_TYPE:
5622 if (tree_int_cst_equal (low0,
5623 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5624 low0 = 0;
5625 break;
5626 case POINTER_TYPE:
5627 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5628 && integer_zerop (low0))
5629 low0 = 0;
5630 break;
5631 default:
5632 break;
5635 /* Canonicalize - [x, max] into - [x, -]. */
5636 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5637 switch (TREE_CODE (TREE_TYPE (high1)))
5639 case ENUMERAL_TYPE:
5640 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5641 GET_MODE_BITSIZE
5642 (TYPE_MODE (TREE_TYPE (high1)))))
5643 break;
5644 /* FALLTHROUGH */
5645 case INTEGER_TYPE:
5646 if (tree_int_cst_equal (high1,
5647 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5648 high1 = 0;
5649 break;
5650 case POINTER_TYPE:
5651 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5652 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5653 high1, 1,
5654 build_int_cst (TREE_TYPE (high1), 1),
5655 1)))
5656 high1 = 0;
5657 break;
5658 default:
5659 break;
5662 /* The ranges might also be adjacent at the maximum and
5663 minimum values of the given type. For
5664 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5665 return + [x + 1, y - 1]. */
5666 if (low0 == 0 && high1 == 0)
5668 low = range_successor (high0);
5669 high = range_predecessor (low1);
5670 if (low == 0 || high == 0)
5671 return 0;
5673 in_p = 1;
5675 else
5676 return 0;
5679 else if (subset)
5680 in_p = 0, low = low0, high = high0;
5681 else
5682 in_p = 0, low = low0, high = high1;
5685 *pin_p = in_p, *plow = low, *phigh = high;
5686 return 1;
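/* Added illustrative note, not part of the original source: consider the
   ranges produced for "ch >= '0' && ch <= '9'". After canonicalization
   the first range is +[-, '9'] and the second is +['0', -]. Both are
   included (in0_p and in1_p set), they overlap, and neither is a subset
   of the other, so the code above returns the range from the start of
   the second to the end of the first: +['0', '9']. */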
5690 /* Subroutine of fold, looking inside expressions of the form
5691 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5692 of the COND_EXPR. This function is being used also to optimize
5693 A op B ? C : A, by reversing the comparison first.
5695 Return a folded expression whose code is not a COND_EXPR
5696 anymore, or NULL_TREE if no folding opportunity is found. */
5698 static tree
5699 fold_cond_expr_with_comparison (location_t loc, tree type,
5700 tree arg0, tree arg1, tree arg2)
5702 enum tree_code comp_code = TREE_CODE (arg0);
5703 tree arg00 = TREE_OPERAND (arg0, 0);
5704 tree arg01 = TREE_OPERAND (arg0, 1);
5705 tree arg1_type = TREE_TYPE (arg1);
5706 tree tem;
5708 STRIP_NOPS (arg1);
5709 STRIP_NOPS (arg2);
5711 /* If we have A op 0 ? A : -A, consider applying the following
5712 transformations:
5714 A == 0? A : -A same as -A
5715 A != 0? A : -A same as A
5716 A >= 0? A : -A same as abs (A)
5717 A > 0? A : -A same as abs (A)
5718 A <= 0? A : -A same as -abs (A)
5719 A < 0? A : -A same as -abs (A)
5721 None of these transformations work for modes with signed
5722 zeros. If A is +/-0, the first two transformations will
5723 change the sign of the result (from +0 to -0, or vice
5724 versa). The last four will fix the sign of the result,
5725 even though the original expressions could be positive or
5726 negative, depending on the sign of A.
5728 Note that all these transformations are correct if A is
5729 NaN, since the two alternatives (A and -A) are also NaNs. */
5730 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5731 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5732 ? real_zerop (arg01)
5733 : integer_zerop (arg01))
5734 && ((TREE_CODE (arg2) == NEGATE_EXPR
5735 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5736 /* In the case that A is of the form X-Y, '-A' (arg2) may
5737 have already been folded to Y-X, check for that. */
5738 || (TREE_CODE (arg1) == MINUS_EXPR
5739 && TREE_CODE (arg2) == MINUS_EXPR
5740 && operand_equal_p (TREE_OPERAND (arg1, 0),
5741 TREE_OPERAND (arg2, 1), 0)
5742 && operand_equal_p (TREE_OPERAND (arg1, 1),
5743 TREE_OPERAND (arg2, 0), 0))))
5744 switch (comp_code)
5746 case EQ_EXPR:
5747 case UNEQ_EXPR:
5748 tem = fold_convert_loc (loc, arg1_type, arg1);
5749 return fold_convert_loc (loc, type, negate_expr (tem));
5750 case NE_EXPR:
5751 case LTGT_EXPR:
5752 return fold_convert_loc (loc, type, arg1);
5753 case UNGE_EXPR:
5754 case UNGT_EXPR:
5755 if (flag_trapping_math)
5756 break;
5757 /* Fall through. */
5758 case GE_EXPR:
5759 case GT_EXPR:
5760 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5761 break;
5762 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5763 return fold_convert_loc (loc, type, tem);
5764 case UNLE_EXPR:
5765 case UNLT_EXPR:
5766 if (flag_trapping_math)
5767 break;
5768 /* FALLTHRU */
5769 case LE_EXPR:
5770 case LT_EXPR:
5771 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5772 break;
5773 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5774 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
5776 /* A <= 0 ? A : -A is valid for A == INT_MIN, but -abs(INT_MIN)
5777 is not; it invokes UB both in abs and in the negation of it.
5778 So, use ABSU_EXPR instead. */
5779 tree utype = unsigned_type_for (TREE_TYPE (arg1));
5780 tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
5781 tem = negate_expr (tem);
5782 return fold_convert_loc (loc, type, tem);
5784 else
5786 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5787 return negate_expr (fold_convert_loc (loc, type, tem));
5789 default:
5790 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5791 break;
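/* Added illustration, not part of the original source: with
   -fno-signed-zeros, "x > 0 ? x : -x" is handled by the GT_EXPR case
   above and folds to ABS_EXPR <x>, while "x <= 0 ? x : -x" for a
   signed x whose type has undefined overflow folds to
   (int) -ABSU_EXPR <x>, which stays well defined even for
   x == INT_MIN. */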
5794 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5795 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5796 both transformations are correct when A is NaN: A != 0
5797 is then true, and A == 0 is false. */
5799 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5800 && integer_zerop (arg01) && integer_zerop (arg2))
5802 if (comp_code == NE_EXPR)
5803 return fold_convert_loc (loc, type, arg1);
5804 else if (comp_code == EQ_EXPR)
5805 return build_zero_cst (type);
5808 /* Try some transformations of A op B ? A : B.
5810 A == B? A : B same as B
5811 A != B? A : B same as A
5812 A >= B? A : B same as max (A, B)
5813 A > B? A : B same as max (B, A)
5814 A <= B? A : B same as min (A, B)
5815 A < B? A : B same as min (B, A)
5817 As above, these transformations don't work in the presence
5818 of signed zeros. For example, if A and B are zeros of
5819 opposite sign, the first two transformations will change
5820 the sign of the result. In the last four, the original
5821 expressions give different results for (A=+0, B=-0) and
5822 (A=-0, B=+0), but the transformed expressions do not.
5824 The first two transformations are correct if either A or B
5825 is a NaN. In the first transformation, the condition will
5826 be false, and B will indeed be chosen. In the case of the
5827 second transformation, the condition A != B will be true,
5828 and A will be chosen.
5830 The conversions to max() and min() are not correct if B is
5831 a number and A is not. The conditions in the original
5832 expressions will be false, so all four give B. The min()
5833 and max() versions would give a NaN instead. */
5834 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5835 && operand_equal_for_comparison_p (arg01, arg2)
5836 /* Avoid these transformations if the COND_EXPR may be used
5837 as an lvalue in the C++ front-end. PR c++/19199. */
5838 && (in_gimple_form
5839 || VECTOR_TYPE_P (type)
5840 || (! lang_GNU_CXX ()
5841 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5842 || ! maybe_lvalue_p (arg1)
5843 || ! maybe_lvalue_p (arg2)))
5845 tree comp_op0 = arg00;
5846 tree comp_op1 = arg01;
5847 tree comp_type = TREE_TYPE (comp_op0);
5849 switch (comp_code)
5851 case EQ_EXPR:
5852 return fold_convert_loc (loc, type, arg2);
5853 case NE_EXPR:
5854 return fold_convert_loc (loc, type, arg1);
5855 case LE_EXPR:
5856 case LT_EXPR:
5857 case UNLE_EXPR:
5858 case UNLT_EXPR:
5859 /* In C++ a ?: expression can be an lvalue, so put the
5860 operand which will be used if they are equal first
5861 so that we can convert this back to the
5862 corresponding COND_EXPR. */
5863 if (!HONOR_NANS (arg1))
5865 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5866 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5867 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5868 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5869 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5870 comp_op1, comp_op0);
5871 return fold_convert_loc (loc, type, tem);
5873 break;
5874 case GE_EXPR:
5875 case GT_EXPR:
5876 case UNGE_EXPR:
5877 case UNGT_EXPR:
5878 if (!HONOR_NANS (arg1))
5880 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5881 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5882 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5883 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5884 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5885 comp_op1, comp_op0);
5886 return fold_convert_loc (loc, type, tem);
5888 break;
5889 case UNEQ_EXPR:
5890 if (!HONOR_NANS (arg1))
5891 return fold_convert_loc (loc, type, arg2);
5892 break;
5893 case LTGT_EXPR:
5894 if (!HONOR_NANS (arg1))
5895 return fold_convert_loc (loc, type, arg1);
5896 break;
5897 default:
5898 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5899 break;
5903 return NULL_TREE;
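/* Added example, not part of the original source: when neither NaNs nor
   signed zeros need to be honored, "a <= b ? a : b" folds to
   MIN_EXPR <a, b> and "a < b ? a : b" to MIN_EXPR <b, a>, matching the
   table above; the operand chosen on equality is placed first so that
   the fold can be converted back to the corresponding COND_EXPR. */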
5908 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5909 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5910 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5911 false) >= 2)
5912 #endif
5914 /* EXP is some logical combination of boolean tests. See if we can
5915 merge it into some range test. Return the new tree if so. */
5917 static tree
5918 fold_range_test (location_t loc, enum tree_code code, tree type,
5919 tree op0, tree op1)
5921 int or_op = (code == TRUTH_ORIF_EXPR
5922 || code == TRUTH_OR_EXPR);
5923 int in0_p, in1_p, in_p;
5924 tree low0, low1, low, high0, high1, high;
5925 bool strict_overflow_p = false;
5926 tree tem, lhs, rhs;
5927 const char * const warnmsg = G_("assuming signed overflow does not occur "
5928 "when simplifying range test");
5930 if (!INTEGRAL_TYPE_P (type))
5931 return 0;
5933 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5934 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5936 /* If this is an OR operation, invert both sides; we will invert
5937 again at the end. */
5938 if (or_op)
5939 in0_p = ! in0_p, in1_p = ! in1_p;
5941 /* If both expressions are the same, if we can merge the ranges, and we
5942 can build the range test, return it or its inverse. If one of the
5943 ranges is always true or always false, consider it to be the same
5944 expression as the other. */
5945 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5946 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5947 in1_p, low1, high1)
5948 && (tem = (build_range_check (loc, type,
5949 lhs != 0 ? lhs
5950 : rhs != 0 ? rhs : integer_zero_node,
5951 in_p, low, high))) != 0)
5953 if (strict_overflow_p)
5954 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5955 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5958 /* On machines where branches are expensive, if this is a
5959 short-circuited branch and the underlying object on both sides
5960 is the same, make a non-short-circuit operation. */
5961 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
5962 if (param_logical_op_non_short_circuit != -1)
5963 logical_op_non_short_circuit
5964 = param_logical_op_non_short_circuit;
5965 if (logical_op_non_short_circuit
5966 && !flag_sanitize_coverage
5967 && lhs != 0 && rhs != 0
5968 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5969 && operand_equal_p (lhs, rhs, 0))
5971 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5972 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5973 which cases we can't do this. */
5974 if (simple_operand_p (lhs))
5975 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5976 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5977 type, op0, op1);
5979 else if (!lang_hooks.decls.global_bindings_p ()
5980 && !CONTAINS_PLACEHOLDER_P (lhs))
5982 tree common = save_expr (lhs);
5984 if ((lhs = build_range_check (loc, type, common,
5985 or_op ? ! in0_p : in0_p,
5986 low0, high0)) != 0
5987 && (rhs = build_range_check (loc, type, common,
5988 or_op ? ! in1_p : in1_p,
5989 low1, high1)) != 0)
5991 if (strict_overflow_p)
5992 fold_overflow_warning (warnmsg,
5993 WARN_STRICT_OVERFLOW_COMPARISON);
5994 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5995 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5996 type, lhs, rhs);
6001 return 0;
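/* Added example, not part of the original source: for
   "c >= 'a' && c <= 'z'", both make_range calls return the same
   underlying expression, merge_ranges produces +['a', 'z'], and
   build_range_check typically emits a single unsigned comparison of
   the form (unsigned) (c - 'a') <= 25. */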
6004 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6005 bit value. Arrange things so the extra bits will be set to zero if and
6006 only if C is sign-extended to its full width. If MASK is nonzero,
6007 it is an INTEGER_CST that should be AND'ed with the extra bits. */
6009 static tree
6010 unextend (tree c, int p, int unsignedp, tree mask)
6012 tree type = TREE_TYPE (c);
6013 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6014 tree temp;
6016 if (p == modesize || unsignedp)
6017 return c;
6019 /* We work by getting just the sign bit into the low-order bit, then
6020 into the high-order bit, then sign-extend. We then XOR that value
6021 with C. */
6022 temp = build_int_cst (TREE_TYPE (c),
6023 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6025 /* We must use a signed type in order to get an arithmetic right shift.
6026 However, we must also avoid introducing accidental overflows, so that
6027 a subsequent call to integer_zerop will work. Hence we must
6028 do the type conversion here. At this point, the constant is either
6029 zero or one, and the conversion to a signed type can never overflow.
6030 We could get an overflow if this conversion is done anywhere else. */
6031 if (TYPE_UNSIGNED (type))
6032 temp = fold_convert (signed_type_for (type), temp);
6034 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6035 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6036 if (mask != 0)
6037 temp = const_binop (BIT_AND_EXPR, temp,
6038 fold_convert (TREE_TYPE (c), mask));
6039 /* If necessary, convert the type back to match the type of C. */
6040 if (TYPE_UNSIGNED (type))
6041 temp = fold_convert (type, temp);
6043 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
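/* Added worked example, not part of the original source: take an 8-bit
   mode, P == 4, UNSIGNEDP == 0 and MASK == 0. For C == 0xf8, the
   sign-extension of the 4-bit value -8, the extracted sign bit is 1;
   shifted to the top and arithmetically shifted back it becomes 0xf0,
   and 0xf8 ^ 0xf0 == 0x08, so the extra bits are zero exactly because
   C was sign-extended. A zero-extended C == 0x08 yields 0xf8 instead. */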
6046 /* For an expression that has the form
6047 (A && B) || ~B
6049 (A || B) && ~B,
6050 we can drop one of the inner expressions and simplify to
6051 A || ~B
6053 A && ~B
6054 LOC is the location of the resulting expression. OP is the inner
6055 logical operation (the left-hand side in the examples above), while CMPOP
6056 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
6057 removing a condition that guards another, as in
6058 (A != NULL && A->...) || A == NULL
6059 which we must not transform. If RHS_ONLY is true, only eliminate the
6060 right-most operand of the inner logical operation. */
6062 static tree
6063 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6064 bool rhs_only)
6066 tree type = TREE_TYPE (cmpop);
6067 enum tree_code code = TREE_CODE (cmpop);
6068 enum tree_code truthop_code = TREE_CODE (op);
6069 tree lhs = TREE_OPERAND (op, 0);
6070 tree rhs = TREE_OPERAND (op, 1);
6071 tree orig_lhs = lhs, orig_rhs = rhs;
6072 enum tree_code rhs_code = TREE_CODE (rhs);
6073 enum tree_code lhs_code = TREE_CODE (lhs);
6074 enum tree_code inv_code;
6076 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6077 return NULL_TREE;
6079 if (TREE_CODE_CLASS (code) != tcc_comparison)
6080 return NULL_TREE;
6082 if (rhs_code == truthop_code)
6084 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6085 if (newrhs != NULL_TREE)
6087 rhs = newrhs;
6088 rhs_code = TREE_CODE (rhs);
6091 if (lhs_code == truthop_code && !rhs_only)
6093 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6094 if (newlhs != NULL_TREE)
6096 lhs = newlhs;
6097 lhs_code = TREE_CODE (lhs);
6101 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6102 if (inv_code == rhs_code
6103 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6104 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6105 return lhs;
6106 if (!rhs_only && inv_code == lhs_code
6107 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6108 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6109 return rhs;
6110 if (rhs != orig_rhs || lhs != orig_lhs)
6111 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6112 lhs, rhs);
6113 return NULL_TREE;
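/* Added example, not part of the original source: in
   (x == 1 && y > 2) || y <= 2, CMPOP is "y <= 2" and OP is the inner
   TRUTH_ANDIF_EXPR. "y > 2" is the inverted comparison of CMPOP, so
   the routine returns "x == 1" and the caller can simplify the whole
   expression to x == 1 || y <= 2. The RHS_ONLY flag keeps guards such
   as (p != NULL && p->x) || p == NULL intact. */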
6116 /* Find ways of folding logical expressions of LHS and RHS:
6117 Try to merge two comparisons to the same innermost item.
6118 Look for range tests like "ch >= '0' && ch <= '9'".
6119 Look for combinations of simple terms on machines with expensive branches
6120 and evaluate the RHS unconditionally.
6122 For example, if we have p->a == 2 && p->b == 4 and we can make an
6123 object large enough to span both A and B, we can do this with a comparison
6124 against the object ANDed with a mask.
6126 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6127 operations to do this with one comparison.
6129 We check for both normal comparisons and the BIT_AND_EXPRs made by this
6130 function and the one above.
6132 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
6133 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6135 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6136 two operands.
6138 We return the simplified tree or 0 if no optimization is possible. */
6140 static tree
6141 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6142 tree lhs, tree rhs)
6144 /* If this is the "or" of two comparisons, we can do something if
6145 the comparisons are NE_EXPR. If this is the "and", we can do something
6146 if the comparisons are EQ_EXPR. I.e.,
6147 (a->b == 2 && a->c == 4) can become (a->new == NEW).
6149 WANTED_CODE is this operation code. For single bit fields, we can
6150 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6151 comparison for one-bit fields. */
6153 enum tree_code wanted_code;
6154 enum tree_code lcode, rcode;
6155 tree ll_arg, lr_arg, rl_arg, rr_arg;
6156 tree ll_inner, lr_inner, rl_inner, rr_inner;
6157 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6158 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6159 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6160 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6161 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6162 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6163 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6164 scalar_int_mode lnmode, rnmode;
6165 tree ll_mask, lr_mask, rl_mask, rr_mask;
6166 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6167 tree l_const, r_const;
6168 tree lntype, rntype, result;
6169 HOST_WIDE_INT first_bit, end_bit;
6170 int volatilep;
6172 /* Start by getting the comparison codes. Fail if anything is volatile.
6173 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6174 it were surrounded with a NE_EXPR. */
6176 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6177 return 0;
6179 lcode = TREE_CODE (lhs);
6180 rcode = TREE_CODE (rhs);
6182 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6184 lhs = build2 (NE_EXPR, truth_type, lhs,
6185 build_int_cst (TREE_TYPE (lhs), 0));
6186 lcode = NE_EXPR;
6189 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6191 rhs = build2 (NE_EXPR, truth_type, rhs,
6192 build_int_cst (TREE_TYPE (rhs), 0));
6193 rcode = NE_EXPR;
6196 if (TREE_CODE_CLASS (lcode) != tcc_comparison
6197 || TREE_CODE_CLASS (rcode) != tcc_comparison)
6198 return 0;
6200 ll_arg = TREE_OPERAND (lhs, 0);
6201 lr_arg = TREE_OPERAND (lhs, 1);
6202 rl_arg = TREE_OPERAND (rhs, 0);
6203 rr_arg = TREE_OPERAND (rhs, 1);
6205 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
6206 if (simple_operand_p (ll_arg)
6207 && simple_operand_p (lr_arg))
6209 if (operand_equal_p (ll_arg, rl_arg, 0)
6210 && operand_equal_p (lr_arg, rr_arg, 0))
6212 result = combine_comparisons (loc, code, lcode, rcode,
6213 truth_type, ll_arg, lr_arg);
6214 if (result)
6215 return result;
6217 else if (operand_equal_p (ll_arg, rr_arg, 0)
6218 && operand_equal_p (lr_arg, rl_arg, 0))
6220 result = combine_comparisons (loc, code, lcode,
6221 swap_tree_comparison (rcode),
6222 truth_type, ll_arg, lr_arg);
6223 if (result)
6224 return result;
6228 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6229 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6231 /* If the RHS can be evaluated unconditionally and its operands are
6232 simple, it wins to evaluate the RHS unconditionally on machines
6233 with expensive branches. In this case, this isn't a comparison
6234 that can be merged. */
6236 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6237 false) >= 2
6238 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6239 && simple_operand_p (rl_arg)
6240 && simple_operand_p (rr_arg))
6242 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
6243 if (code == TRUTH_OR_EXPR
6244 && lcode == NE_EXPR && integer_zerop (lr_arg)
6245 && rcode == NE_EXPR && integer_zerop (rr_arg)
6246 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6247 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6248 return build2_loc (loc, NE_EXPR, truth_type,
6249 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6250 ll_arg, rl_arg),
6251 build_int_cst (TREE_TYPE (ll_arg), 0));
6253 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
6254 if (code == TRUTH_AND_EXPR
6255 && lcode == EQ_EXPR && integer_zerop (lr_arg)
6256 && rcode == EQ_EXPR && integer_zerop (rr_arg)
6257 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6258 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6259 return build2_loc (loc, EQ_EXPR, truth_type,
6260 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6261 ll_arg, rl_arg),
6262 build_int_cst (TREE_TYPE (ll_arg), 0));
6265 /* See if the comparisons can be merged. Then get all the parameters for
6266 each side. */
6268 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6269 || (rcode != EQ_EXPR && rcode != NE_EXPR))
6270 return 0;
6272 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6273 volatilep = 0;
6274 ll_inner = decode_field_reference (loc, &ll_arg,
6275 &ll_bitsize, &ll_bitpos, &ll_mode,
6276 &ll_unsignedp, &ll_reversep, &volatilep,
6277 &ll_mask, &ll_and_mask);
6278 lr_inner = decode_field_reference (loc, &lr_arg,
6279 &lr_bitsize, &lr_bitpos, &lr_mode,
6280 &lr_unsignedp, &lr_reversep, &volatilep,
6281 &lr_mask, &lr_and_mask);
6282 rl_inner = decode_field_reference (loc, &rl_arg,
6283 &rl_bitsize, &rl_bitpos, &rl_mode,
6284 &rl_unsignedp, &rl_reversep, &volatilep,
6285 &rl_mask, &rl_and_mask);
6286 rr_inner = decode_field_reference (loc, &rr_arg,
6287 &rr_bitsize, &rr_bitpos, &rr_mode,
6288 &rr_unsignedp, &rr_reversep, &volatilep,
6289 &rr_mask, &rr_and_mask);
6291 /* The inner operation on the lhs of each comparison must be the
6292 same if we are to be able to do anything.
6293 Then see if we have constants. If not, the same must be true for
6294 the rhs's. */
6295 if (volatilep
6296 || ll_reversep != rl_reversep
6297 || ll_inner == 0 || rl_inner == 0
6298 || ! operand_equal_p (ll_inner, rl_inner, 0))
6299 return 0;
6301 if (TREE_CODE (lr_arg) == INTEGER_CST
6302 && TREE_CODE (rr_arg) == INTEGER_CST)
6304 l_const = lr_arg, r_const = rr_arg;
6305 lr_reversep = ll_reversep;
6307 else if (lr_reversep != rr_reversep
6308 || lr_inner == 0 || rr_inner == 0
6309 || ! operand_equal_p (lr_inner, rr_inner, 0))
6310 return 0;
6311 else
6312 l_const = r_const = 0;
6314 /* If either comparison code is not correct for our logical operation,
6315 fail. However, we can convert a one-bit comparison against zero into
6316 the opposite comparison against that bit being set in the field. */
6318 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6319 if (lcode != wanted_code)
6321 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6323 /* Make the left operand unsigned, since we are only interested
6324 in the value of one bit. Otherwise we are doing the wrong
6325 thing below. */
6326 ll_unsignedp = 1;
6327 l_const = ll_mask;
6329 else
6330 return 0;
6333 /* This is analogous to the code for l_const above. */
6334 if (rcode != wanted_code)
6336 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6338 rl_unsignedp = 1;
6339 r_const = rl_mask;
6341 else
6342 return 0;
6345 /* See if we can find a mode that contains both fields being compared on
6346 the left. If we can't, fail. Otherwise, update all constants and masks
6347 to be relative to a field of that size. */
6348 first_bit = MIN (ll_bitpos, rl_bitpos);
6349 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6350 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6351 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6352 volatilep, &lnmode))
6353 return 0;
6355 lnbitsize = GET_MODE_BITSIZE (lnmode);
6356 lnbitpos = first_bit & ~ (lnbitsize - 1);
6357 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6358 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6360 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6362 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6363 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6366 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6367 size_int (xll_bitpos));
6368 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6369 size_int (xrl_bitpos));
6371 if (l_const)
6373 l_const = fold_convert_loc (loc, lntype, l_const);
6374 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6375 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6376 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6377 fold_build1_loc (loc, BIT_NOT_EXPR,
6378 lntype, ll_mask))))
6380 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6382 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6385 if (r_const)
6387 r_const = fold_convert_loc (loc, lntype, r_const);
6388 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6389 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6390 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6391 fold_build1_loc (loc, BIT_NOT_EXPR,
6392 lntype, rl_mask))))
6394 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6396 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6400 /* If the right sides are not constant, do the same for them. Also,
6401 disallow this optimization if a size, signedness or storage order
6402 mismatch occurs between the left and right sides. */
6403 if (l_const == 0)
6405 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6406 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6407 || ll_reversep != lr_reversep
6408 /* Make sure the two fields on the right
6409 correspond to the left without being swapped. */
6410 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6411 return 0;
6413 first_bit = MIN (lr_bitpos, rr_bitpos);
6414 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6415 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6416 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6417 volatilep, &rnmode))
6418 return 0;
6420 rnbitsize = GET_MODE_BITSIZE (rnmode);
6421 rnbitpos = first_bit & ~ (rnbitsize - 1);
6422 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6423 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6425 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6427 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6428 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6431 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6432 rntype, lr_mask),
6433 size_int (xlr_bitpos));
6434 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6435 rntype, rr_mask),
6436 size_int (xrr_bitpos));
6438 /* Make a mask that corresponds to both fields being compared.
6439 Do this for both items being compared. If the operands are the
6440 same size and the bits being compared are in the same position
6441 then we can do this by masking both and comparing the masked
6442 results. */
6443 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6444 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6445 if (lnbitsize == rnbitsize
6446 && xll_bitpos == xlr_bitpos
6447 && lnbitpos >= 0
6448 && rnbitpos >= 0)
6450 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6451 lntype, lnbitsize, lnbitpos,
6452 ll_unsignedp || rl_unsignedp, ll_reversep);
6453 if (! all_ones_mask_p (ll_mask, lnbitsize))
6454 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6456 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6457 rntype, rnbitsize, rnbitpos,
6458 lr_unsignedp || rr_unsignedp, lr_reversep);
6459 if (! all_ones_mask_p (lr_mask, rnbitsize))
6460 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6462 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6465 /* There is still another way we can do something: If both pairs of
6466 fields being compared are adjacent, we may be able to make a wider
6467 field containing them both.
6469 Note that we still must mask the lhs/rhs expressions. Furthermore,
6470 the mask must be shifted to account for the shift done by
6471 make_bit_field_ref. */
6472 if (((ll_bitsize + ll_bitpos == rl_bitpos
6473 && lr_bitsize + lr_bitpos == rr_bitpos)
6474 || (ll_bitpos == rl_bitpos + rl_bitsize
6475 && lr_bitpos == rr_bitpos + rr_bitsize))
6476 && ll_bitpos >= 0
6477 && rl_bitpos >= 0
6478 && lr_bitpos >= 0
6479 && rr_bitpos >= 0)
6481 tree type;
6483 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6484 ll_bitsize + rl_bitsize,
6485 MIN (ll_bitpos, rl_bitpos),
6486 ll_unsignedp, ll_reversep);
6487 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6488 lr_bitsize + rr_bitsize,
6489 MIN (lr_bitpos, rr_bitpos),
6490 lr_unsignedp, lr_reversep);
6492 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6493 size_int (MIN (xll_bitpos, xrl_bitpos)));
6494 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6495 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6497 /* Convert to the smaller type before masking out unwanted bits. */
6498 type = lntype;
6499 if (lntype != rntype)
6501 if (lnbitsize > rnbitsize)
6503 lhs = fold_convert_loc (loc, rntype, lhs);
6504 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6505 type = rntype;
6507 else if (lnbitsize < rnbitsize)
6509 rhs = fold_convert_loc (loc, lntype, rhs);
6510 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6511 type = lntype;
6515 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6516 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6518 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6519 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6521 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6524 return 0;
6527 /* Handle the case of comparisons with constants. If there is something in
6528 common between the masks, those bits of the constants must be the same.
6529 If not, the condition is always false. Test for this to avoid generating
6530 incorrect code below. */
6531 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6532 if (! integer_zerop (result)
6533 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6534 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6536 if (wanted_code == NE_EXPR)
6538 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6539 return constant_boolean_node (true, truth_type);
6541 else
6543 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6544 return constant_boolean_node (false, truth_type);
6548 if (lnbitpos < 0)
6549 return 0;
6551 /* Construct the expression we will return. First get the component
6552 reference we will make. Unless the mask is all ones for the full
6553 width of that field, perform the mask operation. Then compare with the
6554 merged constant. */
6555 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6556 lntype, lnbitsize, lnbitpos,
6557 ll_unsignedp || rl_unsignedp, ll_reversep);
6559 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6560 if (! all_ones_mask_p (ll_mask, lnbitsize))
6561 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6563 return build2_loc (loc, wanted_code, truth_type, result,
6564 const_binop (BIT_IOR_EXPR, l_const, r_const));
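/* Added worked example, not part of the original source, assuming a
   little-endian target where the byte is the best mode: for
     struct s { unsigned a : 4; unsigned b : 4; } *p;
   the test "p->a == 2 && p->b == 4" has ll_mask == 0x0f,
   rl_mask == 0xf0 and merged constant 0x42; since the combined mask
   covers the whole byte, the BIT_AND is elided and the result is a
   single one-byte load compared against 0x42. */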
6567 /* T is an integer expression that is being multiplied or divided by, or
6568 taken modulo, a constant C (CODE says which operation and what kind of
6569 divide or modulus). See if we can eliminate that operation by folding it with
6570 other operations already in T. WIDE_TYPE, if non-null, is a type that
6571 should be used for the computation if wider than our type.
6573 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6574 (X * 2) + (Y * 4). We must, however, be assured that either the original
6575 expression would not overflow or that overflow is undefined for the type
6576 in the language in question.
6578 If we return a non-null expression, it is an equivalent form of the
6579 original computation, but need not be in the original type.
6581 We set *STRICT_OVERFLOW_P to true if the return value depends on
6582 signed overflow being undefined. Otherwise we do not change
6583 *STRICT_OVERFLOW_P. */
6585 static tree
6586 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6587 bool *strict_overflow_p)
6589 /* To avoid exponential search depth, refuse to allow recursion past
6590 three levels. Beyond that (1) it's highly unlikely that we'll find
6591 something interesting and (2) we've probably processed it before
6592 when we built the inner expression. */
6594 static int depth;
6595 tree ret;
6597 if (depth > 3)
6598 return NULL;
6600 depth++;
6601 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6602 depth--;
6604 return ret;
6607 static tree
6608 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6609 bool *strict_overflow_p)
6611 tree type = TREE_TYPE (t);
6612 enum tree_code tcode = TREE_CODE (t);
6613 tree ctype = (wide_type != 0
6614 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6615 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6616 ? wide_type : type);
6617 tree t1, t2;
6618 int same_p = tcode == code;
6619 tree op0 = NULL_TREE, op1 = NULL_TREE;
6620 bool sub_strict_overflow_p;
6622 /* Don't deal with constants of zero here; they confuse the code below. */
6623 if (integer_zerop (c))
6624 return NULL_TREE;
6626 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6627 op0 = TREE_OPERAND (t, 0);
6629 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6630 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6632 /* Note that we need not handle conditional operations here since fold
6633 already handles those cases. So just do arithmetic here. */
6634 switch (tcode)
6636 case INTEGER_CST:
6637 /* For a constant, we can always simplify if we are a multiply
6638 or (for divide and modulus) if it is a multiple of our constant. */
6639 if (code == MULT_EXPR
6640 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6641 TYPE_SIGN (type)))
6643 tree tem = const_binop (code, fold_convert (ctype, t),
6644 fold_convert (ctype, c));
6645 /* If the multiplication overflowed, we lost information on it.
6646 See PR68142 and PR69845. */
6647 if (TREE_OVERFLOW (tem))
6648 return NULL_TREE;
6649 return tem;
6651 break;
6653 CASE_CONVERT: case NON_LVALUE_EXPR:
6654 /* If op0 is an expression ... */
6655 if ((COMPARISON_CLASS_P (op0)
6656 || UNARY_CLASS_P (op0)
6657 || BINARY_CLASS_P (op0)
6658 || VL_EXP_CLASS_P (op0)
6659 || EXPRESSION_CLASS_P (op0))
6660 /* ... and has wrapping overflow, and its type is smaller
6661 than ctype, then we cannot pass through as widening. */
6662 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6663 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6664 && (TYPE_PRECISION (ctype)
6665 > TYPE_PRECISION (TREE_TYPE (op0))))
6666 /* ... or this is a truncation (t is narrower than op0),
6667 then we cannot pass through this narrowing. */
6668 || (TYPE_PRECISION (type)
6669 < TYPE_PRECISION (TREE_TYPE (op0)))
6670 /* ... or signedness changes for division or modulus,
6671 then we cannot pass through this conversion. */
6672 || (code != MULT_EXPR
6673 && (TYPE_UNSIGNED (ctype)
6674 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6675 /* ... or has undefined overflow while the converted-to
6676 type has not, we cannot do the operation in the inner type
6677 as that would introduce undefined overflow. */
6678 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6679 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6680 && !TYPE_OVERFLOW_UNDEFINED (type))))
6681 break;
6683 /* Pass the constant down and see if we can make a simplification. If
6684 we can, replace this expression with the inner simplification for
6685 possible later conversion to our or some other type. */
6686 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6687 && TREE_CODE (t2) == INTEGER_CST
6688 && !TREE_OVERFLOW (t2)
6689 && (t1 = extract_muldiv (op0, t2, code,
6690 code == MULT_EXPR ? ctype : NULL_TREE,
6691 strict_overflow_p)) != 0)
6692 return t1;
6693 break;
6695 case ABS_EXPR:
6696 /* If widening the type changes it from signed to unsigned, then we
6697 must avoid building ABS_EXPR itself as unsigned. */
6698 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6700 tree cstype = (*signed_type_for) (ctype);
6701 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6702 != 0)
6704 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6705 return fold_convert (ctype, t1);
6707 break;
6709 /* If the constant is negative, we cannot simplify this. */
6710 if (tree_int_cst_sgn (c) == -1)
6711 break;
6712 /* FALLTHROUGH */
6713 case NEGATE_EXPR:
6714 /* For division and modulus, type can't be unsigned, as e.g.
6715 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6716 For signed types, even with wrapping overflow, this is fine. */
6717 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6718 break;
6719 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6720 != 0)
6721 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6722 break;
6724 case MIN_EXPR: case MAX_EXPR:
6725 /* If widening the type changes the signedness, then we can't perform
6726 this optimization as that changes the result. */
6727 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6728 break;
6730 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6731 sub_strict_overflow_p = false;
6732 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6733 &sub_strict_overflow_p)) != 0
6734 && (t2 = extract_muldiv (op1, c, code, wide_type,
6735 &sub_strict_overflow_p)) != 0)
6737 if (tree_int_cst_sgn (c) < 0)
6738 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6739 if (sub_strict_overflow_p)
6740 *strict_overflow_p = true;
6741 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6742 fold_convert (ctype, t2));
6744 break;
6746 case LSHIFT_EXPR: case RSHIFT_EXPR:
6747 /* If the second operand is constant, this is a multiplication
6748 or floor division, by a power of two, so we can treat it that
6749 way unless the multiplier or divisor overflows. Signed
6750 left-shift overflow is implementation-defined rather than
6751 undefined in C90, so do not convert signed left shift into
6752 multiplication. */
6753 if (TREE_CODE (op1) == INTEGER_CST
6754 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6755 /* const_binop may not detect overflow correctly,
6756 so check for it explicitly here. */
6757 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6758 wi::to_wide (op1))
6759 && (t1 = fold_convert (ctype,
6760 const_binop (LSHIFT_EXPR, size_one_node,
6761 op1))) != 0
6762 && !TREE_OVERFLOW (t1))
6763 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6764 ? MULT_EXPR : FLOOR_DIV_EXPR,
6765 ctype,
6766 fold_convert (ctype, op0),
6767 t1),
6768 c, code, wide_type, strict_overflow_p);
6769 break;
6771 case PLUS_EXPR: case MINUS_EXPR:
6772 /* See if we can eliminate the operation on both sides. If we can, we
6773 can return a new PLUS or MINUS. If we can't, the only remaining
6774 cases where we can do anything are if the second operand is a
6775 constant. */
6776 sub_strict_overflow_p = false;
6777 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6778 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6779 if (t1 != 0 && t2 != 0
6780 && TYPE_OVERFLOW_WRAPS (ctype)
6781 && (code == MULT_EXPR
6782 /* If not multiplication, we can only do this if both operands
6783 are divisible by c. */
6784 || (multiple_of_p (ctype, op0, c)
6785 && multiple_of_p (ctype, op1, c))))
6787 if (sub_strict_overflow_p)
6788 *strict_overflow_p = true;
6789 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6790 fold_convert (ctype, t2));
6793 /* If this was a subtraction, negate OP1 and set it to be an addition.
6794 This simplifies the logic below. */
6795 if (tcode == MINUS_EXPR)
6797 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6798 /* If OP1 was not easily negatable, the constant may be OP0. */
6799 if (TREE_CODE (op0) == INTEGER_CST)
6801 std::swap (op0, op1);
6802 std::swap (t1, t2);
6806 if (TREE_CODE (op1) != INTEGER_CST)
6807 break;
6809 /* If either OP1 or C are negative, this optimization is not safe for
6810 some of the division and remainder types while for others we need
6811 to change the code. */
6812 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6814 if (code == CEIL_DIV_EXPR)
6815 code = FLOOR_DIV_EXPR;
6816 else if (code == FLOOR_DIV_EXPR)
6817 code = CEIL_DIV_EXPR;
6818 else if (code != MULT_EXPR
6819 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6820 break;
6823 /* If it's a multiply or a division/modulus operation of a multiple
6824 of our constant, do the operation and verify it doesn't overflow. */
6825 if (code == MULT_EXPR
6826 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6827 TYPE_SIGN (type)))
6829 op1 = const_binop (code, fold_convert (ctype, op1),
6830 fold_convert (ctype, c));
6831 /* We allow the constant to overflow with wrapping semantics. */
6832 if (op1 == 0
6833 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6834 break;
6836 else
6837 break;
6839 /* If we have an unsigned type, we cannot widen the operation since it
6840 will change the result if the original computation overflowed. */
6841 if (TYPE_UNSIGNED (ctype) && ctype != type)
6842 break;
6844 /* The last case is if we are a multiply. In that case, we can
6845 apply the distributive law to commute the multiply and addition
6846 if the multiplication of the constants doesn't overflow
6847 and overflow is defined. With undefined overflow
6848 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6849 But fold_plusminus_mult_expr would factor back any power-of-two
6850 value so do not distribute in the first place in this case. */
6851 if (code == MULT_EXPR
6852 && TYPE_OVERFLOW_WRAPS (ctype)
6853 && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6854 return fold_build2 (tcode, ctype,
6855 fold_build2 (code, ctype,
6856 fold_convert (ctype, op0),
6857 fold_convert (ctype, c)),
6858 op1);
6860 break;
6862 case MULT_EXPR:
6863 /* We have a special case here if we are doing something like
6864 (C * 8) % 4 since we know that's zero. */
6865 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6866 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6867 /* If the multiplication can overflow we cannot optimize this. */
6868 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6869 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6870 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6871 TYPE_SIGN (type)))
6873 *strict_overflow_p = true;
6874 return omit_one_operand (type, integer_zero_node, op0);
6877 /* ... fall through ... */
6879 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6880 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6881 /* If we can extract our operation from the LHS, do so and return a
6882 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6883 do something only if the second operand is a constant. */
6884 if (same_p
6885 && TYPE_OVERFLOW_WRAPS (ctype)
6886 && (t1 = extract_muldiv (op0, c, code, wide_type,
6887 strict_overflow_p)) != 0)
6888 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6889 fold_convert (ctype, op1));
6890 else if (tcode == MULT_EXPR && code == MULT_EXPR
6891 && TYPE_OVERFLOW_WRAPS (ctype)
6892 && (t1 = extract_muldiv (op1, c, code, wide_type,
6893 strict_overflow_p)) != 0)
6894 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6895 fold_convert (ctype, t1));
6896 else if (TREE_CODE (op1) != INTEGER_CST)
6897 return 0;
6899 /* If these are the same operation types, we can associate them
6900 assuming no overflow. */
6901 if (tcode == code)
6903 bool overflow_p = false;
6904 wi::overflow_type overflow_mul;
6905 signop sign = TYPE_SIGN (ctype);
6906 unsigned prec = TYPE_PRECISION (ctype);
6907 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6908 wi::to_wide (c, prec),
6909 sign, &overflow_mul);
6910 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6911 if (overflow_mul
6912 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6913 overflow_p = true;
6914 if (!overflow_p)
6915 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6916 wide_int_to_tree (ctype, mul));
6919 /* If these operations "cancel" each other, we have the main
6920 optimizations of this pass, which occur when either constant is a
6921 multiple of the other, in which case we replace this with either an
6922 operation of CODE or TCODE.
6924 If we have an unsigned type, we cannot do this since it will change
6925 the result if the original computation overflowed. */
6926 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6927 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6928 || (tcode == MULT_EXPR
6929 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6930 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6931 && code != MULT_EXPR)))
6933 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6934 TYPE_SIGN (type)))
6936 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6937 *strict_overflow_p = true;
6938 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6939 fold_convert (ctype,
6940 const_binop (TRUNC_DIV_EXPR,
6941 op1, c)));
6943 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6944 TYPE_SIGN (type)))
6946 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6947 *strict_overflow_p = true;
6948 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6949 fold_convert (ctype,
6950 const_binop (TRUNC_DIV_EXPR,
6951 c, op1)));
6954 break;
6956 default:
6957 break;
6960 return 0;
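/* Added example, not part of the original source: for t == x * 12,
   c == 4 and CODE == TRUNC_DIV_EXPR with signed x, the cancellation
   case above applies because 12 is a multiple of 4; the result is
   x * 3 and *STRICT_OVERFLOW_P is set, since the fold is only valid
   when signed overflow is undefined. */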
6963 /* Return a node which has the indicated constant VALUE (either 0 or
6964 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6965 and is of the indicated TYPE. */
6967 tree
6968 constant_boolean_node (bool value, tree type)
6970 if (type == integer_type_node)
6971 return value ? integer_one_node : integer_zero_node;
6972 else if (type == boolean_type_node)
6973 return value ? boolean_true_node : boolean_false_node;
6974 else if (TREE_CODE (type) == VECTOR_TYPE)
6975 return build_vector_from_val (type,
6976 build_int_cst (TREE_TYPE (type),
6977 value ? -1 : 0));
6978 else
6979 return fold_convert (type, value ? integer_one_node : integer_zero_node);
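/* Added usage note, not part of the original source: for a vector type
   such as V4SI, constant_boolean_node (true, type) builds the all-ones
   vector {-1, -1, -1, -1}, the element-wise truth value produced by
   vector comparisons. */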
6983 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6984 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6985 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6986 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6987 COND is the first argument to CODE; otherwise (as in the example
6988 given here), it is the second argument. TYPE is the type of the
6989 original expression. Return NULL_TREE if no simplification is
6990 possible. */
6992 static tree
6993 fold_binary_op_with_conditional_arg (location_t loc,
6994 enum tree_code code,
6995 tree type, tree op0, tree op1,
6996 tree cond, tree arg, int cond_first_p)
6998 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6999 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7000 tree test, true_value, false_value;
7001 tree lhs = NULL_TREE;
7002 tree rhs = NULL_TREE;
7003 enum tree_code cond_code = COND_EXPR;
7005 /* Do not move possibly trapping operations into the conditional as this
7006 pessimizes code and causes gimplification issues when applied late. */
7007 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7008 ANY_INTEGRAL_TYPE_P (type)
7009 && TYPE_OVERFLOW_TRAPS (type), op1))
7010 return NULL_TREE;
7012 if (TREE_CODE (cond) == COND_EXPR
7013 || TREE_CODE (cond) == VEC_COND_EXPR)
7015 test = TREE_OPERAND (cond, 0);
7016 true_value = TREE_OPERAND (cond, 1);
7017 false_value = TREE_OPERAND (cond, 2);
7018 /* If an arm of the conditional is a throw-expression (its type
7019 is void), it does not make sense to try to perform a logical
7020 or arithmetic operation involving it. */
7021 if (VOID_TYPE_P (TREE_TYPE (true_value)))
7022 lhs = true_value;
7023 if (VOID_TYPE_P (TREE_TYPE (false_value)))
7024 rhs = false_value;
7026 else if (!(TREE_CODE (type) != VECTOR_TYPE
7027 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
7029 tree testtype = TREE_TYPE (cond);
7030 test = cond;
7031 true_value = constant_boolean_node (true, testtype);
7032 false_value = constant_boolean_node (false, testtype);
7034 else
7035 /* Detect the case of mixing vector and scalar types - bail out. */
7036 return NULL_TREE;
7038 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7039 cond_code = VEC_COND_EXPR;
7041 /* This transformation is only worthwhile if we don't have to wrap ARG
7042 in a SAVE_EXPR and the operation can be simplified without recursing
7043 on at least one of the branches once it's pushed inside the COND_EXPR. */
7044 if (!TREE_CONSTANT (arg)
7045 && (TREE_SIDE_EFFECTS (arg)
7046 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7047 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7048 return NULL_TREE;
7050 arg = fold_convert_loc (loc, arg_type, arg);
7051 if (lhs == 0)
7053 true_value = fold_convert_loc (loc, cond_type, true_value);
7054 if (cond_first_p)
7055 lhs = fold_build2_loc (loc, code, type, true_value, arg);
7056 else
7057 lhs = fold_build2_loc (loc, code, type, arg, true_value);
7059 if (rhs == 0)
7061 false_value = fold_convert_loc (loc, cond_type, false_value);
7062 if (cond_first_p)
7063 rhs = fold_build2_loc (loc, code, type, false_value, arg);
7064 else
7065 rhs = fold_build2_loc (loc, code, type, arg, false_value);
7068 /* Check that we have simplified at least one of the branches. */
7069 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7070 return NULL_TREE;
7072 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7076 /* Subroutine of fold() that checks for the addition of +/- 0.0.
7078 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
7079 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
7080 ADDEND is the same as X.
7082 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7083 and finite. The problematic cases are when X is zero, and its mode
7084 has signed zeros. In the case of rounding towards -infinity,
7085 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
7086 modes, X + 0 is not the same as X because -0 + 0 is 0. */
7088 bool
7089 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
7091 if (!real_zerop (addend))
7092 return false;
7094 /* Don't allow the fold with -fsignaling-nans. */
7095 if (HONOR_SNANS (type))
7096 return false;
7098 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
7099 if (!HONOR_SIGNED_ZEROS (type))
7100 return true;
7102 /* There is no case that is safe for all rounding modes. */
7103 if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7104 return false;
7106 /* In a vector or complex, we would need to check the sign of all zeros. */
7107 if (TREE_CODE (addend) == VECTOR_CST)
7108 addend = uniform_vector_p (addend);
7109 if (!addend || TREE_CODE (addend) != REAL_CST)
7110 return false;
7112 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
7113 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
7114 negate = !negate;
7116 /* The mode has signed zeros, and we have to honor their sign.
7117 In this situation, there is only one case we can return true for:
7118 X - 0 is the same as X with default rounding. */
7119 return negate;
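/* Added example, not part of the original source: with signed zeros
   honored and default round-to-nearest, X == -0.0 gives
   X + 0.0 == +0.0 but X - 0.0 == -0.0, so only the subtraction of a
   literal +0.0 (NEGATE true) may be folded away. */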
7122 /* Subroutine of match.pd that optimizes comparisons of a division by
7123 a nonzero integer constant against an integer constant, i.e.
7124 X/C1 op C2.
7126 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7127 GE_EXPR or LE_EXPR. C1 and C2 must each be an INTEGER_CST. */
7129 enum tree_code
7130 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7131 tree *hi, bool *neg_overflow)
7133 tree prod, tmp, type = TREE_TYPE (c1);
7134 signop sign = TYPE_SIGN (type);
7135 wi::overflow_type overflow;
7137 /* We have to do this the hard way to detect unsigned overflow.
7138 prod = int_const_binop (MULT_EXPR, c1, c2); */
7139 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7140 prod = force_fit_type (type, val, -1, overflow);
7141 *neg_overflow = false;
7143 if (sign == UNSIGNED)
7145 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7146 *lo = prod;
7148 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
7149 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7150 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7152 else if (tree_int_cst_sgn (c1) >= 0)
7154 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7155 switch (tree_int_cst_sgn (c2))
7157 case -1:
7158 *neg_overflow = true;
7159 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7160 *hi = prod;
7161 break;
7163 case 0:
7164 *lo = fold_negate_const (tmp, type);
7165 *hi = tmp;
7166 break;
7168 case 1:
7169 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7170 *lo = prod;
7171 break;
7173 default:
7174 gcc_unreachable ();
7177 else
7179 /* A negative divisor reverses the relational operators. */
7180 code = swap_tree_comparison (code);
7182 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7183 switch (tree_int_cst_sgn (c2))
7185 case -1:
7186 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7187 *lo = prod;
7188 break;
7190 case 0:
7191 *hi = fold_negate_const (tmp, type);
7192 *lo = tmp;
7193 break;
7195 case 1:
7196 *neg_overflow = true;
7197 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7198 *hi = prod;
7199 break;
7201 default:
7202 gcc_unreachable ();
7206 if (code != EQ_EXPR && code != NE_EXPR)
7207 return code;
7209 if (TREE_OVERFLOW (*lo)
7210 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7211 *lo = NULL_TREE;
7212 if (TREE_OVERFLOW (*hi)
7213 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7214 *hi = NULL_TREE;
7216 return code;
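/* Added worked example, not part of the original source: for signed
   "x / 3 == 2", C1 == 3 and C2 == 2 give prod == 6 and tmp == 2, so
   *lo == 6 and *hi == 8; the caller can rewrite the division test as
   the range check 6 <= x && x <= 8. */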
7220 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7221 equality/inequality test, then return a simplified form of the test
7222 using a sign test. Otherwise return NULL. TYPE is the desired
7223 result type. */
7225 static tree
7226 fold_single_bit_test_into_sign_test (location_t loc,
7227 enum tree_code code, tree arg0, tree arg1,
7228 tree result_type)
7230 /* If this is testing a single bit, we can optimize the test. */
7231 if ((code == NE_EXPR || code == EQ_EXPR)
7232 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7233 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7235 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7236 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7237 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7239 if (arg00 != NULL_TREE
7240 /* This is only a win if casting to a signed type is cheap,
7241 i.e. when arg00's type is not a partial mode. */
7242 && type_has_mode_precision_p (TREE_TYPE (arg00)))
7244 tree stype = signed_type_for (TREE_TYPE (arg00));
7245 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7246 result_type,
7247 fold_convert_loc (loc, stype, arg00),
7248 build_int_cst (stype, 0));
7252 return NULL_TREE;
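/* Example (editor's note): for 32-bit int A, (A & 0x80000000) != 0
   tests exactly the sign bit of A, so it folds to A < 0, and
   (A & 0x80000000) == 0 folds to A >= 0.  */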
7255 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7256 equality/inequality test, then return a simplified form of
7257 the test using shifts and logical operations. Otherwise return
7258 NULL.  RESULT_TYPE is the desired result type. */
7260 tree
7261 fold_single_bit_test (location_t loc, enum tree_code code,
7262 tree arg0, tree arg1, tree result_type)
7264 /* If this is testing a single bit, we can optimize the test. */
7265 if ((code == NE_EXPR || code == EQ_EXPR)
7266 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7267 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7269 tree inner = TREE_OPERAND (arg0, 0);
7270 tree type = TREE_TYPE (arg0);
7271 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7272 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7273 int ops_unsigned;
7274 tree signed_type, unsigned_type, intermediate_type;
7275 tree tem, one;
7277 /* First, see if we can fold the single bit test into a sign-bit
7278 test. */
7279 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7280 result_type);
7281 if (tem)
7282 return tem;
7284 /* Otherwise we have (A & C) != 0 where C is a single bit,
7285 convert that into ((A >> C2) & 1), where C2 = log2(C).
7286 Similarly for (A & C) == 0. */
7288 /* If INNER is a right shift of a constant and it plus BITNUM does
7289 not overflow, adjust BITNUM and INNER. */
7290 if (TREE_CODE (inner) == RSHIFT_EXPR
7291 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7292 && bitnum < TYPE_PRECISION (type)
7293 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7294 TYPE_PRECISION (type) - bitnum))
7296 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7297 inner = TREE_OPERAND (inner, 0);
7300 /* If we are going to be able to omit the AND below, we must do our
7301 operations as unsigned. If we must use the AND, we have a choice.
7302 Normally unsigned is faster, but for some machines signed is. */
7303 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7304 && !flag_syntax_only) ? 0 : 1;
7306 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7307 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7308 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7309 inner = fold_convert_loc (loc, intermediate_type, inner);
7311 if (bitnum != 0)
7312 inner = build2 (RSHIFT_EXPR, intermediate_type,
7313 inner, size_int (bitnum));
7315 one = build_int_cst (intermediate_type, 1);
7317 if (code == EQ_EXPR)
7318 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7320 /* Put the AND last so it can combine with more things. */
7321 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7323 /* Make sure to return the proper type. */
7324 inner = fold_convert_loc (loc, result_type, inner);
7326 return inner;
7328 return NULL_TREE;
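/* Example (editor's note): (A & 8) != 0 folds to (A >> 3) & 1, and
   (A & 8) == 0 folds to ((A >> 3) ^ 1) & 1; the AND is emitted last
   so later folding can combine it with surrounding code.  */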
7331 /* Test whether it is preferable to swap two operands, ARG0 and
7332 ARG1, for example because ARG0 is an integer constant and ARG1
7333 isn't. */
7335 bool
7336 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7338 if (CONSTANT_CLASS_P (arg1))
7339 return 0;
7340 if (CONSTANT_CLASS_P (arg0))
7341 return 1;
7343 STRIP_NOPS (arg0);
7344 STRIP_NOPS (arg1);
7346 if (TREE_CONSTANT (arg1))
7347 return 0;
7348 if (TREE_CONSTANT (arg0))
7349 return 1;
7351 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7352 for commutative and comparison operators. Ensuring a canonical
7353 form allows the optimizers to find additional redundancies without
7354 having to explicitly check for both orderings. */
7355 if (TREE_CODE (arg0) == SSA_NAME
7356 && TREE_CODE (arg1) == SSA_NAME
7357 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7358 return 1;
7360 /* Put SSA_NAMEs last. */
7361 if (TREE_CODE (arg1) == SSA_NAME)
7362 return 0;
7363 if (TREE_CODE (arg0) == SSA_NAME)
7364 return 1;
7366 /* Put variables last. */
7367 if (DECL_P (arg1))
7368 return 0;
7369 if (DECL_P (arg0))
7370 return 1;
7372 return 0;
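/* Example (editor's note): for a commutative operator such as
   PLUS_EXPR, tree_swap_operands_p (5, x) is true while
   tree_swap_operands_p (x, 5) is false, so constants are
   canonicalized into the second operand (x + 5, never 5 + x);
   likewise _7 + _3 becomes _3 + _7 because SSA_NAMEs are ordered
   by version number.  */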
7376 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7377 means A >= Y && A != MAX, but in this case we know that
7378 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7380 static tree
7381 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7383 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7385 if (TREE_CODE (bound) == LT_EXPR)
7386 a = TREE_OPERAND (bound, 0);
7387 else if (TREE_CODE (bound) == GT_EXPR)
7388 a = TREE_OPERAND (bound, 1);
7389 else
7390 return NULL_TREE;
7392 typea = TREE_TYPE (a);
7393 if (!INTEGRAL_TYPE_P (typea)
7394 && !POINTER_TYPE_P (typea))
7395 return NULL_TREE;
7397 if (TREE_CODE (ineq) == LT_EXPR)
7399 a1 = TREE_OPERAND (ineq, 1);
7400 y = TREE_OPERAND (ineq, 0);
7402 else if (TREE_CODE (ineq) == GT_EXPR)
7404 a1 = TREE_OPERAND (ineq, 0);
7405 y = TREE_OPERAND (ineq, 1);
7407 else
7408 return NULL_TREE;
7410 if (TREE_TYPE (a1) != typea)
7411 return NULL_TREE;
7413 if (POINTER_TYPE_P (typea))
7415 /* Convert the pointers to integers before taking the difference. */
7416 tree ta = fold_convert_loc (loc, ssizetype, a);
7417 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7418 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7420 else
7421 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7423 if (!diff || !integer_onep (diff))
7424 return NULL_TREE;
7426 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7429 /* Fold a sum or difference of at least one multiplication.
7430 Returns the folded tree or NULL if no simplification could be made. */
7432 static tree
7433 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7434 tree arg0, tree arg1)
7436 tree arg00, arg01, arg10, arg11;
7437 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7439 /* (A * C) +- (B * C) -> (A+-B) * C.
7440 (A * C) +- A -> A * (C+-1).
7441 We are most concerned about the case where C is a constant,
7442 but other combinations show up during loop reduction. Since
7443 it is not difficult, try all four possibilities. */
7445 if (TREE_CODE (arg0) == MULT_EXPR)
7447 arg00 = TREE_OPERAND (arg0, 0);
7448 arg01 = TREE_OPERAND (arg0, 1);
7450 else if (TREE_CODE (arg0) == INTEGER_CST)
7452 arg00 = build_one_cst (type);
7453 arg01 = arg0;
7455 else
7457 /* We cannot generate constant 1 for fract. */
7458 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7459 return NULL_TREE;
7460 arg00 = arg0;
7461 arg01 = build_one_cst (type);
7463 if (TREE_CODE (arg1) == MULT_EXPR)
7465 arg10 = TREE_OPERAND (arg1, 0);
7466 arg11 = TREE_OPERAND (arg1, 1);
7468 else if (TREE_CODE (arg1) == INTEGER_CST)
7470 arg10 = build_one_cst (type);
7471 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7472 the purpose of this canonicalization. */
7473 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7474 && negate_expr_p (arg1)
7475 && code == PLUS_EXPR)
7477 arg11 = negate_expr (arg1);
7478 code = MINUS_EXPR;
7480 else
7481 arg11 = arg1;
7483 else
7485 /* We cannot generate constant 1 for fract. */
7486 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7487 return NULL_TREE;
7488 arg10 = arg1;
7489 arg11 = build_one_cst (type);
7491 same = NULL_TREE;
7493 /* Prefer factoring a common non-constant. */
7494 if (operand_equal_p (arg00, arg10, 0))
7495 same = arg00, alt0 = arg01, alt1 = arg11;
7496 else if (operand_equal_p (arg01, arg11, 0))
7497 same = arg01, alt0 = arg00, alt1 = arg10;
7498 else if (operand_equal_p (arg00, arg11, 0))
7499 same = arg00, alt0 = arg01, alt1 = arg10;
7500 else if (operand_equal_p (arg01, arg10, 0))
7501 same = arg01, alt0 = arg00, alt1 = arg11;
7503 /* No identical multiplicands; see if we can find a common
7504 power-of-two factor in non-power-of-two multiplies. This
7505 can help in multi-dimensional array access. */
7506 else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7508 HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7509 HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7510 HOST_WIDE_INT tmp;
7511 bool swap = false;
7512 tree maybe_same;
7514 /* Move min of absolute values to int11. */
7515 if (absu_hwi (int01) < absu_hwi (int11))
7517 tmp = int01, int01 = int11, int11 = tmp;
7518 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7519 maybe_same = arg01;
7520 swap = true;
7522 else
7523 maybe_same = arg11;
7525 const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7526 if (factor > 1
7527 && pow2p_hwi (factor)
7528 && (int01 & (factor - 1)) == 0
7529 /* The remainder should not be a constant, otherwise we
7530 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7531 increase the number of multiplications necessary. */
7532 && TREE_CODE (arg10) != INTEGER_CST)
7534 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7535 build_int_cst (TREE_TYPE (arg00),
7536 int01 / int11));
7537 alt1 = arg10;
7538 same = maybe_same;
7539 if (swap)
7540 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7544 if (!same)
7545 return NULL_TREE;
7547 if (! ANY_INTEGRAL_TYPE_P (type)
7548 || TYPE_OVERFLOW_WRAPS (type)
7549 /* We are neither factoring zero nor minus one. */
7550 || TREE_CODE (same) == INTEGER_CST)
7551 return fold_build2_loc (loc, MULT_EXPR, type,
7552 fold_build2_loc (loc, code, type,
7553 fold_convert_loc (loc, type, alt0),
7554 fold_convert_loc (loc, type, alt1)),
7555 fold_convert_loc (loc, type, same));
7557 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7558 same may be minus one and thus the multiplication may overflow. Perform
7559 the sum operation in an unsigned type. */
7560 tree utype = unsigned_type_for (type);
7561 tree tem = fold_build2_loc (loc, code, utype,
7562 fold_convert_loc (loc, utype, alt0),
7563 fold_convert_loc (loc, utype, alt1));
7564 /* If the sum evaluated to a constant that is not -INF, the multiplication
7565 cannot overflow. */
7566 if (TREE_CODE (tem) == INTEGER_CST
7567 && (wi::to_wide (tem)
7568 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7569 return fold_build2_loc (loc, MULT_EXPR, type,
7570 fold_convert (type, tem), same);
7572 /* Do not resort to unsigned multiplication because
7573 we lose the no-overflow property of the expression. */
7574 return NULL_TREE;
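/* Worked examples (editor's note): x*7 + x is treated as x*7 + x*1
   and folds to x * 8, while a*16 + b*4 has no common multiplicand
   but shares the power-of-two factor 4, so it is refolded as
   (a*4 + b) * 4.  */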
7577 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7578 specified by EXPR into the buffer PTR of length LEN bytes.
7579 Return the number of bytes placed in the buffer, or zero
7580 upon failure. */
7582 static int
7583 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7585 tree type = TREE_TYPE (expr);
7586 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7587 int byte, offset, word, words;
7588 unsigned char value;
7590 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7591 return 0;
7592 if (off == -1)
7593 off = 0;
7595 if (ptr == NULL)
7596 /* Dry run. */
7597 return MIN (len, total_bytes - off);
7599 words = total_bytes / UNITS_PER_WORD;
7601 for (byte = 0; byte < total_bytes; byte++)
7603 int bitpos = byte * BITS_PER_UNIT;
7604 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7605 number of bytes. */
7606 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7608 if (total_bytes > UNITS_PER_WORD)
7610 word = byte / UNITS_PER_WORD;
7611 if (WORDS_BIG_ENDIAN)
7612 word = (words - 1) - word;
7613 offset = word * UNITS_PER_WORD;
7614 if (BYTES_BIG_ENDIAN)
7615 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7616 else
7617 offset += byte % UNITS_PER_WORD;
7619 else
7620 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7621 if (offset >= off && offset - off < len)
7622 ptr[offset - off] = value;
7624 return MIN (len, total_bytes - off);
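/* Example (editor's note): the 32-bit INTEGER_CST 0x01020304 is
   encoded as the bytes 04 03 02 01 on a little-endian target and
   01 02 03 04 on a big-endian one; WORDS_BIG_ENDIAN additionally
   controls the word order of multi-word values.  */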
7628 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7629 specified by EXPR into the buffer PTR of length LEN bytes.
7630 Return the number of bytes placed in the buffer, or zero
7631 upon failure. */
7633 static int
7634 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7636 tree type = TREE_TYPE (expr);
7637 scalar_mode mode = SCALAR_TYPE_MODE (type);
7638 int total_bytes = GET_MODE_SIZE (mode);
7639 FIXED_VALUE_TYPE value;
7640 tree i_value, i_type;
7642 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7643 return 0;
7645 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7647 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7648 return 0;
7650 value = TREE_FIXED_CST (expr);
7651 i_value = double_int_to_tree (i_type, value.data);
7653 return native_encode_int (i_value, ptr, len, off);
7657 /* Subroutine of native_encode_expr. Encode the REAL_CST
7658 specified by EXPR into the buffer PTR of length LEN bytes.
7659 Return the number of bytes placed in the buffer, or zero
7660 upon failure. */
7662 static int
7663 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7665 tree type = TREE_TYPE (expr);
7666 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7667 int byte, offset, word, words, bitpos;
7668 unsigned char value;
7670 /* There are always 32 bits in each long, no matter the size of
7671 the host's long.  We handle floating point representations with
7672 up to 192 bits. */
7673 long tmp[6];
7675 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7676 return 0;
7677 if (off == -1)
7678 off = 0;
7680 if (ptr == NULL)
7681 /* Dry run. */
7682 return MIN (len, total_bytes - off);
7684 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7686 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7688 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7689 bitpos += BITS_PER_UNIT)
7691 byte = (bitpos / BITS_PER_UNIT) & 3;
7692 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7694 if (UNITS_PER_WORD < 4)
7696 word = byte / UNITS_PER_WORD;
7697 if (WORDS_BIG_ENDIAN)
7698 word = (words - 1) - word;
7699 offset = word * UNITS_PER_WORD;
7700 if (BYTES_BIG_ENDIAN)
7701 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7702 else
7703 offset += byte % UNITS_PER_WORD;
7705 else
7707 offset = byte;
7708 if (BYTES_BIG_ENDIAN)
7710 /* Reverse bytes within each long, or within the entire float
7711 if it's smaller than a long (for HFmode). */
7712 offset = MIN (3, total_bytes - 1) - offset;
7713 gcc_assert (offset >= 0);
7716 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7717 if (offset >= off
7718 && offset - off < len)
7719 ptr[offset - off] = value;
7721 return MIN (len, total_bytes - off);
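/* Example (editor's note): 1.0f has the IEEE bit pattern 0x3f800000
   and encodes as 00 00 80 3f on a little-endian target and
   3f 80 00 00 on a big-endian one.  */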
7724 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7725 specified by EXPR into the buffer PTR of length LEN bytes.
7726 Return the number of bytes placed in the buffer, or zero
7727 upon failure. */
7729 static int
7730 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7732 int rsize, isize;
7733 tree part;
7735 part = TREE_REALPART (expr);
7736 rsize = native_encode_expr (part, ptr, len, off);
7737 if (off == -1 && rsize == 0)
7738 return 0;
7739 part = TREE_IMAGPART (expr);
7740 if (off != -1)
7741 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7742 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7743 len - rsize, off);
7744 if (off == -1 && isize != rsize)
7745 return 0;
7746 return rsize + isize;
7749 /* Like native_encode_vector, but only encode the first COUNT elements.
7750 The other arguments are as for native_encode_vector. */
7752 static int
7753 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7754 int off, unsigned HOST_WIDE_INT count)
7756 tree itype = TREE_TYPE (TREE_TYPE (expr));
7757 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7758 && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7760 /* This is the only case in which elements can be smaller than a byte.
7761 Element 0 is always in the lsb of the containing byte. */
7762 unsigned int elt_bits = TYPE_PRECISION (itype);
7763 int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7764 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7765 return 0;
7767 if (off == -1)
7768 off = 0;
7770 /* Zero the buffer and then set bits later where necessary. */
7771 int extract_bytes = MIN (len, total_bytes - off);
7772 if (ptr)
7773 memset (ptr, 0, extract_bytes);
7775 unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7776 unsigned int first_elt = off * elts_per_byte;
7777 unsigned int extract_elts = extract_bytes * elts_per_byte;
7778 for (unsigned int i = 0; i < extract_elts; ++i)
7780 tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7781 if (TREE_CODE (elt) != INTEGER_CST)
7782 return 0;
7784 if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7786 unsigned int bit = i * elt_bits;
7787 ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7790 return extract_bytes;
7793 int offset = 0;
7794 int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7795 for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7797 if (off >= size)
7799 off -= size;
7800 continue;
7802 tree elem = VECTOR_CST_ELT (expr, i);
7803 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7804 len - offset, off);
7805 if ((off == -1 && res != size) || res == 0)
7806 return 0;
7807 offset += res;
7808 if (offset >= len)
7809 return (off == -1 && i < count - 1) ? 0 : offset;
7810 if (off != -1)
7811 off = 0;
7813 return offset;
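/* Example (editor's note): a boolean vector whose elements have
   1-bit precision packs eight elements per byte with element 0 in
   the least significant bit, so { 1, 0, 1, 1, 0, 0, 0, 0 } encodes
   as the single byte 0x0d.  */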
7816 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7817 specified by EXPR into the buffer PTR of length LEN bytes.
7818 Return the number of bytes placed in the buffer, or zero
7819 upon failure. */
7821 static int
7822 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7824 unsigned HOST_WIDE_INT count;
7825 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7826 return 0;
7827 return native_encode_vector_part (expr, ptr, len, off, count);
7831 /* Subroutine of native_encode_expr. Encode the STRING_CST
7832 specified by EXPR into the buffer PTR of length LEN bytes.
7833 Return the number of bytes placed in the buffer, or zero
7834 upon failure. */
7836 static int
7837 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7839 tree type = TREE_TYPE (expr);
7841 /* Wide-char strings are encoded in target byte-order so encoding
7842 them natively is trivial. */
7843 if (BITS_PER_UNIT != CHAR_BIT
7844 || TREE_CODE (type) != ARRAY_TYPE
7845 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7846 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7847 return 0;
7849 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7850 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7851 return 0;
7852 if (off == -1)
7853 off = 0;
7854 len = MIN (total_bytes - off, len);
7855 if (ptr == NULL)
7856 /* Dry run. */;
7857 else
7859 int written = 0;
7860 if (off < TREE_STRING_LENGTH (expr))
7862 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7863 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7865 memset (ptr + written, 0, len - written);
7867 return len;
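/* Example (editor's note): for char buf[4] initialized from "ab",
   TREE_STRING_LENGTH is 3 (it counts the terminating NUL), so the
   encoding is 61 62 00 00, zero-padded up to the array size.  */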
7871 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7872 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7873 buffer PTR of length LEN bytes. If PTR is NULL, don't actually store
7874 anything, just do a dry run. If OFF is not -1 then start
7875 the encoding at byte offset OFF and encode at most LEN bytes.
7876 Return the number of bytes placed in the buffer, or zero upon failure. */
7878 int
7879 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7881 /* We don't support starting at a negative offset, and -1 is special. */
7882 if (off < -1)
7883 return 0;
7885 switch (TREE_CODE (expr))
7887 case INTEGER_CST:
7888 return native_encode_int (expr, ptr, len, off);
7890 case REAL_CST:
7891 return native_encode_real (expr, ptr, len, off);
7893 case FIXED_CST:
7894 return native_encode_fixed (expr, ptr, len, off);
7896 case COMPLEX_CST:
7897 return native_encode_complex (expr, ptr, len, off);
7899 case VECTOR_CST:
7900 return native_encode_vector (expr, ptr, len, off);
7902 case STRING_CST:
7903 return native_encode_string (expr, ptr, len, off);
7905 default:
7906 return 0;
7910 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
7911 NON_LVALUE_EXPRs and nops. */
7913 int
7914 native_encode_initializer (tree init, unsigned char *ptr, int len,
7915 int off)
7917 /* We don't support starting at a negative offset, and -1 is special. */
7918 if (off < -1 || init == NULL_TREE)
7919 return 0;
7921 STRIP_NOPS (init);
7922 switch (TREE_CODE (init))
7924 case VIEW_CONVERT_EXPR:
7925 case NON_LVALUE_EXPR:
7926 return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off);
7927 default:
7928 return native_encode_expr (init, ptr, len, off);
7929 case CONSTRUCTOR:
7930 tree type = TREE_TYPE (init);
7931 HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
7932 if (total_bytes < 0)
7933 return 0;
7934 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7935 return 0;
7936 int o = off == -1 ? 0 : off;
7937 if (TREE_CODE (type) == ARRAY_TYPE)
7939 HOST_WIDE_INT min_index;
7940 unsigned HOST_WIDE_INT cnt;
7941 HOST_WIDE_INT curpos = 0, fieldsize;
7942 constructor_elt *ce;
7944 if (TYPE_DOMAIN (type) == NULL_TREE
7945 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
7946 return 0;
7948 fieldsize = int_size_in_bytes (TREE_TYPE (type));
7949 if (fieldsize <= 0)
7950 return 0;
7952 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
7953 if (ptr != NULL)
7954 memset (ptr, '\0', MIN (total_bytes - o, len));
7956 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
7958 tree val = ce->value;
7959 tree index = ce->index;
7960 HOST_WIDE_INT pos = curpos, count = 0;
7961 bool full = false;
7962 if (index && TREE_CODE (index) == RANGE_EXPR)
7964 if (!tree_fits_shwi_p (TREE_OPERAND (index, 0))
7965 || !tree_fits_shwi_p (TREE_OPERAND (index, 1)))
7966 return 0;
7967 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
7968 * fieldsize;
7969 count = (tree_to_shwi (TREE_OPERAND (index, 1))
7970 - tree_to_shwi (TREE_OPERAND (index, 0)));
7972 else if (index)
7974 if (!tree_fits_shwi_p (index))
7975 return 0;
7976 pos = (tree_to_shwi (index) - min_index) * fieldsize;
7979 curpos = pos;
7980 if (val)
7981 do
7983 if (off == -1
7984 || (curpos >= off
7985 && (curpos + fieldsize
7986 <= (HOST_WIDE_INT) off + len)))
7988 if (full)
7990 if (ptr)
7991 memcpy (ptr + (curpos - o), ptr + (pos - o),
7992 fieldsize);
7994 else if (!native_encode_initializer (val,
7995 ptr
7996 ? ptr + curpos - o
7997 : NULL,
7998 fieldsize,
7999 off == -1 ? -1
8000 : 0))
8001 return 0;
8002 else
8004 full = true;
8005 pos = curpos;
8008 else if (curpos + fieldsize > off
8009 && curpos < (HOST_WIDE_INT) off + len)
8011 /* Partial overlap. */
8012 unsigned char *p = NULL;
8013 int no = 0;
8014 int l;
8015 if (curpos >= off)
8017 if (ptr)
8018 p = ptr + curpos - off;
8019 l = MIN ((HOST_WIDE_INT) off + len - curpos,
8020 fieldsize);
8022 else
8024 p = ptr;
8025 no = off - curpos;
8026 l = len;
8028 if (!native_encode_initializer (val, p, l, no))
8029 return 0;
8031 curpos += fieldsize;
8033 while (count-- != 0);
8035 return MIN (total_bytes - o, len);
8037 else if (TREE_CODE (type) == RECORD_TYPE
8038 || TREE_CODE (type) == UNION_TYPE)
8040 unsigned HOST_WIDE_INT cnt;
8041 constructor_elt *ce;
8043 if (ptr != NULL)
8044 memset (ptr, '\0', MIN (total_bytes - o, len));
8045 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
8047 tree field = ce->index;
8048 tree val = ce->value;
8049 HOST_WIDE_INT pos, fieldsize;
8051 if (field == NULL_TREE)
8052 return 0;
8054 pos = int_byte_position (field);
8055 if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8056 continue;
8058 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8059 && TYPE_DOMAIN (TREE_TYPE (field))
8060 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8061 return 0;
8062 if (DECL_SIZE_UNIT (field) == NULL_TREE
8063 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8064 return 0;
8065 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8066 if (fieldsize == 0)
8067 continue;
8069 if (off != -1 && pos + fieldsize <= off)
8070 continue;
8072 if (DECL_BIT_FIELD (field))
8073 return 0;
8075 if (val == NULL_TREE)
8076 continue;
8078 if (off == -1
8079 || (pos >= off
8080 && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8082 if (!native_encode_initializer (val, ptr ? ptr + pos - o
8083 : NULL,
8084 fieldsize,
8085 off == -1 ? -1 : 0))
8086 return 0;
8088 else
8090 /* Partial overlap. */
8091 unsigned char *p = NULL;
8092 int no = 0;
8093 int l;
8094 if (pos >= off)
8096 if (ptr)
8097 p = ptr + pos - off;
8098 l = MIN ((HOST_WIDE_INT) off + len - pos,
8099 fieldsize);
8101 else
8103 p = ptr;
8104 no = off - pos;
8105 l = len;
8107 if (!native_encode_initializer (val, p, l, no))
8108 return 0;
8111 return MIN (total_bytes - o, len);
8113 return 0;
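/* Example (editor's note): for
     struct { char a; short b; } s = { 1, 2 };
   a little-endian target encodes the CONSTRUCTOR as 01 00 02 00;
   the buffer is pre-zeroed, so the padding byte after A stays 0.
   Bit-fields and flexible array members make the routine give up
   and return 0.  */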
8118 /* Subroutine of native_interpret_expr. Interpret the contents of
8119 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8120 If the buffer cannot be interpreted, return NULL_TREE. */
8122 static tree
8123 native_interpret_int (tree type, const unsigned char *ptr, int len)
8125 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8127 if (total_bytes > len
8128 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8129 return NULL_TREE;
8131 wide_int result = wi::from_buffer (ptr, total_bytes);
8133 return wide_int_to_tree (type, result);
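/* Round-trip sketch (editor's addition; the buffer size is an
   arbitrary choice):

     unsigned char buf[16];
     tree again = NULL_TREE;
     int len = native_encode_expr (expr, buf, sizeof buf);
     if (len != 0)
       again = native_interpret_expr (TREE_TYPE (expr), buf, len);

   When both calls succeed, AGAIN compares operand_equal_p to EXPR.  */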
8137 /* Subroutine of native_interpret_expr. Interpret the contents of
8138 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8139 If the buffer cannot be interpreted, return NULL_TREE. */
8141 static tree
8142 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8144 scalar_mode mode = SCALAR_TYPE_MODE (type);
8145 int total_bytes = GET_MODE_SIZE (mode);
8146 double_int result;
8147 FIXED_VALUE_TYPE fixed_value;
8149 if (total_bytes > len
8150 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8151 return NULL_TREE;
8153 result = double_int::from_buffer (ptr, total_bytes);
8154 fixed_value = fixed_from_double_int (result, mode);
8156 return build_fixed (type, fixed_value);
8160 /* Subroutine of native_interpret_expr. Interpret the contents of
8161 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8162 If the buffer cannot be interpreted, return NULL_TREE. */
8164 static tree
8165 native_interpret_real (tree type, const unsigned char *ptr, int len)
8167 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8168 int total_bytes = GET_MODE_SIZE (mode);
8169 unsigned char value;
8170 /* There are always 32 bits in each long, no matter the size of
8171 the host's long.  We handle floating point representations with
8172 up to 192 bits. */
8173 REAL_VALUE_TYPE r;
8174 long tmp[6];
8176 if (total_bytes > len || total_bytes > 24)
8177 return NULL_TREE;
8178 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8180 memset (tmp, 0, sizeof (tmp));
8181 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8182 bitpos += BITS_PER_UNIT)
8184 /* Both OFFSET and BYTE index within a long;
8185 bitpos indexes the whole float. */
8186 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8187 if (UNITS_PER_WORD < 4)
8189 int word = byte / UNITS_PER_WORD;
8190 if (WORDS_BIG_ENDIAN)
8191 word = (words - 1) - word;
8192 offset = word * UNITS_PER_WORD;
8193 if (BYTES_BIG_ENDIAN)
8194 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8195 else
8196 offset += byte % UNITS_PER_WORD;
8198 else
8200 offset = byte;
8201 if (BYTES_BIG_ENDIAN)
8203 /* Reverse bytes within each long, or within the entire float
8204 if it's smaller than a long (for HFmode). */
8205 offset = MIN (3, total_bytes - 1) - offset;
8206 gcc_assert (offset >= 0);
8209 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8211 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8214 real_from_target (&r, tmp, mode);
8215 return build_real (type, r);
8219 /* Subroutine of native_interpret_expr. Interpret the contents of
8220 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8221 If the buffer cannot be interpreted, return NULL_TREE. */
8223 static tree
8224 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8226 tree etype, rpart, ipart;
8227 int size;
8229 etype = TREE_TYPE (type);
8230 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8231 if (size * 2 > len)
8232 return NULL_TREE;
8233 rpart = native_interpret_expr (etype, ptr, size);
8234 if (!rpart)
8235 return NULL_TREE;
8236 ipart = native_interpret_expr (etype, ptr+size, size);
8237 if (!ipart)
8238 return NULL_TREE;
8239 return build_complex (type, rpart, ipart);
8242 /* Read a vector of type TYPE from the target memory image given by BYTES,
8243 which contains LEN bytes. The vector is known to be encodable using
8244 NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8246 Return the vector on success, otherwise return null. */
8248 static tree
8249 native_interpret_vector_part (tree type, const unsigned char *bytes,
8250 unsigned int len, unsigned int npatterns,
8251 unsigned int nelts_per_pattern)
8253 tree elt_type = TREE_TYPE (type);
8254 if (VECTOR_BOOLEAN_TYPE_P (type)
8255 && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8257 /* This is the only case in which elements can be smaller than a byte.
8258 Element 0 is always in the lsb of the containing byte. */
8259 unsigned int elt_bits = TYPE_PRECISION (elt_type);
8260 if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8261 return NULL_TREE;
8263 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8264 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8266 unsigned int bit_index = i * elt_bits;
8267 unsigned int byte_index = bit_index / BITS_PER_UNIT;
8268 unsigned int lsb = bit_index % BITS_PER_UNIT;
8269 builder.quick_push (bytes[byte_index] & (1 << lsb)
8270 ? build_all_ones_cst (elt_type)
8271 : build_zero_cst (elt_type));
8273 return builder.build ();
8276 unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8277 if (elt_bytes * npatterns * nelts_per_pattern > len)
8278 return NULL_TREE;
8280 tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8281 for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8283 tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8284 if (!elt)
8285 return NULL_TREE;
8286 builder.quick_push (elt);
8287 bytes += elt_bytes;
8289 return builder.build ();
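/* Example (editor's note): with TYPE a 4 x unsigned char vector,
   NPATTERNS = 4 and NELTS_PER_PATTERN = 1, the bytes 01 02 03 04
   are interpreted as the VECTOR_CST { 1, 2, 3, 4 }.  */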
8292 /* Subroutine of native_interpret_expr. Interpret the contents of
8293 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8294 If the buffer cannot be interpreted, return NULL_TREE. */
8296 static tree
8297 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8299 tree etype;
8300 unsigned int size;
8301 unsigned HOST_WIDE_INT count;
8303 etype = TREE_TYPE (type);
8304 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8305 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8306 || size * count > len)
8307 return NULL_TREE;
8309 return native_interpret_vector_part (type, ptr, len, count, 1);
8313 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8314 the buffer PTR of length LEN as a constant of type TYPE. For
8315 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8316 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8317 return NULL_TREE. */
8319 tree
8320 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8322 switch (TREE_CODE (type))
8324 case INTEGER_TYPE:
8325 case ENUMERAL_TYPE:
8326 case BOOLEAN_TYPE:
8327 case POINTER_TYPE:
8328 case REFERENCE_TYPE:
8329 return native_interpret_int (type, ptr, len);
8331 case REAL_TYPE:
8332 return native_interpret_real (type, ptr, len);
8334 case FIXED_POINT_TYPE:
8335 return native_interpret_fixed (type, ptr, len);
8337 case COMPLEX_TYPE:
8338 return native_interpret_complex (type, ptr, len);
8340 case VECTOR_TYPE:
8341 return native_interpret_vector (type, ptr, len);
8343 default:
8344 return NULL_TREE;
8348 /* Returns true if we can interpret the contents of a native encoding
8349 as TYPE. */
8351 bool
8352 can_native_interpret_type_p (tree type)
8354 switch (TREE_CODE (type))
8356 case INTEGER_TYPE:
8357 case ENUMERAL_TYPE:
8358 case BOOLEAN_TYPE:
8359 case POINTER_TYPE:
8360 case REFERENCE_TYPE:
8361 case FIXED_POINT_TYPE:
8362 case REAL_TYPE:
8363 case COMPLEX_TYPE:
8364 case VECTOR_TYPE:
8365 return true;
8366 default:
8367 return false;
8371 /* Routines for manipulating native_encode_expr encoded data when the encoded
8372 or extracted constant positions and/or sizes aren't byte aligned. */
8374 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
8375 bits between adjacent elements. AMNT should be within
8376 [0, BITS_PER_UNIT).
8377 Example, AMNT = 2:
8378 00011111|11100000 << 2 = 01111111|10000000
8379 PTR[1] | PTR[0] PTR[1] | PTR[0]. */
8381 void
8382 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
8383 unsigned int amnt)
8385 if (amnt == 0)
8386 return;
8388 unsigned char carry_over = 0U;
8389 unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
8390 unsigned char clear_mask = (~0U) << amnt;
8392 for (unsigned int i = 0; i < sz; i++)
8394 unsigned prev_carry_over = carry_over;
8395 carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
8397 ptr[i] <<= amnt;
8398 if (i != 0)
8400 ptr[i] &= clear_mask;
8401 ptr[i] |= prev_carry_over;
8406 /* Like shift_bytes_in_array_left but for big-endian.
8407 Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
8408 bits between adjacent elements. AMNT should be within
8409 [0, BITS_PER_UNIT).
8410 Example, AMNT = 2:
8411 00011111|11100000 >> 2 = 00000111|11111000
8412 PTR[0] | PTR[1] PTR[0] | PTR[1]. */
8414 void
8415 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
8416 unsigned int amnt)
8418 if (amnt == 0)
8419 return;
8421 unsigned char carry_over = 0U;
8422 unsigned char carry_mask = ~(~0U << amnt);
8424 for (unsigned int i = 0; i < sz; i++)
8426 unsigned prev_carry_over = carry_over;
8427 carry_over = ptr[i] & carry_mask;
8429 carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
8430 ptr[i] >>= amnt;
8431 ptr[i] |= prev_carry_over;
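/* Usage example (editor's note), matching the diagrams above with
   BITS_PER_UNIT == 8:

     unsigned char buf[2] = { 0x1f, 0xe0 };
     shift_bytes_in_array_right (buf, 2, 2);

   leaves buf[] as { 0x07, 0xf8 }.  */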
8435 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
8436 directly on the VECTOR_CST encoding, in a way that works for variable-
8437 length vectors. Return the resulting VECTOR_CST on success or null
8438 on failure. */
8440 static tree
8441 fold_view_convert_vector_encoding (tree type, tree expr)
8443 tree expr_type = TREE_TYPE (expr);
8444 poly_uint64 type_bits, expr_bits;
8445 if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
8446 || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
8447 return NULL_TREE;
8449 poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
8450 poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
8451 unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
8452 unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
8454 /* We can only preserve the semantics of a stepped pattern if the new
8455 vector element is an integer of the same size. */
8456 if (VECTOR_CST_STEPPED_P (expr)
8457 && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
8458 return NULL_TREE;
8460 /* The number of bits needed to encode one element from every pattern
8461 of the original vector. */
8462 unsigned int expr_sequence_bits
8463 = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
8465 /* The number of bits needed to encode one element from every pattern
8466 of the result. */
8467 unsigned int type_sequence_bits
8468 = least_common_multiple (expr_sequence_bits, type_elt_bits);
8470 /* Don't try to read more bytes than are available, which can happen
8471 for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
8472 The general VIEW_CONVERT handling can cope with that case, so there's
8473 no point complicating things here. */
8474 unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
8475 unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
8476 BITS_PER_UNIT);
8477 unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
8478 if (known_gt (buffer_bits, expr_bits))
8479 return NULL_TREE;
8481 /* Get enough bytes of EXPR to form the new encoding. */
8482 auto_vec<unsigned char, 128> buffer (buffer_bytes);
8483 buffer.quick_grow (buffer_bytes);
8484 if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
8485 buffer_bits / expr_elt_bits)
8486 != (int) buffer_bytes)
8487 return NULL_TREE;
8489 /* Reencode the bytes as TYPE. */
8490 unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
8491 return native_interpret_vector_part (type, &buffer[0], buffer.length (),
8492 type_npatterns, nelts_per_pattern);
8495 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
8496 TYPE at compile-time. If we're unable to perform the conversion
8497 return NULL_TREE. */
8499 static tree
8500 fold_view_convert_expr (tree type, tree expr)
8502 /* We support up to 512-bit values (for V8DFmode). */
8503 unsigned char buffer[64];
8504 int len;
8506 /* Check that the host and target are sane. */
8507 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
8508 return NULL_TREE;
8510 if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
8511 if (tree res = fold_view_convert_vector_encoding (type, expr))
8512 return res;
8514 len = native_encode_expr (expr, buffer, sizeof (buffer));
8515 if (len == 0)
8516 return NULL_TREE;
8518 return native_interpret_expr (type, buffer, len);
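/* Example (editor's note): on a little-endian target with IEEE
   float, folding VIEW_CONVERT_EXPR <int> (1.0f) encodes 1.0f as the
   bytes 00 00 80 3f and reinterprets them as the INTEGER_CST
   0x3f800000; combinations that cannot be encoded or interpreted
   return NULL_TREE.  */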
8521 /* Build an expression for the address of T. Folds away INDIRECT_REF
8522 to avoid confusing the gimplify process. */
8524 tree
8525 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
8527 /* The size of the object is not relevant when talking about its address. */
8528 if (TREE_CODE (t) == WITH_SIZE_EXPR)
8529 t = TREE_OPERAND (t, 0);
8531 if (TREE_CODE (t) == INDIRECT_REF)
8533 t = TREE_OPERAND (t, 0);
8535 if (TREE_TYPE (t) != ptrtype)
8536 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
8538 else if (TREE_CODE (t) == MEM_REF
8539 && integer_zerop (TREE_OPERAND (t, 1)))
8541 t = TREE_OPERAND (t, 0);
8543 if (TREE_TYPE (t) != ptrtype)
8544 t = fold_convert_loc (loc, ptrtype, t);
8546 else if (TREE_CODE (t) == MEM_REF
8547 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
8548 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
8549 TREE_OPERAND (t, 0),
8550 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
8551 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8553 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
8555 if (TREE_TYPE (t) != ptrtype)
8556 t = fold_convert_loc (loc, ptrtype, t);
8558 else
8559 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
8561 return t;
8564 /* Build an expression for the address of T. */
8566 tree
8567 build_fold_addr_expr_loc (location_t loc, tree t)
8569 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8571 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
8574 /* Fold a unary expression of code CODE and type TYPE with operand
8575 OP0. Return the folded expression if folding is successful.
8576 Otherwise, return NULL_TREE. */
8578 tree
8579 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8581 tree tem;
8582 tree arg0;
8583 enum tree_code_class kind = TREE_CODE_CLASS (code);
8585 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8586 && TREE_CODE_LENGTH (code) == 1);
8588 arg0 = op0;
8589 if (arg0)
8591 if (CONVERT_EXPR_CODE_P (code)
8592 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
8594 /* Don't use STRIP_NOPS, because signedness of argument type
8595 matters. */
8596 STRIP_SIGN_NOPS (arg0);
8598 else
8600 /* Strip any conversions that don't change the mode. This
8601 is safe for every expression, except for a comparison
8602 expression because its signedness is derived from its
8603 operands.
8605 Note that this is done as an internal manipulation within
8606 the constant folder, in order to find the simplest
8607 representation of the arguments so that their form can be
8608 studied.  In any case, the appropriate type conversions
8609 should be put back in the tree that will get out of the
8610 constant folder. */
8611 STRIP_NOPS (arg0);
8614 if (CONSTANT_CLASS_P (arg0))
8616 tree tem = const_unop (code, type, arg0);
8617 if (tem)
8619 if (TREE_TYPE (tem) != type)
8620 tem = fold_convert_loc (loc, type, tem);
8621 return tem;
8626 tem = generic_simplify (loc, code, type, op0);
8627 if (tem)
8628 return tem;
8630 if (TREE_CODE_CLASS (code) == tcc_unary)
8632 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8633 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8634 fold_build1_loc (loc, code, type,
8635 fold_convert_loc (loc, TREE_TYPE (op0),
8636 TREE_OPERAND (arg0, 1))));
8637 else if (TREE_CODE (arg0) == COND_EXPR)
8639 tree arg01 = TREE_OPERAND (arg0, 1);
8640 tree arg02 = TREE_OPERAND (arg0, 2);
8641 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8642 arg01 = fold_build1_loc (loc, code, type,
8643 fold_convert_loc (loc,
8644 TREE_TYPE (op0), arg01));
8645 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8646 arg02 = fold_build1_loc (loc, code, type,
8647 fold_convert_loc (loc,
8648 TREE_TYPE (op0), arg02));
8649 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
8650 arg01, arg02);
8652 /* If this was a conversion, and all we did was to move it
8653 inside the COND_EXPR, bring it back out. But leave it if
8654 it is a conversion from integer to integer and the
8655 result precision is no wider than a word since such a
8656 conversion is cheap and may be optimized away by combine,
8657 while it couldn't if it were outside the COND_EXPR. Then return
8658 so we don't get into an infinite recursion loop taking the
8659 conversion out and then back in. */
8661 if ((CONVERT_EXPR_CODE_P (code)
8662 || code == NON_LVALUE_EXPR)
8663 && TREE_CODE (tem) == COND_EXPR
8664 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8665 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8666 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8667 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8668 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8669 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8670 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8671 && (INTEGRAL_TYPE_P
8672 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8673 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8674 || flag_syntax_only))
8675 tem = build1_loc (loc, code, type,
8676 build3 (COND_EXPR,
8677 TREE_TYPE (TREE_OPERAND
8678 (TREE_OPERAND (tem, 1), 0)),
8679 TREE_OPERAND (tem, 0),
8680 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8681 TREE_OPERAND (TREE_OPERAND (tem, 2),
8682 0)));
8683 return tem;
8687 switch (code)
8689 case NON_LVALUE_EXPR:
8690 if (!maybe_lvalue_p (op0))
8691 return fold_convert_loc (loc, type, op0);
8692 return NULL_TREE;
8694 CASE_CONVERT:
8695 case FLOAT_EXPR:
8696 case FIX_TRUNC_EXPR:
8697 if (COMPARISON_CLASS_P (op0))
8699 /* If we have (type) (a CMP b) and type is an integral type, return a
8700 new expression involving the new type. Canonicalize
8701 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for a
8702 non-integral type.
8703 Do not fold the result as that would not simplify further; also,
8704 folding again would result in infinite recursion. */
8705 if (TREE_CODE (type) == BOOLEAN_TYPE)
8706 return build2_loc (loc, TREE_CODE (op0), type,
8707 TREE_OPERAND (op0, 0),
8708 TREE_OPERAND (op0, 1));
8709 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
8710 && TREE_CODE (type) != VECTOR_TYPE)
8711 return build3_loc (loc, COND_EXPR, type, op0,
8712 constant_boolean_node (true, type),
8713 constant_boolean_node (false, type));
8716 /* Handle (T *)&A.B.C for A being of type T and B and C
8717 living at offset zero. This occurs frequently in
8718 C++ upcasting and then accessing the base. */
8719 if (TREE_CODE (op0) == ADDR_EXPR
8720 && POINTER_TYPE_P (type)
8721 && handled_component_p (TREE_OPERAND (op0, 0)))
8723 poly_int64 bitsize, bitpos;
8724 tree offset;
8725 machine_mode mode;
8726 int unsignedp, reversep, volatilep;
8727 tree base
8728 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
8729 &offset, &mode, &unsignedp, &reversep,
8730 &volatilep);
8731 /* If the reference was to a (constant) zero offset, we can use
8732 the address of the base if it has the same base type
8733 as the result type and the pointer type is unqualified. */
8734 if (!offset
8735 && known_eq (bitpos, 0)
8736 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8737 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8738 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8739 return fold_convert_loc (loc, type,
8740 build_fold_addr_expr_loc (loc, base));
8743 if (TREE_CODE (op0) == MODIFY_EXPR
8744 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8745 /* Detect assigning a bitfield. */
8746 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8747 && DECL_BIT_FIELD
8748 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8750 /* Don't leave an assignment inside a conversion
8751 unless assigning a bitfield. */
8752 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8753 /* First do the assignment, then return converted constant. */
8754 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8755 TREE_NO_WARNING (tem) = 1;
8756 TREE_USED (tem) = 1;
8757 return tem;
8760 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8761 constant (if x has signed type, the sign bit cannot be set
8762 in c). This folds extension into the BIT_AND_EXPR.
8763 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8764 very likely don't have maximal range for their precision and this
8765 transformation effectively doesn't preserve non-maximal ranges. */
8766 if (TREE_CODE (type) == INTEGER_TYPE
8767 && TREE_CODE (op0) == BIT_AND_EXPR
8768 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8770 tree and_expr = op0;
8771 tree and0 = TREE_OPERAND (and_expr, 0);
8772 tree and1 = TREE_OPERAND (and_expr, 1);
8773 int change = 0;
8775 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8776 || (TYPE_PRECISION (type)
8777 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8778 change = 1;
8779 else if (TYPE_PRECISION (TREE_TYPE (and1))
8780 <= HOST_BITS_PER_WIDE_INT
8781 && tree_fits_uhwi_p (and1))
8783 unsigned HOST_WIDE_INT cst;
8785 cst = tree_to_uhwi (and1);
8786 cst &= HOST_WIDE_INT_M1U
8787 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8788 change = (cst == 0);
8789 if (change
8790 && !flag_syntax_only
8791 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
8792 == ZERO_EXTEND))
8794 tree uns = unsigned_type_for (TREE_TYPE (and0));
8795 and0 = fold_convert_loc (loc, uns, and0);
8796 and1 = fold_convert_loc (loc, uns, and1);
8799 if (change)
8801 tem = force_fit_type (type, wi::to_widest (and1), 0,
8802 TREE_OVERFLOW (and1));
8803 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8804 fold_convert_loc (loc, type, and0), tem);
8808 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
8809 cast (T1)X will fold away. We assume that this happens when X itself
8810 is a cast. */
8811 if (POINTER_TYPE_P (type)
8812 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8813 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
8815 tree arg00 = TREE_OPERAND (arg0, 0);
8816 tree arg01 = TREE_OPERAND (arg0, 1);
8818 return fold_build_pointer_plus_loc
8819 (loc, fold_convert_loc (loc, type, arg00), arg01);
8822 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8823 of the same precision, and X is an integer type not narrower than
8824 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8825 if (INTEGRAL_TYPE_P (type)
8826 && TREE_CODE (op0) == BIT_NOT_EXPR
8827 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8828 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8829 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8831 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8832 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8833 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8834 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8835 fold_convert_loc (loc, type, tem));
8838 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8839 type of X and Y (integer types only). */
8840 if (INTEGRAL_TYPE_P (type)
8841 && TREE_CODE (op0) == MULT_EXPR
8842 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8843 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8845 /* Be careful not to introduce new overflows. */
8846 tree mult_type;
8847 if (TYPE_OVERFLOW_WRAPS (type))
8848 mult_type = type;
8849 else
8850 mult_type = unsigned_type_for (type);
8852 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8854 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8855 fold_convert_loc (loc, mult_type,
8856 TREE_OPERAND (op0, 0)),
8857 fold_convert_loc (loc, mult_type,
8858 TREE_OPERAND (op0, 1)));
8859 return fold_convert_loc (loc, type, tem);
8863 return NULL_TREE;
8865 case VIEW_CONVERT_EXPR:
8866 if (TREE_CODE (op0) == MEM_REF)
8868 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
8869 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
8870 tem = fold_build2_loc (loc, MEM_REF, type,
8871 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8872 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
8873 return tem;
8876 return NULL_TREE;
8878 case NEGATE_EXPR:
8879 tem = fold_negate_expr (loc, arg0);
8880 if (tem)
8881 return fold_convert_loc (loc, type, tem);
8882 return NULL_TREE;
8884 case ABS_EXPR:
8885 /* Convert fabs((double)float) into (double)fabsf(float). */
8886 if (TREE_CODE (arg0) == NOP_EXPR
8887 && TREE_CODE (type) == REAL_TYPE)
8889 tree targ0 = strip_float_extensions (arg0);
8890 if (targ0 != arg0)
8891 return fold_convert_loc (loc, type,
8892 fold_build1_loc (loc, ABS_EXPR,
8893 TREE_TYPE (targ0),
8894 targ0));
8896 return NULL_TREE;
8898 case BIT_NOT_EXPR:
8899 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8900 if (TREE_CODE (arg0) == BIT_XOR_EXPR
8901 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8902 fold_convert_loc (loc, type,
8903 TREE_OPERAND (arg0, 0)))))
8904 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8905 fold_convert_loc (loc, type,
8906 TREE_OPERAND (arg0, 1)));
8907 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8908 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8909 fold_convert_loc (loc, type,
8910 TREE_OPERAND (arg0, 1)))))
8911 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8912 fold_convert_loc (loc, type,
8913 TREE_OPERAND (arg0, 0)), tem);
8915 return NULL_TREE;
8917 case TRUTH_NOT_EXPR:
8918 /* Note that the operand of this must be an int
8919 and its values must be 0 or 1.
8920 ("true" is a fixed value perhaps depending on the language,
8921 but we don't handle values other than 1 correctly yet.) */
8922 tem = fold_truth_not_expr (loc, arg0);
8923 if (!tem)
8924 return NULL_TREE;
8925 return fold_convert_loc (loc, type, tem);
8927 case INDIRECT_REF:
8928 /* Fold *&X to X if X is an lvalue. */
8929 if (TREE_CODE (op0) == ADDR_EXPR)
8931 tree op00 = TREE_OPERAND (op0, 0);
8932 if ((VAR_P (op00)
8933 || TREE_CODE (op00) == PARM_DECL
8934 || TREE_CODE (op00) == RESULT_DECL)
8935 && !TREE_READONLY (op00))
8936 return op00;
8938 return NULL_TREE;
8940 default:
8941 return NULL_TREE;
8942 } /* switch (code) */
8946 /* If the operation was a conversion do _not_ mark a resulting constant
8947 with TREE_OVERFLOW if the original constant was not. These conversions
8948 have implementation defined behavior and retaining the TREE_OVERFLOW
8949 flag here would confuse later passes such as VRP. */
8950 tree
8951 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8952 tree type, tree op0)
8954 tree res = fold_unary_loc (loc, code, type, op0);
8955 if (res
8956 && TREE_CODE (res) == INTEGER_CST
8957 && TREE_CODE (op0) == INTEGER_CST
8958 && CONVERT_EXPR_CODE_P (code))
8959 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8961 return res;
8964 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8965 operands OP0 and OP1. LOC is the location of the resulting expression.
8966 ARG0 and ARG1 are OP0 and OP1 with their NOPs stripped.
8967 Return the folded expression if folding is successful. Otherwise,
8968 return NULL_TREE. */
8969 static tree
8970 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8971 tree arg0, tree arg1, tree op0, tree op1)
8973 tree tem;
8975 /* We only do these simplifications if we are optimizing. */
8976 if (!optimize)
8977 return NULL_TREE;
8979 /* Check for things like (A || B) && (A || C). We can convert this
8980 to A || (B && C). Note that either operator can be any of the four
8981 truth and/or operations and the transformation will still be
8982 valid. Also note that we only care about order for the
8983 ANDIF and ORIF operators. If B contains side effects, this
8984 might change the truth-value of A. */
8985 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8986 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8987 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8988 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8989 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8990 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8992 tree a00 = TREE_OPERAND (arg0, 0);
8993 tree a01 = TREE_OPERAND (arg0, 1);
8994 tree a10 = TREE_OPERAND (arg1, 0);
8995 tree a11 = TREE_OPERAND (arg1, 1);
8996 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8997 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8998 && (code == TRUTH_AND_EXPR
8999 || code == TRUTH_OR_EXPR));
9001 if (operand_equal_p (a00, a10, 0))
9002 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9003 fold_build2_loc (loc, code, type, a01, a11));
9004 else if (commutative && operand_equal_p (a00, a11, 0))
9005 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9006 fold_build2_loc (loc, code, type, a01, a10));
9007 else if (commutative && operand_equal_p (a01, a10, 0))
9008 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9009 fold_build2_loc (loc, code, type, a00, a11));
9011 /* This case is tricky because we must either have commutative
9012 operators or else A10 must not have side-effects. */
9014 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9015 && operand_equal_p (a01, a11, 0))
9016 return fold_build2_loc (loc, TREE_CODE (arg0), type,
9017 fold_build2_loc (loc, code, type, a00, a10),
9018 a01);
9021 /* See if we can build a range comparison. */
9022 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9023 return tem;
9025 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9026 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9028 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9029 if (tem)
9030 return fold_build2_loc (loc, code, type, tem, arg1);
9033 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9034 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9036 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9037 if (tem)
9038 return fold_build2_loc (loc, code, type, arg0, tem);
9041 /* Check for the possibility of merging component references. If our
9042 lhs is another similar operation, try to merge its rhs with our
9043 rhs. Then try to merge our lhs and rhs. */
9044 if (TREE_CODE (arg0) == code
9045 && (tem = fold_truth_andor_1 (loc, code, type,
9046 TREE_OPERAND (arg0, 1), arg1)) != 0)
9047 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9049 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9050 return tem;
9052 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9053 if (param_logical_op_non_short_circuit != -1)
9054 logical_op_non_short_circuit
9055 = param_logical_op_non_short_circuit;
9056 if (logical_op_non_short_circuit
9057 && !flag_sanitize_coverage
9058 && (code == TRUTH_AND_EXPR
9059 || code == TRUTH_ANDIF_EXPR
9060 || code == TRUTH_OR_EXPR
9061 || code == TRUTH_ORIF_EXPR))
9063 enum tree_code ncode, icode;
9065 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9066 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9067 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9069 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9070 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9071 We don't want to pack more than two leaves into a non-IF AND/OR
9072 expression.
9073 If the tree-code of the left-hand operand isn't an AND/OR-IF code
9074 and isn't equal to IF-CODE, then we don't want to add the right-hand
9075 operand.  If the inner right-hand side of the left-hand operand has
9076 side-effects, or isn't simple, then we can't add to it, as otherwise
9077 we might destroy the if-sequence.  */
9078 if (TREE_CODE (arg0) == icode
9079 && simple_operand_p_2 (arg1)
9080 /* Needed for sequence points to handle trapping, and
9081 side-effects. */
9082 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
9084 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9085 arg1);
9086 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9087 tem);
9089 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9090 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
9091 else if (TREE_CODE (arg1) == icode
9092 && simple_operand_p_2 (arg0)
9093 	       /* Needed for sequence points to handle trapping operations and
9094 side-effects. */
9095 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
9097 tem = fold_build2_loc (loc, ncode, type,
9098 arg0, TREE_OPERAND (arg1, 0));
9099 return fold_build2_loc (loc, icode, type, tem,
9100 TREE_OPERAND (arg1, 1));
9102 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9103 into (A OR B).
9104 	 For sequence point consistency, we need to check for trapping
9105 	 and side-effects.  */
9106 else if (code == icode && simple_operand_p_2 (arg0)
9107 && simple_operand_p_2 (arg1))
9108 return fold_build2_loc (loc, ncode, type, arg0, arg1);
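      /* A worked example (assuming LOGICAL_OP_NON_SHORT_CIRCUIT holds for
	 the target): for (a && b) && c, with b and c simple and free of
	 side-effects, the first branch above produces a && (b AND c),
	 where the inner TRUTH_AND_EXPR no longer forces a branch for
	 short-circuit evaluation.  */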
9111 return NULL_TREE;
9114 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9115 by changing CODE to reduce the magnitude of constants involved in
9116 ARG0 of the comparison.
9117 Returns a canonicalized comparison tree if a simplification was
9118 possible, otherwise returns NULL_TREE.
9119 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9120 valid if signed overflow is undefined. */
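   /* For instance, with undefined signed overflow, X - 5 < Y can be
      canonicalized to X - 4 <= Y: the constant shrinks in magnitude
      while the comparison keeps the same meaning.  */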
9122 static tree
9123 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9124 tree arg0, tree arg1,
9125 bool *strict_overflow_p)
9127 enum tree_code code0 = TREE_CODE (arg0);
9128 tree t, cst0 = NULL_TREE;
9129 int sgn0;
9131 /* Match A +- CST code arg1. We can change this only if overflow
9132 is undefined. */
9133 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9134 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9135 /* In principle pointers also have undefined overflow behavior,
9136 but that causes problems elsewhere. */
9137 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9138 && (code0 == MINUS_EXPR
9139 || code0 == PLUS_EXPR)
9140 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9141 return NULL_TREE;
9143 /* Identify the constant in arg0 and its sign. */
9144 cst0 = TREE_OPERAND (arg0, 1);
9145 sgn0 = tree_int_cst_sgn (cst0);
9147 /* Overflowed constants and zero will cause problems. */
9148 if (integer_zerop (cst0)
9149 || TREE_OVERFLOW (cst0))
9150 return NULL_TREE;
9152 /* See if we can reduce the magnitude of the constant in
9153 arg0 by changing the comparison code. */
9154 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9155 if (code == LT_EXPR
9156 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9157 code = LE_EXPR;
9158 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9159 else if (code == GT_EXPR
9160 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9161 code = GE_EXPR;
9162 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9163 else if (code == LE_EXPR
9164 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9165 code = LT_EXPR;
9166 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9167 else if (code == GE_EXPR
9168 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9169 code = GT_EXPR;
9170 else
9171 return NULL_TREE;
9172 *strict_overflow_p = true;
9174 /* Now build the constant reduced in magnitude. But not if that
9175      would produce one outside of its type's range.  */
9176 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9177 && ((sgn0 == 1
9178 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9179 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9180 || (sgn0 == -1
9181 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9182 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9183 return NULL_TREE;
9185 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9186 cst0, build_int_cst (TREE_TYPE (cst0), 1));
9187 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9188 t = fold_convert (TREE_TYPE (arg1), t);
9190 return fold_build2_loc (loc, code, type, t, arg1);
9193 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9194 overflow further. Try to decrease the magnitude of constants involved
9195 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9196 and put sole constants at the second argument position.
9197 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9199 static tree
9200 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9201 tree arg0, tree arg1)
9203 tree t;
9204 bool strict_overflow_p;
9205 const char * const warnmsg = G_("assuming signed overflow does not occur "
9206 "when reducing constant in comparison");
9208 /* Try canonicalization by simplifying arg0. */
9209 strict_overflow_p = false;
9210 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9211 &strict_overflow_p);
9212 if (t)
9214 if (strict_overflow_p)
9215 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9216 return t;
9219 /* Try canonicalization by simplifying arg1 using the swapped
9220 comparison. */
9221 code = swap_tree_comparison (code);
9222 strict_overflow_p = false;
9223 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9224 &strict_overflow_p);
9225 if (t && strict_overflow_p)
9226 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9227 return t;
9230 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9231 space. This is used to avoid issuing overflow warnings for
9232 expressions like &p->x which cannot wrap. */
9234 static bool
9235 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9237 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9238 return true;
9240 if (maybe_lt (bitpos, 0))
9241 return true;
9243 poly_wide_int wi_offset;
9244 int precision = TYPE_PRECISION (TREE_TYPE (base));
9245 if (offset == NULL_TREE)
9246 wi_offset = wi::zero (precision);
9247 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9248 return true;
9249 else
9250 wi_offset = wi::to_poly_wide (offset);
9252 wi::overflow_type overflow;
9253 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9254 precision);
9255 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9256 if (overflow)
9257 return true;
9259 poly_uint64 total_hwi, size;
9260 if (!total.to_uhwi (&total_hwi)
9261 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9262 &size)
9263 || known_eq (size, 0U))
9264 return true;
9266 if (known_le (total_hwi, size))
9267 return false;
9269 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9270 array. */
9271 if (TREE_CODE (base) == ADDR_EXPR
9272 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9273 &size)
9274 && maybe_ne (size, 0U)
9275 && known_le (total_hwi, size))
9276 return false;
9278 return true;
9281 /* Return a positive integer when the symbol DECL is known to have
9282 a nonzero address, zero when it's known not to (e.g., it's a weak
9283 symbol), and a negative integer when the symbol is not yet in the
9284 symbol table and so whether or not its address is zero is unknown.
9285    For function-local objects, always return a positive integer.  */
9286 static int
9287 maybe_nonzero_address (tree decl)
9289 if (DECL_P (decl) && decl_in_symtab_p (decl))
9290 if (struct symtab_node *symbol = symtab_node::get_create (decl))
9291 return symbol->nonzero_address ();
9293 /* Function local objects are never NULL. */
9294 if (DECL_P (decl)
9295 && (DECL_CONTEXT (decl)
9296 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
9297 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
9298 return 1;
9300 return -1;
9303 /* Subroutine of fold_binary. This routine performs all of the
9304 transformations that are common to the equality/inequality
9305 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9306 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9307    fold_binary should use fold_binary instead.  Fold a comparison with
9308 tree code CODE and type TYPE with operands OP0 and OP1. Return
9309 the folded comparison or NULL_TREE. */
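   /* As a concrete example of the address handling below, &a[1] < &a[2]
      folds to true: both operands share the base object A and the
      constant bit positions are strictly ordered.  */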
9311 static tree
9312 fold_comparison (location_t loc, enum tree_code code, tree type,
9313 tree op0, tree op1)
9315 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
9316 tree arg0, arg1, tem;
9318 arg0 = op0;
9319 arg1 = op1;
9321 STRIP_SIGN_NOPS (arg0);
9322 STRIP_SIGN_NOPS (arg1);
9324   /* For comparisons of pointers we can decompose them into a compile-time
9325 comparison of the base objects and the offsets into the object.
9326 This requires at least one operand being an ADDR_EXPR or a
9327 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9328 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9329 && (TREE_CODE (arg0) == ADDR_EXPR
9330 || TREE_CODE (arg1) == ADDR_EXPR
9331 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9332 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9334 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9335 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
9336 machine_mode mode;
9337 int volatilep, reversep, unsignedp;
9338 bool indirect_base0 = false, indirect_base1 = false;
9340 /* Get base and offset for the access. Strip ADDR_EXPR for
9341 get_inner_reference, but put it back by stripping INDIRECT_REF
9342 off the base object if possible. indirect_baseN will be true
9343 if baseN is not an address but refers to the object itself. */
9344 base0 = arg0;
9345 if (TREE_CODE (arg0) == ADDR_EXPR)
9347 base0
9348 = get_inner_reference (TREE_OPERAND (arg0, 0),
9349 &bitsize, &bitpos0, &offset0, &mode,
9350 &unsignedp, &reversep, &volatilep);
9351 if (TREE_CODE (base0) == INDIRECT_REF)
9352 base0 = TREE_OPERAND (base0, 0);
9353 else
9354 indirect_base0 = true;
9356 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9358 base0 = TREE_OPERAND (arg0, 0);
9359 STRIP_SIGN_NOPS (base0);
9360 if (TREE_CODE (base0) == ADDR_EXPR)
9362 base0
9363 = get_inner_reference (TREE_OPERAND (base0, 0),
9364 &bitsize, &bitpos0, &offset0, &mode,
9365 &unsignedp, &reversep, &volatilep);
9366 if (TREE_CODE (base0) == INDIRECT_REF)
9367 base0 = TREE_OPERAND (base0, 0);
9368 else
9369 indirect_base0 = true;
9371 if (offset0 == NULL_TREE || integer_zerop (offset0))
9372 offset0 = TREE_OPERAND (arg0, 1);
9373 else
9374 offset0 = size_binop (PLUS_EXPR, offset0,
9375 TREE_OPERAND (arg0, 1));
9376 if (poly_int_tree_p (offset0))
9378 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
9379 TYPE_PRECISION (sizetype));
9380 tem <<= LOG2_BITS_PER_UNIT;
9381 tem += bitpos0;
9382 if (tem.to_shwi (&bitpos0))
9383 offset0 = NULL_TREE;
9387 base1 = arg1;
9388 if (TREE_CODE (arg1) == ADDR_EXPR)
9390 base1
9391 = get_inner_reference (TREE_OPERAND (arg1, 0),
9392 &bitsize, &bitpos1, &offset1, &mode,
9393 &unsignedp, &reversep, &volatilep);
9394 if (TREE_CODE (base1) == INDIRECT_REF)
9395 base1 = TREE_OPERAND (base1, 0);
9396 else
9397 indirect_base1 = true;
9399 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9401 base1 = TREE_OPERAND (arg1, 0);
9402 STRIP_SIGN_NOPS (base1);
9403 if (TREE_CODE (base1) == ADDR_EXPR)
9405 base1
9406 = get_inner_reference (TREE_OPERAND (base1, 0),
9407 &bitsize, &bitpos1, &offset1, &mode,
9408 &unsignedp, &reversep, &volatilep);
9409 if (TREE_CODE (base1) == INDIRECT_REF)
9410 base1 = TREE_OPERAND (base1, 0);
9411 else
9412 indirect_base1 = true;
9414 if (offset1 == NULL_TREE || integer_zerop (offset1))
9415 offset1 = TREE_OPERAND (arg1, 1);
9416 else
9417 offset1 = size_binop (PLUS_EXPR, offset1,
9418 TREE_OPERAND (arg1, 1));
9419 if (poly_int_tree_p (offset1))
9421 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
9422 TYPE_PRECISION (sizetype));
9423 tem <<= LOG2_BITS_PER_UNIT;
9424 tem += bitpos1;
9425 if (tem.to_shwi (&bitpos1))
9426 offset1 = NULL_TREE;
9430 /* If we have equivalent bases we might be able to simplify. */
9431 if (indirect_base0 == indirect_base1
9432 && operand_equal_p (base0, base1,
9433 indirect_base0 ? OEP_ADDRESS_OF : 0))
9435 /* We can fold this expression to a constant if the non-constant
9436 offset parts are equal. */
9437 if ((offset0 == offset1
9438 || (offset0 && offset1
9439 && operand_equal_p (offset0, offset1, 0)))
9440 && (equality_code
9441 || (indirect_base0
9442 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9443 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9445 if (!equality_code
9446 && maybe_ne (bitpos0, bitpos1)
9447 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9448 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9449 fold_overflow_warning (("assuming pointer wraparound does not "
9450 "occur when comparing P +- C1 with "
9451 "P +- C2"),
9452 WARN_STRICT_OVERFLOW_CONDITIONAL);
9454 switch (code)
9456 case EQ_EXPR:
9457 if (known_eq (bitpos0, bitpos1))
9458 return constant_boolean_node (true, type);
9459 if (known_ne (bitpos0, bitpos1))
9460 return constant_boolean_node (false, type);
9461 break;
9462 case NE_EXPR:
9463 if (known_ne (bitpos0, bitpos1))
9464 return constant_boolean_node (true, type);
9465 if (known_eq (bitpos0, bitpos1))
9466 return constant_boolean_node (false, type);
9467 break;
9468 case LT_EXPR:
9469 if (known_lt (bitpos0, bitpos1))
9470 return constant_boolean_node (true, type);
9471 if (known_ge (bitpos0, bitpos1))
9472 return constant_boolean_node (false, type);
9473 break;
9474 case LE_EXPR:
9475 if (known_le (bitpos0, bitpos1))
9476 return constant_boolean_node (true, type);
9477 if (known_gt (bitpos0, bitpos1))
9478 return constant_boolean_node (false, type);
9479 break;
9480 case GE_EXPR:
9481 if (known_ge (bitpos0, bitpos1))
9482 return constant_boolean_node (true, type);
9483 if (known_lt (bitpos0, bitpos1))
9484 return constant_boolean_node (false, type);
9485 break;
9486 case GT_EXPR:
9487 if (known_gt (bitpos0, bitpos1))
9488 return constant_boolean_node (true, type);
9489 if (known_le (bitpos0, bitpos1))
9490 return constant_boolean_node (false, type);
9491 break;
9492 default:;
9495 /* We can simplify the comparison to a comparison of the variable
9496 offset parts if the constant offset parts are equal.
9497 Be careful to use signed sizetype here because otherwise we
9498 mess with array offsets in the wrong way. This is possible
9499 	     because pointer arithmetic is restricted to remain within an
9500 object and overflow on pointer differences is undefined as of
9501 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9502 else if (known_eq (bitpos0, bitpos1)
9503 && (equality_code
9504 || (indirect_base0
9505 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9506 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9508 /* By converting to signed sizetype we cover middle-end pointer
9509 arithmetic which operates on unsigned pointer types of size
9510 type size and ARRAY_REF offsets which are properly sign or
9511 zero extended from their type in case it is narrower than
9512 sizetype. */
9513 if (offset0 == NULL_TREE)
9514 offset0 = build_int_cst (ssizetype, 0);
9515 else
9516 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9517 if (offset1 == NULL_TREE)
9518 offset1 = build_int_cst (ssizetype, 0);
9519 else
9520 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9522 if (!equality_code
9523 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9524 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9525 fold_overflow_warning (("assuming pointer wraparound does not "
9526 "occur when comparing P +- C1 with "
9527 "P +- C2"),
9528 WARN_STRICT_OVERFLOW_COMPARISON);
9530 return fold_build2_loc (loc, code, type, offset0, offset1);
9533 /* For equal offsets we can simplify to a comparison of the
9534 base addresses. */
9535 else if (known_eq (bitpos0, bitpos1)
9536 && (indirect_base0
9537 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9538 && (indirect_base1
9539 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9540 && ((offset0 == offset1)
9541 || (offset0 && offset1
9542 && operand_equal_p (offset0, offset1, 0))))
9544 if (indirect_base0)
9545 base0 = build_fold_addr_expr_loc (loc, base0);
9546 if (indirect_base1)
9547 base1 = build_fold_addr_expr_loc (loc, base1);
9548 return fold_build2_loc (loc, code, type, base0, base1);
9550 /* Comparison between an ordinary (non-weak) symbol and a null
9551 pointer can be eliminated since such symbols must have a non
9552 null address. In C, relational expressions between pointers
9553 to objects and null pointers are undefined. The results
9554 below follow the C++ rules with the additional property that
9555 	     every object pointer compares greater than a null pointer.  */
9557 else if (((DECL_P (base0)
9558 && maybe_nonzero_address (base0) > 0
9559 /* Avoid folding references to struct members at offset 0 to
9560 prevent tests like '&ptr->firstmember == 0' from getting
9561 eliminated. When ptr is null, although the -> expression
9562 is strictly speaking invalid, GCC retains it as a matter
9563 of QoI. See PR c/44555. */
9564 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
9565 || CONSTANT_CLASS_P (base0))
9566 && indirect_base0
9567 /* The caller guarantees that when one of the arguments is
9568 constant (i.e., null in this case) it is second. */
9569 && integer_zerop (arg1))
9571 switch (code)
9573 case EQ_EXPR:
9574 case LE_EXPR:
9575 case LT_EXPR:
9576 return constant_boolean_node (false, type);
9577 case GE_EXPR:
9578 case GT_EXPR:
9579 case NE_EXPR:
9580 return constant_boolean_node (true, type);
9581 default:
9582 gcc_unreachable ();
9587 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9588 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9589 the resulting offset is smaller in absolute value than the
9590 original one and has the same sign. */
9591 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9592 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9593 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9594 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9595 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9596 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9597 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9598 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9600 tree const1 = TREE_OPERAND (arg0, 1);
9601 tree const2 = TREE_OPERAND (arg1, 1);
9602 tree variable1 = TREE_OPERAND (arg0, 0);
9603 tree variable2 = TREE_OPERAND (arg1, 0);
9604 tree cst;
9605 const char * const warnmsg = G_("assuming signed overflow does not "
9606 "occur when combining constants around "
9607 "a comparison");
9609 /* Put the constant on the side where it doesn't overflow and is
9610 	 of lower absolute value and of the same sign as before.  */
9611 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9612 ? MINUS_EXPR : PLUS_EXPR,
9613 const2, const1);
9614 if (!TREE_OVERFLOW (cst)
9615 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9616 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9618 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9619 return fold_build2_loc (loc, code, type,
9620 variable1,
9621 fold_build2_loc (loc, TREE_CODE (arg1),
9622 TREE_TYPE (arg1),
9623 variable2, cst));
9626 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9627 ? MINUS_EXPR : PLUS_EXPR,
9628 const1, const2);
9629 if (!TREE_OVERFLOW (cst)
9630 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9631 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9633 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9634 return fold_build2_loc (loc, code, type,
9635 fold_build2_loc (loc, TREE_CODE (arg0),
9636 TREE_TYPE (arg0),
9637 variable1, cst),
9638 variable2);
9642 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9643 if (tem)
9644 return tem;
9646 /* If we are comparing an expression that just has comparisons
9647 of two integer values, arithmetic expressions of those comparisons,
9648 and constants, we can simplify it. There are only three cases
9649 to check: the two values can either be equal, the first can be
9650 greater, or the second can be greater. Fold the expression for
9651 those three values. Since each value must be 0 or 1, we have
9652 eight possibilities, each of which corresponds to the constant 0
9653 or 1 or one of the six possible comparisons.
9655 This handles common cases like (a > b) == 0 but also handles
9656 expressions like ((x > y) - (y > x)) > 0, which supposedly
9657 occur in macroized code. */
9659 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9661 tree cval1 = 0, cval2 = 0;
9663 if (twoval_comparison_p (arg0, &cval1, &cval2)
9664 /* Don't handle degenerate cases here; they should already
9665 have been handled anyway. */
9666 && cval1 != 0 && cval2 != 0
9667 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9668 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9669 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9670 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9671 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9672 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9673 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9675 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9676 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9678 /* We can't just pass T to eval_subst in case cval1 or cval2
9679 was the same as ARG1. */
9681 tree high_result
9682 = fold_build2_loc (loc, code, type,
9683 eval_subst (loc, arg0, cval1, maxval,
9684 cval2, minval),
9685 arg1);
9686 tree equal_result
9687 = fold_build2_loc (loc, code, type,
9688 eval_subst (loc, arg0, cval1, maxval,
9689 cval2, maxval),
9690 arg1);
9691 tree low_result
9692 = fold_build2_loc (loc, code, type,
9693 eval_subst (loc, arg0, cval1, minval,
9694 cval2, maxval),
9695 arg1);
9697 /* All three of these results should be 0 or 1. Confirm they are.
9698 Then use those values to select the proper code to use. */
9700 if (TREE_CODE (high_result) == INTEGER_CST
9701 && TREE_CODE (equal_result) == INTEGER_CST
9702 && TREE_CODE (low_result) == INTEGER_CST)
9704 /* Make a 3-bit mask with the high-order bit being the
9705 value for `>', the next for '=', and the low for '<'. */
9706 switch ((integer_onep (high_result) * 4)
9707 + (integer_onep (equal_result) * 2)
9708 + integer_onep (low_result))
9710 case 0:
9711 /* Always false. */
9712 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9713 case 1:
9714 code = LT_EXPR;
9715 break;
9716 case 2:
9717 code = EQ_EXPR;
9718 break;
9719 case 3:
9720 code = LE_EXPR;
9721 break;
9722 case 4:
9723 code = GT_EXPR;
9724 break;
9725 case 5:
9726 code = NE_EXPR;
9727 break;
9728 case 6:
9729 code = GE_EXPR;
9730 break;
9731 case 7:
9732 /* Always true. */
9733 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9736 return fold_build2_loc (loc, code, type, cval1, cval2);
9741 return NULL_TREE;
9745 /* Subroutine of fold_binary. Optimize complex multiplications of the
9746 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9747 argument EXPR represents the expression "z" of type TYPE. */
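   /* The identity being used: for z = a + b*i,
      z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b, with a zero
      imaginary part, which is exactly what the code below builds.  */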
9749 static tree
9750 fold_mult_zconjz (location_t loc, tree type, tree expr)
9752 tree itype = TREE_TYPE (type);
9753 tree rpart, ipart, tem;
9755 if (TREE_CODE (expr) == COMPLEX_EXPR)
9757 rpart = TREE_OPERAND (expr, 0);
9758 ipart = TREE_OPERAND (expr, 1);
9760 else if (TREE_CODE (expr) == COMPLEX_CST)
9762 rpart = TREE_REALPART (expr);
9763 ipart = TREE_IMAGPART (expr);
9765 else
9767 expr = save_expr (expr);
9768 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9769 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9772 rpart = save_expr (rpart);
9773 ipart = save_expr (ipart);
9774 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9775 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9776 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9777 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9778 build_zero_cst (itype));
9782 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9783 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
9784 true if successful. */
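   /* For example, a CONSTRUCTOR { 1, 2 } read into an ELTS array with
      NELTS == 4 yields { 1, 2, 0, 0 }; trailing elements are padded
      with zeros of the element type below.  */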
9786 static bool
9787 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
9789 unsigned HOST_WIDE_INT i, nunits;
9791 if (TREE_CODE (arg) == VECTOR_CST
9792 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
9794 for (i = 0; i < nunits; ++i)
9795 elts[i] = VECTOR_CST_ELT (arg, i);
9797 else if (TREE_CODE (arg) == CONSTRUCTOR)
9799 constructor_elt *elt;
9801 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9802 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9803 return false;
9804 else
9805 elts[i] = elt->value;
9807 else
9808 return false;
9809 for (; i < nelts; i++)
9810 elts[i]
9811 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9812 return true;
9815 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9816 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9817 NULL_TREE otherwise. */
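   /* Selector indices 0..NELTS-1 pick from ARG0 and NELTS..2*NELTS-1
      pick from ARG1.  E.g. for ARG0 = { a, b, c, d },
      ARG1 = { e, f, g, h } and SEL = { 0, 4, 1, 5 }, the result
      is { a, e, b, f }.  */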
9819 tree
9820 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
9822 unsigned int i;
9823 unsigned HOST_WIDE_INT nelts;
9824 bool need_ctor = false;
9826 if (!sel.length ().is_constant (&nelts))
9827 return NULL_TREE;
9828 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
9829 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
9830 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
9831 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9832 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9833 return NULL_TREE;
9835 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
9836 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
9837 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
9838 return NULL_TREE;
9840 tree_vector_builder out_elts (type, nelts, 1);
9841 for (i = 0; i < nelts; i++)
9843 HOST_WIDE_INT index;
9844 if (!sel[i].is_constant (&index))
9845 return NULL_TREE;
9846 if (!CONSTANT_CLASS_P (in_elts[index]))
9847 need_ctor = true;
9848 out_elts.quick_push (unshare_expr (in_elts[index]));
9851 if (need_ctor)
9853 vec<constructor_elt, va_gc> *v;
9854 vec_alloc (v, nelts);
9855 for (i = 0; i < nelts; i++)
9856 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
9857 return build_constructor (type, v);
9859 else
9860 return out_elts.build ();
9863 /* Try to fold a pointer difference of type TYPE between two address
9864    expressions of array references AREF0 and AREF1 using location LOC.
9865    Return a simplified expression for the difference or NULL_TREE.  */
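/* For instance, &a[i] - &a[j] becomes (i - j) * sizeof (a[0]), plus
   the recursively folded difference of the bases when the bases are
   themselves array references or pointer indirections.  */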
9867 static tree
9868 fold_addr_of_array_ref_difference (location_t loc, tree type,
9869 tree aref0, tree aref1,
9870 bool use_pointer_diff)
9872 tree base0 = TREE_OPERAND (aref0, 0);
9873 tree base1 = TREE_OPERAND (aref1, 0);
9874 tree base_offset = build_int_cst (type, 0);
9876 /* If the bases are array references as well, recurse. If the bases
9877 are pointer indirections compute the difference of the pointers.
9878 If the bases are equal, we are set. */
9879 if ((TREE_CODE (base0) == ARRAY_REF
9880 && TREE_CODE (base1) == ARRAY_REF
9881 && (base_offset
9882 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
9883 use_pointer_diff)))
9884 || (INDIRECT_REF_P (base0)
9885 && INDIRECT_REF_P (base1)
9886 && (base_offset
9887 = use_pointer_diff
9888 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
9889 TREE_OPERAND (base0, 0),
9890 TREE_OPERAND (base1, 0))
9891 : fold_binary_loc (loc, MINUS_EXPR, type,
9892 fold_convert (type,
9893 TREE_OPERAND (base0, 0)),
9894 fold_convert (type,
9895 TREE_OPERAND (base1, 0)))))
9896 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
9898 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9899 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9900 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9901 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
9902 return fold_build2_loc (loc, PLUS_EXPR, type,
9903 base_offset,
9904 fold_build2_loc (loc, MULT_EXPR, type,
9905 diff, esz));
9907 return NULL_TREE;
9910 /* If the real or vector real constant CST of type TYPE has an exact
9911 inverse, return it, else return NULL. */
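/* E.g. for 4.0 this returns 0.25, while for 3.0 it returns NULL
   because 1/3 has no exact binary floating-point representation.  */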
9913 tree
9914 exact_inverse (tree type, tree cst)
9916 REAL_VALUE_TYPE r;
9917 tree unit_type;
9918 machine_mode mode;
9920 switch (TREE_CODE (cst))
9922 case REAL_CST:
9923 r = TREE_REAL_CST (cst);
9925 if (exact_real_inverse (TYPE_MODE (type), &r))
9926 return build_real (type, r);
9928 return NULL_TREE;
9930 case VECTOR_CST:
9932 unit_type = TREE_TYPE (type);
9933 mode = TYPE_MODE (unit_type);
9935 tree_vector_builder elts;
9936 if (!elts.new_unary_operation (type, cst, false))
9937 return NULL_TREE;
9938 unsigned int count = elts.encoded_nelts ();
9939 for (unsigned int i = 0; i < count; ++i)
9941 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9942 if (!exact_real_inverse (mode, &r))
9943 return NULL_TREE;
9944 elts.quick_push (build_real (unit_type, r));
9947 return elts.build ();
9950 default:
9951 return NULL_TREE;
9955 /* Mask out the tz least significant bits of X of type TYPE where
9956 tz is the number of trailing zeroes in Y. */
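/* E.g. for Y = 24 (three trailing zero bits) and X = 29 (0b11101),
   the result is 24 (0b11000): the low three bits of X are cleared.  */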
9957 static wide_int
9958 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9960 int tz = wi::ctz (y);
9961 if (tz > 0)
9962 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9963 return x;
9966 /* Return true when T is an address and is known to be nonzero.
9967 For floating point we further ensure that T is not denormal.
9968    Similar logic is present in nonzero_address in rtlanal.c.
9970 If the return value is based on the assumption that signed overflow
9971 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9972 change *STRICT_OVERFLOW_P. */
9974 static bool
9975 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9977 tree type = TREE_TYPE (t);
9978 enum tree_code code;
9980 /* Doing something useful for floating point would need more work. */
9981 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9982 return false;
9984 code = TREE_CODE (t);
9985 switch (TREE_CODE_CLASS (code))
9987 case tcc_unary:
9988 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9989 strict_overflow_p);
9990 case tcc_binary:
9991 case tcc_comparison:
9992 return tree_binary_nonzero_warnv_p (code, type,
9993 TREE_OPERAND (t, 0),
9994 TREE_OPERAND (t, 1),
9995 strict_overflow_p);
9996 case tcc_constant:
9997 case tcc_declaration:
9998 case tcc_reference:
9999 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10001 default:
10002 break;
10005 switch (code)
10007 case TRUTH_NOT_EXPR:
10008 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10009 strict_overflow_p);
10011 case TRUTH_AND_EXPR:
10012 case TRUTH_OR_EXPR:
10013 case TRUTH_XOR_EXPR:
10014 return tree_binary_nonzero_warnv_p (code, type,
10015 TREE_OPERAND (t, 0),
10016 TREE_OPERAND (t, 1),
10017 strict_overflow_p);
10019 case COND_EXPR:
10020 case CONSTRUCTOR:
10021 case OBJ_TYPE_REF:
10022 case ASSERT_EXPR:
10023 case ADDR_EXPR:
10024 case WITH_SIZE_EXPR:
10025 case SSA_NAME:
10026 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10028 case COMPOUND_EXPR:
10029 case MODIFY_EXPR:
10030 case BIND_EXPR:
10031 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10032 strict_overflow_p);
10034 case SAVE_EXPR:
10035 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10036 strict_overflow_p);
10038 case CALL_EXPR:
10040 tree fndecl = get_callee_fndecl (t);
10041 if (!fndecl) return false;
10042 if (flag_delete_null_pointer_checks && !flag_check_new
10043 && DECL_IS_OPERATOR_NEW_P (fndecl)
10044 && !TREE_NOTHROW (fndecl))
10045 return true;
10046 if (flag_delete_null_pointer_checks
10047 && lookup_attribute ("returns_nonnull",
10048 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10049 return true;
10050 return alloca_call_p (t);
10053 default:
10054 break;
10056 return false;
10059 /* Return true when T is an address and is known to be nonzero.
10060 Handle warnings about undefined signed overflow. */
10062 bool
10063 tree_expr_nonzero_p (tree t)
10065 bool ret, strict_overflow_p;
10067 strict_overflow_p = false;
10068 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10069 if (strict_overflow_p)
10070 fold_overflow_warning (("assuming signed overflow does not occur when "
10071 "determining that expression is always "
10072 "non-zero"),
10073 WARN_STRICT_OVERFLOW_MISC);
10074 return ret;
10077 /* Return true if T is known not to be equal to an integer W. */
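/* For example, an SSA name with value range [0, 5] is known not to
   equal 7, and a value whose known-zero bits overlap set bits of W
   can likewise be ruled out.  */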
10079 bool
10080 expr_not_equal_to (tree t, const wide_int &w)
10082 wide_int min, max, nz;
10083 value_range_kind rtype;
10084 switch (TREE_CODE (t))
10086 case INTEGER_CST:
10087 return wi::to_wide (t) != w;
10089 case SSA_NAME:
10090 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10091 return false;
10092 rtype = get_range_info (t, &min, &max);
10093 if (rtype == VR_RANGE)
10095 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
10096 return true;
10097 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
10098 return true;
10100 else if (rtype == VR_ANTI_RANGE
10101 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
10102 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
10103 return true;
10104 /* If T has some known zero bits and W has any of those bits set,
10105 then T is known not to be equal to W. */
10106 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10107 TYPE_PRECISION (TREE_TYPE (t))), 0))
10108 return true;
10109 return false;
10111 default:
10112 return false;
10116 /* Fold a binary expression of code CODE and type TYPE with operands
10117 OP0 and OP1. LOC is the location of the resulting expression.
10118 Return the folded expression if folding is successful. Otherwise,
10119 return NULL_TREE. */
10121 tree
10122 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10123 tree op0, tree op1)
10125 enum tree_code_class kind = TREE_CODE_CLASS (code);
10126 tree arg0, arg1, tem;
10127 tree t1 = NULL_TREE;
10128 bool strict_overflow_p;
10129 unsigned int prec;
10131 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10132 && TREE_CODE_LENGTH (code) == 2
10133 && op0 != NULL_TREE
10134 && op1 != NULL_TREE);
10136 arg0 = op0;
10137 arg1 = op1;
10139 /* Strip any conversions that don't change the mode. This is
10140 safe for every expression, except for a comparison expression
10141 because its signedness is derived from its operands. So, in
10142 the latter case, only strip conversions that don't change the
10143 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10144 preserved.
10146 Note that this is done as an internal manipulation within the
10147 constant folder, in order to find the simplest representation
10148 of the arguments so that their form can be studied. In any
10149    case, the appropriate type conversions should be put back in
10150 the tree that will get out of the constant folder. */
10152 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10154 STRIP_SIGN_NOPS (arg0);
10155 STRIP_SIGN_NOPS (arg1);
10157 else
10159 STRIP_NOPS (arg0);
10160 STRIP_NOPS (arg1);
10163 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10164 constant but we can't do arithmetic on them. */
10165 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10167 tem = const_binop (code, type, arg0, arg1);
10168 if (tem != NULL_TREE)
10170 if (TREE_TYPE (tem) != type)
10171 tem = fold_convert_loc (loc, type, tem);
10172 return tem;
10176 /* If this is a commutative operation, and ARG0 is a constant, move it
10177 to ARG1 to reduce the number of tests below. */
10178 if (commutative_tree_code (code)
10179 && tree_swap_operands_p (arg0, arg1))
10180 return fold_build2_loc (loc, code, type, op1, op0);
10182 /* Likewise if this is a comparison, and ARG0 is a constant, move it
10183 to ARG1 to reduce the number of tests below. */
10184 if (kind == tcc_comparison
10185 && tree_swap_operands_p (arg0, arg1))
10186 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10188 tem = generic_simplify (loc, code, type, op0, op1);
10189 if (tem)
10190 return tem;
10192 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10194 First check for cases where an arithmetic operation is applied to a
10195 compound, conditional, or comparison operation. Push the arithmetic
10196 operation inside the compound or conditional to see if any folding
10197 can then be done. Convert comparison to conditional for this purpose.
10198    This also optimizes non-constant cases that used to be done in
10199 expand_expr.
10201 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10202 one of the operands is a comparison and the other is a comparison, a
10203 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10204 code below would make the expression more complex. Change it to a
10205 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10206 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10208 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10209 || code == EQ_EXPR || code == NE_EXPR)
10210 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10211 && ((truth_value_p (TREE_CODE (arg0))
10212 && (truth_value_p (TREE_CODE (arg1))
10213 || (TREE_CODE (arg1) == BIT_AND_EXPR
10214 && integer_onep (TREE_OPERAND (arg1, 1)))))
10215 || (truth_value_p (TREE_CODE (arg1))
10216 && (truth_value_p (TREE_CODE (arg0))
10217 || (TREE_CODE (arg0) == BIT_AND_EXPR
10218 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10220 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10221 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10222 : TRUTH_XOR_EXPR,
10223 boolean_type_node,
10224 fold_convert_loc (loc, boolean_type_node, arg0),
10225 fold_convert_loc (loc, boolean_type_node, arg1));
10227 if (code == EQ_EXPR)
10228 tem = invert_truthvalue_loc (loc, tem);
10230 return fold_convert_loc (loc, type, tem);
10233 if (TREE_CODE_CLASS (code) == tcc_binary
10234 || TREE_CODE_CLASS (code) == tcc_comparison)
10236 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10238 tem = fold_build2_loc (loc, code, type,
10239 fold_convert_loc (loc, TREE_TYPE (op0),
10240 TREE_OPERAND (arg0, 1)), op1);
10241 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10242 tem);
10244 if (TREE_CODE (arg1) == COMPOUND_EXPR)
10246 tem = fold_build2_loc (loc, code, type, op0,
10247 fold_convert_loc (loc, TREE_TYPE (op1),
10248 TREE_OPERAND (arg1, 1)));
10249 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10250 tem);
10253 if (TREE_CODE (arg0) == COND_EXPR
10254 || TREE_CODE (arg0) == VEC_COND_EXPR
10255 || COMPARISON_CLASS_P (arg0))
10257 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10258 arg0, arg1,
10259 /*cond_first_p=*/1);
10260 if (tem != NULL_TREE)
10261 return tem;
10264 if (TREE_CODE (arg1) == COND_EXPR
10265 || TREE_CODE (arg1) == VEC_COND_EXPR
10266 || COMPARISON_CLASS_P (arg1))
10268 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10269 arg1, arg0,
10270 /*cond_first_p=*/0);
10271 if (tem != NULL_TREE)
10272 return tem;
10276 switch (code)
10278 case MEM_REF:
10279 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10280 if (TREE_CODE (arg0) == ADDR_EXPR
10281 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10283 tree iref = TREE_OPERAND (arg0, 0);
10284 return fold_build2 (MEM_REF, type,
10285 TREE_OPERAND (iref, 0),
10286 int_const_binop (PLUS_EXPR, arg1,
10287 TREE_OPERAND (iref, 1)));
10290 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10291 if (TREE_CODE (arg0) == ADDR_EXPR
10292 && handled_component_p (TREE_OPERAND (arg0, 0)))
10294 tree base;
10295 poly_int64 coffset;
10296 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10297 &coffset);
10298 if (!base)
10299 return NULL_TREE;
10300 return fold_build2 (MEM_REF, type,
10301 build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
10302 int_const_binop (PLUS_EXPR, arg1,
10303 size_int (coffset)));
10306 return NULL_TREE;
10308 case POINTER_PLUS_EXPR:
10309 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10310 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10311 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10312 return fold_convert_loc (loc, type,
10313 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10314 fold_convert_loc (loc, sizetype,
10315 arg1),
10316 fold_convert_loc (loc, sizetype,
10317 arg0)));
10319 return NULL_TREE;
10321 case PLUS_EXPR:
10322 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10324 /* X + (X / CST) * -CST is X % CST. */
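	  /* E.g. X + (X / 16) * -16 rewrites to X % 16, since truncating
	     modulo is defined by X % CST == X - (X / CST) * CST.  */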
10325 if (TREE_CODE (arg1) == MULT_EXPR
10326 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10327 && operand_equal_p (arg0,
10328 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10330 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10331 tree cst1 = TREE_OPERAND (arg1, 1);
10332 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10333 cst1, cst0);
10334 if (sum && integer_zerop (sum))
10335 return fold_convert_loc (loc, type,
10336 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10337 TREE_TYPE (arg0), arg0,
10338 cst0));
10342 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10343 one. Make sure the type is not saturating and has the signedness of
10344 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10345 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10346 if ((TREE_CODE (arg0) == MULT_EXPR
10347 || TREE_CODE (arg1) == MULT_EXPR)
10348 && !TYPE_SATURATING (type)
10349 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10350 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10351 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10353 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10354 if (tem)
10355 return tem;
10358 if (! FLOAT_TYPE_P (type))
10360 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10361 (plus (plus (mult) (mult)) (foo)) so that we can
10362 take advantage of the factoring cases below. */
10363 if (ANY_INTEGRAL_TYPE_P (type)
10364 && TYPE_OVERFLOW_WRAPS (type)
10365 && (((TREE_CODE (arg0) == PLUS_EXPR
10366 || TREE_CODE (arg0) == MINUS_EXPR)
10367 && TREE_CODE (arg1) == MULT_EXPR)
10368 || ((TREE_CODE (arg1) == PLUS_EXPR
10369 || TREE_CODE (arg1) == MINUS_EXPR)
10370 && TREE_CODE (arg0) == MULT_EXPR)))
10372 tree parg0, parg1, parg, marg;
10373 enum tree_code pcode;
10375 if (TREE_CODE (arg1) == MULT_EXPR)
10376 parg = arg0, marg = arg1;
10377 else
10378 parg = arg1, marg = arg0;
10379 pcode = TREE_CODE (parg);
10380 parg0 = TREE_OPERAND (parg, 0);
10381 parg1 = TREE_OPERAND (parg, 1);
10382 STRIP_NOPS (parg0);
10383 STRIP_NOPS (parg1);
10385 if (TREE_CODE (parg0) == MULT_EXPR
10386 && TREE_CODE (parg1) != MULT_EXPR)
10387 return fold_build2_loc (loc, pcode, type,
10388 fold_build2_loc (loc, PLUS_EXPR, type,
10389 fold_convert_loc (loc, type,
10390 parg0),
10391 fold_convert_loc (loc, type,
10392 marg)),
10393 fold_convert_loc (loc, type, parg1));
10394 if (TREE_CODE (parg0) != MULT_EXPR
10395 && TREE_CODE (parg1) == MULT_EXPR)
10396 return
10397 fold_build2_loc (loc, PLUS_EXPR, type,
10398 fold_convert_loc (loc, type, parg0),
10399 fold_build2_loc (loc, pcode, type,
10400 fold_convert_loc (loc, type, marg),
10401 fold_convert_loc (loc, type,
10402 parg1)));
10405 else
10407 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10408 to __complex__ ( x, y ). This is not the same for SNaNs or
10409 if signed zeros are involved. */
10410 if (!HONOR_SNANS (element_mode (arg0))
10411 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10412 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10414 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10415 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10416 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10417 bool arg0rz = false, arg0iz = false;
10418 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10419 || (arg0i && (arg0iz = real_zerop (arg0i))))
10421 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10422 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10423 if (arg0rz && arg1i && real_zerop (arg1i))
10425 tree rp = arg1r ? arg1r
10426 : build1 (REALPART_EXPR, rtype, arg1);
10427 tree ip = arg0i ? arg0i
10428 : build1 (IMAGPART_EXPR, rtype, arg0);
10429 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10431 else if (arg0iz && arg1r && real_zerop (arg1r))
10433 tree rp = arg0r ? arg0r
10434 : build1 (REALPART_EXPR, rtype, arg0);
10435 tree ip = arg1i ? arg1i
10436 : build1 (IMAGPART_EXPR, rtype, arg1);
10437 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10442 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10443 We associate floats only if the user has specified
10444 -fassociative-math. */
10445 if (flag_associative_math
10446 && TREE_CODE (arg1) == PLUS_EXPR
10447 && TREE_CODE (arg0) != MULT_EXPR)
10449 tree tree10 = TREE_OPERAND (arg1, 0);
10450 tree tree11 = TREE_OPERAND (arg1, 1);
10451 if (TREE_CODE (tree11) == MULT_EXPR
10452 && TREE_CODE (tree10) == MULT_EXPR)
10454 tree tree0;
10455 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10456 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10459 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10460 We associate floats only if the user has specified
10461 -fassociative-math. */
10462 if (flag_associative_math
10463 && TREE_CODE (arg0) == PLUS_EXPR
10464 && TREE_CODE (arg1) != MULT_EXPR)
10466 tree tree00 = TREE_OPERAND (arg0, 0);
10467 tree tree01 = TREE_OPERAND (arg0, 1);
10468 if (TREE_CODE (tree01) == MULT_EXPR
10469 && TREE_CODE (tree00) == MULT_EXPR)
10471 tree tree0;
10472 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10473 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10478 bit_rotate:
10479 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10480 is a rotate of A by C1 bits. */
10481 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10482 is a rotate of A by B bits.
10483 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
10484 though in this case CODE must be | and not + or ^, otherwise
10485 it doesn't return A when B is 0. */
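      /* E.g. on a 32-bit unsigned type, (A << 3) + (A >> 29) is A rotated
	 left by 3, and (A << B) | (A >> (-B & 31)) is a rotate by the
	 variable amount B, which stays correct even when B is 0.  */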
10487 enum tree_code code0, code1;
10488 tree rtype;
10489 code0 = TREE_CODE (arg0);
10490 code1 = TREE_CODE (arg1);
10491 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10492 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10493 && operand_equal_p (TREE_OPERAND (arg0, 0),
10494 TREE_OPERAND (arg1, 0), 0)
10495 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10496 TYPE_UNSIGNED (rtype))
10497 /* Only create rotates in complete modes. Other cases are not
10498 expanded properly. */
10499 && (element_precision (rtype)
10500 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
10502 tree tree01, tree11;
10503 tree orig_tree01, orig_tree11;
10504 enum tree_code code01, code11;
10506 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
10507 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
10508 STRIP_NOPS (tree01);
10509 STRIP_NOPS (tree11);
10510 code01 = TREE_CODE (tree01);
10511 code11 = TREE_CODE (tree11);
10512 if (code11 != MINUS_EXPR
10513 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
10515 std::swap (code0, code1);
10516 std::swap (code01, code11);
10517 std::swap (tree01, tree11);
10518 std::swap (orig_tree01, orig_tree11);
10520 if (code01 == INTEGER_CST
10521 && code11 == INTEGER_CST
10522 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10523 == element_precision (rtype)))
10525 tem = build2_loc (loc, LROTATE_EXPR,
10526 rtype, TREE_OPERAND (arg0, 0),
10527 code0 == LSHIFT_EXPR
10528 ? orig_tree01 : orig_tree11);
10529 return fold_convert_loc (loc, type, tem);
10531 else if (code11 == MINUS_EXPR)
10533 tree tree110, tree111;
10534 tree110 = TREE_OPERAND (tree11, 0);
10535 tree111 = TREE_OPERAND (tree11, 1);
10536 STRIP_NOPS (tree110);
10537 STRIP_NOPS (tree111);
10538 if (TREE_CODE (tree110) == INTEGER_CST
10539 && compare_tree_int (tree110,
10540 element_precision (rtype)) == 0
10541 && operand_equal_p (tree01, tree111, 0))
10543 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10544 ? LROTATE_EXPR : RROTATE_EXPR),
10545 rtype, TREE_OPERAND (arg0, 0),
10546 orig_tree01);
10547 return fold_convert_loc (loc, type, tem);
10550 else if (code == BIT_IOR_EXPR
10551 && code11 == BIT_AND_EXPR
10552 && pow2p_hwi (element_precision (rtype)))
10554 tree tree110, tree111;
10555 tree110 = TREE_OPERAND (tree11, 0);
10556 tree111 = TREE_OPERAND (tree11, 1);
10557 STRIP_NOPS (tree110);
10558 STRIP_NOPS (tree111);
10559 if (TREE_CODE (tree110) == NEGATE_EXPR
10560 && TREE_CODE (tree111) == INTEGER_CST
10561 && compare_tree_int (tree111,
10562 element_precision (rtype) - 1) == 0
10563 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
10565 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10566 ? LROTATE_EXPR : RROTATE_EXPR),
10567 rtype, TREE_OPERAND (arg0, 0),
10568 orig_tree01);
10569 return fold_convert_loc (loc, type, tem);
10575 associate:
10576   /* In most languages, we can't associate operations on floats through
10577 parentheses. Rather than remember where the parentheses were, we
10578 don't associate floats at all, unless the user has specified
10579 -fassociative-math.
10580 And, we need to make sure type is not saturating. */
10582 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10583 && !TYPE_SATURATING (type))
10585 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
10586 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
10587 tree atype = type;
10588 bool ok = true;
10590 /* Split both trees into variables, constants, and literals. Then
10591 associate each group together, the constants with literals,
10592 then the result with variables. This increases the chances of
10593 literals being recombined later and of generating relocatable
10594 expressions for the sum of a constant and literal. */
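	 /* For instance, (x + 4) + (y + 2) splits into variables x, y and
	    literals 4, 2; re-association then yields (x + y) + 6.  */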
10595 var0 = split_tree (arg0, type, code,
10596 &minus_var0, &con0, &minus_con0,
10597 &lit0, &minus_lit0, 0);
10598 var1 = split_tree (arg1, type, code,
10599 &minus_var1, &con1, &minus_con1,
10600 &lit1, &minus_lit1, code == MINUS_EXPR);
10602 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10603 if (code == MINUS_EXPR)
10604 code = PLUS_EXPR;
10606 /* With undefined overflow prefer doing association in a type
10607 which wraps on overflow, if that is one of the operand types. */
10608 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
10609 && !TYPE_OVERFLOW_WRAPS (type))
10611 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10612 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10613 atype = TREE_TYPE (arg0);
10614 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10615 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10616 atype = TREE_TYPE (arg1);
10617 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10620 /* With undefined overflow we can only associate constants with one
10621 variable, and constants whose association doesn't overflow. */
10622 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
10623 && !TYPE_OVERFLOW_WRAPS (atype))
10625 if ((var0 && var1) || (minus_var0 && minus_var1))
10627 /* ??? If split_tree would handle NEGATE_EXPR we could
10628 simply reject these cases and the allowed cases would
10629 be the var0/minus_var1 ones. */
10630 tree tmp0 = var0 ? var0 : minus_var0;
10631 tree tmp1 = var1 ? var1 : minus_var1;
10632 bool one_neg = false;
10634 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10636 tmp0 = TREE_OPERAND (tmp0, 0);
10637 one_neg = !one_neg;
10639 if (CONVERT_EXPR_P (tmp0)
10640 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10641 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10642 <= TYPE_PRECISION (atype)))
10643 tmp0 = TREE_OPERAND (tmp0, 0);
10644 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10646 tmp1 = TREE_OPERAND (tmp1, 0);
10647 one_neg = !one_neg;
10649 if (CONVERT_EXPR_P (tmp1)
10650 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10651 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10652 <= TYPE_PRECISION (atype)))
10653 tmp1 = TREE_OPERAND (tmp1, 0);
10654 /* The only case we can still associate with two variables
10655 is if they cancel out. */
10656 if (!one_neg
10657 || !operand_equal_p (tmp0, tmp1, 0))
10658 ok = false;
10660 else if ((var0 && minus_var1
10661 && ! operand_equal_p (var0, minus_var1, 0))
10662 || (minus_var0 && var1
10663 && ! operand_equal_p (minus_var0, var1, 0)))
10664 ok = false;
10667 /* Only do something if we found more than two objects. Otherwise,
10668 nothing has changed and we risk infinite recursion. */
10669 if (ok
10670 && ((var0 != 0) + (var1 != 0)
10671 + (minus_var0 != 0) + (minus_var1 != 0)
10672 + (con0 != 0) + (con1 != 0)
10673 + (minus_con0 != 0) + (minus_con1 != 0)
10674 + (lit0 != 0) + (lit1 != 0)
10675 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
10677 var0 = associate_trees (loc, var0, var1, code, atype);
10678 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
10679 code, atype);
10680 con0 = associate_trees (loc, con0, con1, code, atype);
10681 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
10682 code, atype);
10683 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10684 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10685 code, atype);
10687 if (minus_var0 && var0)
10689 var0 = associate_trees (loc, var0, minus_var0,
10690 MINUS_EXPR, atype);
10691 minus_var0 = 0;
10693 if (minus_con0 && con0)
10695 con0 = associate_trees (loc, con0, minus_con0,
10696 MINUS_EXPR, atype);
10697 minus_con0 = 0;
10700 /* Preserve the MINUS_EXPR if the negative part of the literal is
10701 greater than the positive part. Otherwise, the multiplicative
10702 	     folding code (i.e. extract_muldiv) may be fooled in case
10703 unsigned constants are subtracted, like in the following
10704 example: ((X*2 + 4) - 8U)/2. */
10705 if (minus_lit0 && lit0)
10707 if (TREE_CODE (lit0) == INTEGER_CST
10708 && TREE_CODE (minus_lit0) == INTEGER_CST
10709 && tree_int_cst_lt (lit0, minus_lit0)
10710 /* But avoid ending up with only negated parts. */
10711 && (var0 || con0))
10713 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10714 MINUS_EXPR, atype);
10715 lit0 = 0;
10717 else
10719 lit0 = associate_trees (loc, lit0, minus_lit0,
10720 MINUS_EXPR, atype);
10721 minus_lit0 = 0;
10725 /* Don't introduce overflows through reassociation. */
10726 if ((lit0 && TREE_OVERFLOW_P (lit0))
10727 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
10728 return NULL_TREE;
10730 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
10731 con0 = associate_trees (loc, con0, lit0, code, atype);
10732 lit0 = 0;
10733 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
10734 code, atype);
10735 minus_lit0 = 0;
10737 /* Eliminate minus_con0. */
10738 if (minus_con0)
10740 if (con0)
10741 con0 = associate_trees (loc, con0, minus_con0,
10742 MINUS_EXPR, atype);
10743 else if (var0)
10744 var0 = associate_trees (loc, var0, minus_con0,
10745 MINUS_EXPR, atype);
10746 else
10747 gcc_unreachable ();
10748 minus_con0 = 0;
10751 /* Eliminate minus_var0. */
10752 if (minus_var0)
10754 if (con0)
10755 con0 = associate_trees (loc, con0, minus_var0,
10756 MINUS_EXPR, atype);
10757 else
10758 gcc_unreachable ();
10759 minus_var0 = 0;
10762 return
10763 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10764 code, atype));
10768 return NULL_TREE;
10770 case POINTER_DIFF_EXPR:
10771 case MINUS_EXPR:
10772 /* Fold &a[i] - &a[j] to i-j. */
10773 if (TREE_CODE (arg0) == ADDR_EXPR
10774 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10775 && TREE_CODE (arg1) == ADDR_EXPR
10776 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10778 tree tem = fold_addr_of_array_ref_difference (loc, type,
10779 TREE_OPERAND (arg0, 0),
10780 TREE_OPERAND (arg1, 0),
10781 code
10782 == POINTER_DIFF_EXPR);
10783 if (tem)
10784 return tem;
10787 /* Further transformations are not for pointers. */
10788 if (code == POINTER_DIFF_EXPR)
10789 return NULL_TREE;
10791 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10792 if (TREE_CODE (arg0) == NEGATE_EXPR
10793 && negate_expr_p (op1)
10794 /* If arg0 is e.g. unsigned int and type is int, then this could
10795 introduce UB, because if A is INT_MIN at runtime, the original
10796 expression can be well defined while the latter is not.
10797 See PR83269. */
10798 && !(ANY_INTEGRAL_TYPE_P (type)
10799 && TYPE_OVERFLOW_UNDEFINED (type)
10800 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10801 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10802 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
10803 fold_convert_loc (loc, type,
10804 TREE_OPERAND (arg0, 0)));
10806 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10807 __complex__ ( x, -y ). This is not the same for SNaNs or if
10808 signed zeros are involved. */
10809 if (!HONOR_SNANS (element_mode (arg0))
10810 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10811 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10813 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10814 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10815 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10816 bool arg0rz = false, arg0iz = false;
10817 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10818 || (arg0i && (arg0iz = real_zerop (arg0i))))
10820 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10821 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10822 if (arg0rz && arg1i && real_zerop (arg1i))
10824 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10825 arg1r ? arg1r
10826 : build1 (REALPART_EXPR, rtype, arg1));
10827 tree ip = arg0i ? arg0i
10828 : build1 (IMAGPART_EXPR, rtype, arg0);
10829 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10831 else if (arg0iz && arg1r && real_zerop (arg1r))
10833 tree rp = arg0r ? arg0r
10834 : build1 (REALPART_EXPR, rtype, arg0);
10835 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10836 arg1i ? arg1i
10837 : build1 (IMAGPART_EXPR, rtype, arg1));
10838 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10843 /* A - B -> A + (-B) if B is easily negatable. */
10844 if (negate_expr_p (op1)
10845 && ! TYPE_OVERFLOW_SANITIZED (type)
10846 && ((FLOAT_TYPE_P (type)
10847 /* Avoid this transformation if B is a positive REAL_CST. */
10848 && (TREE_CODE (op1) != REAL_CST
10849 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
10850 || INTEGRAL_TYPE_P (type)))
10851 return fold_build2_loc (loc, PLUS_EXPR, type,
10852 fold_convert_loc (loc, type, arg0),
10853 negate_expr (op1));
10855 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10856 one. Make sure the type is not saturating and has the signedness of
10857 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10858 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10859 if ((TREE_CODE (arg0) == MULT_EXPR
10860 || TREE_CODE (arg1) == MULT_EXPR)
10861 && !TYPE_SATURATING (type)
10862 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10863 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10864 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10866 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10867 if (tem)
10868 return tem;
10871 goto associate;
10873 case MULT_EXPR:
10874 if (! FLOAT_TYPE_P (type))
10876 /* Transform x * -C into -x * C if x is easily negatable. */
10877 if (TREE_CODE (op1) == INTEGER_CST
10878 && tree_int_cst_sgn (op1) == -1
10879 && negate_expr_p (op0)
10880 && negate_expr_p (op1)
10881 && (tem = negate_expr (op1)) != op1
10882 && ! TREE_OVERFLOW (tem))
10883 return fold_build2_loc (loc, MULT_EXPR, type,
10884 fold_convert_loc (loc, type,
10885 negate_expr (op0)), tem);
10887 strict_overflow_p = false;
10888 if (TREE_CODE (arg1) == INTEGER_CST
10889 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10890 &strict_overflow_p)) != 0)
10892 if (strict_overflow_p)
10893 fold_overflow_warning (("assuming signed overflow does not "
10894 "occur when simplifying "
10895 "multiplication"),
10896 WARN_STRICT_OVERFLOW_MISC);
10897 return fold_convert_loc (loc, type, tem);
10900 /* Optimize z * conj(z) for integer complex numbers. */
10901 if (TREE_CODE (arg0) == CONJ_EXPR
10902 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10903 return fold_mult_zconjz (loc, type, arg1);
10904 if (TREE_CODE (arg1) == CONJ_EXPR
10905 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10906 return fold_mult_zconjz (loc, type, arg0);
10908 else
10910 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10911 This is not the same for NaNs or if signed zeros are
10912 involved. */
10913 if (!HONOR_NANS (arg0)
10914 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10915 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10916 && TREE_CODE (arg1) == COMPLEX_CST
10917 && real_zerop (TREE_REALPART (arg1)))
10919 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10920 if (real_onep (TREE_IMAGPART (arg1)))
10921 return
10922 fold_build2_loc (loc, COMPLEX_EXPR, type,
10923 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10924 rtype, arg0)),
10925 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10926 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10927 return
10928 fold_build2_loc (loc, COMPLEX_EXPR, type,
10929 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10930 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10931 rtype, arg0)));
10934 /* Optimize z * conj(z) for floating point complex numbers.
10935 Guarded by flag_unsafe_math_optimizations as non-finite
10936 imaginary components don't produce scalar results. */
10937 if (flag_unsafe_math_optimizations
10938 && TREE_CODE (arg0) == CONJ_EXPR
10939 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10940 return fold_mult_zconjz (loc, type, arg1);
10941 if (flag_unsafe_math_optimizations
10942 && TREE_CODE (arg1) == CONJ_EXPR
10943 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10944 return fold_mult_zconjz (loc, type, arg0);
10946 goto associate;
10948 case BIT_IOR_EXPR:
10949 /* Canonicalize (X & C1) | C2. */
10950 if (TREE_CODE (arg0) == BIT_AND_EXPR
10951 && TREE_CODE (arg1) == INTEGER_CST
10952 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10954 int width = TYPE_PRECISION (type), w;
10955 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
10956 wide_int c2 = wi::to_wide (arg1);
10958 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10959 if ((c1 & c2) == c1)
10960 return omit_one_operand_loc (loc, type, arg1,
10961 TREE_OPERAND (arg0, 0));
10963 wide_int msk = wi::mask (width, false,
10964 TYPE_PRECISION (TREE_TYPE (arg1)));
10966 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10967 if (wi::bit_and_not (msk, c1 | c2) == 0)
10969 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10970 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10973 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10974 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask the width
10975 of some machine mode, which allows further optimizations. */
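/* Illustration: (X & 3) | 2 becomes (X & 1) | 2 here, whereas
   (X & 0xff) | 0x0f is left alone, since 0xff is already a mask
   covering a whole (QImode-sized) mode. */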
10976 c1 &= msk;
10977 c2 &= msk;
10978 wide_int c3 = wi::bit_and_not (c1, c2);
10979 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10981 wide_int mask = wi::mask (w, false,
10982 TYPE_PRECISION (type));
10983 if (((c1 | c2) & mask) == mask
10984 && wi::bit_and_not (c1, mask) == 0)
10986 c3 = mask;
10987 break;
10991 if (c3 != c1)
10993 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10994 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
10995 wide_int_to_tree (type, c3));
10996 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11000 /* See if this can be simplified into a rotate first. If that
11001 is unsuccessful, continue in the association code. */
11002 goto bit_rotate;
11004 case BIT_XOR_EXPR:
11005 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11006 if (TREE_CODE (arg0) == BIT_AND_EXPR
11007 && INTEGRAL_TYPE_P (type)
11008 && integer_onep (TREE_OPERAND (arg0, 1))
11009 && integer_onep (arg1))
11010 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11011 build_zero_cst (TREE_TYPE (arg0)));
11013 /* See if this can be simplified into a rotate first. If that
11014 is unsuccessful, continue in the association code. */
11015 goto bit_rotate;
11017 case BIT_AND_EXPR:
11018 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11019 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11020 && INTEGRAL_TYPE_P (type)
11021 && integer_onep (TREE_OPERAND (arg0, 1))
11022 && integer_onep (arg1))
11024 tree tem2;
11025 tem = TREE_OPERAND (arg0, 0);
11026 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11027 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11028 tem, tem2);
11029 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11030 build_zero_cst (TREE_TYPE (tem)));
11032 /* Fold ~X & 1 as (X & 1) == 0. */
11033 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11034 && INTEGRAL_TYPE_P (type)
11035 && integer_onep (arg1))
11037 tree tem2;
11038 tem = TREE_OPERAND (arg0, 0);
11039 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11040 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11041 tem, tem2);
11042 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11043 build_zero_cst (TREE_TYPE (tem)));
11045 /* Fold !X & 1 as X == 0. */
11046 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11047 && integer_onep (arg1))
11049 tem = TREE_OPERAND (arg0, 0);
11050 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11051 build_zero_cst (TREE_TYPE (tem)));
11054 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11055 multiple of 1 << CST. */
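/* Illustration: (X * 8) & -4 folds to X * 8, since -4 == -(1 << 2)
   and the multiplier 8 is a multiple of 4, so the low two bits of
   the product are already zero. */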
11056 if (TREE_CODE (arg1) == INTEGER_CST)
11058 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11059 wide_int ncst1 = -cst1;
11060 if ((cst1 & ncst1) == ncst1
11061 && multiple_of_p (type, arg0,
11062 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11063 return fold_convert_loc (loc, type, arg0);
11066 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11067 bits from CST2. */
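/* Illustration: (X * 4) & 3 folds to 0, since the product has two
   trailing zero bits; (X * 4) & 7 drops the known-zero bits and
   becomes (X * 4) & 4. */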
11068 if (TREE_CODE (arg1) == INTEGER_CST
11069 && TREE_CODE (arg0) == MULT_EXPR
11070 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11072 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11073 wide_int masked
11074 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11076 if (masked == 0)
11077 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11078 arg0, arg1);
11079 else if (masked != warg1)
11081 /* Avoid the transform if arg1 is a mask the width of some
11082 machine mode, which allows further optimizations. */
11083 int pop = wi::popcount (warg1);
11084 if (!(pop >= BITS_PER_UNIT
11085 && pow2p_hwi (pop)
11086 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11087 return fold_build2_loc (loc, code, type, op0,
11088 wide_int_to_tree (type, masked));
11092 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11093 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11094 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11096 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11098 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11099 if (mask == -1)
11100 return
11101 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11104 goto associate;
11106 case RDIV_EXPR:
11107 /* Don't touch a floating-point divide by zero unless the mode
11108 of the constant can represent infinity. */
11109 if (TREE_CODE (arg1) == REAL_CST
11110 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11111 && real_zerop (arg1))
11112 return NULL_TREE;
11114 /* (-A) / (-B) -> A / B */
11115 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11116 return fold_build2_loc (loc, RDIV_EXPR, type,
11117 TREE_OPERAND (arg0, 0),
11118 negate_expr (arg1));
11119 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11120 return fold_build2_loc (loc, RDIV_EXPR, type,
11121 negate_expr (arg0),
11122 TREE_OPERAND (arg1, 0));
11123 return NULL_TREE;
11125 case TRUNC_DIV_EXPR:
11126 /* Fall through */
11128 case FLOOR_DIV_EXPR:
11129 /* Simplify A / (B << N) where A and B are positive and B is
11130 a power of 2, to A >> (N + log2(B)). */
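/* Illustration (A unsigned or known non-negative):
   A / (4 << N) becomes A >> (N + 2), since log2(4) == 2. */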
11131 strict_overflow_p = false;
11132 if (TREE_CODE (arg1) == LSHIFT_EXPR
11133 && (TYPE_UNSIGNED (type)
11134 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11136 tree sval = TREE_OPERAND (arg1, 0);
11137 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11139 tree sh_cnt = TREE_OPERAND (arg1, 1);
11140 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11141 wi::exact_log2 (wi::to_wide (sval)));
11143 if (strict_overflow_p)
11144 fold_overflow_warning (("assuming signed overflow does not "
11145 "occur when simplifying A / (B << N)"),
11146 WARN_STRICT_OVERFLOW_MISC);
11148 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11149 sh_cnt, pow2);
11150 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11151 fold_convert_loc (loc, type, arg0), sh_cnt);
11155 /* Fall through */
11157 case ROUND_DIV_EXPR:
11158 case CEIL_DIV_EXPR:
11159 case EXACT_DIV_EXPR:
11160 if (integer_zerop (arg1))
11161 return NULL_TREE;
11163 /* Convert -A / -B to A / B when the type is signed and overflow is
11164 undefined. */
11165 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11166 && TREE_CODE (op0) == NEGATE_EXPR
11167 && negate_expr_p (op1))
11169 if (ANY_INTEGRAL_TYPE_P (type))
11170 fold_overflow_warning (("assuming signed overflow does not occur "
11171 "when distributing negation across "
11172 "division"),
11173 WARN_STRICT_OVERFLOW_MISC);
11174 return fold_build2_loc (loc, code, type,
11175 fold_convert_loc (loc, type,
11176 TREE_OPERAND (arg0, 0)),
11177 negate_expr (op1));
11179 if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11180 && TREE_CODE (arg1) == NEGATE_EXPR
11181 && negate_expr_p (op0))
11183 if (ANY_INTEGRAL_TYPE_P (type))
11184 fold_overflow_warning (("assuming signed overflow does not occur "
11185 "when distributing negation across "
11186 "division"),
11187 WARN_STRICT_OVERFLOW_MISC);
11188 return fold_build2_loc (loc, code, type,
11189 negate_expr (op0),
11190 fold_convert_loc (loc, type,
11191 TREE_OPERAND (arg1, 0)));
11194 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11195 operation, EXACT_DIV_EXPR.
11197 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11198 At one time others generated faster code, but it's not clear whether
11199 they still do after the last round of changes to the DIV code in expmed.c. */
11200 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11201 && multiple_of_p (type, arg0, arg1))
11202 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11203 fold_convert (type, arg0),
11204 fold_convert (type, arg1));
11206 strict_overflow_p = false;
11207 if (TREE_CODE (arg1) == INTEGER_CST
11208 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11209 &strict_overflow_p)) != 0)
11211 if (strict_overflow_p)
11212 fold_overflow_warning (("assuming signed overflow does not occur "
11213 "when simplifying division"),
11214 WARN_STRICT_OVERFLOW_MISC);
11215 return fold_convert_loc (loc, type, tem);
11218 return NULL_TREE;
11220 case CEIL_MOD_EXPR:
11221 case FLOOR_MOD_EXPR:
11222 case ROUND_MOD_EXPR:
11223 case TRUNC_MOD_EXPR:
11224 strict_overflow_p = false;
11225 if (TREE_CODE (arg1) == INTEGER_CST
11226 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11227 &strict_overflow_p)) != 0)
11229 if (strict_overflow_p)
11230 fold_overflow_warning (("assuming signed overflow does not occur "
11231 "when simplifying modulus"),
11232 WARN_STRICT_OVERFLOW_MISC);
11233 return fold_convert_loc (loc, type, tem);
11236 return NULL_TREE;
11238 case LROTATE_EXPR:
11239 case RROTATE_EXPR:
11240 case RSHIFT_EXPR:
11241 case LSHIFT_EXPR:
11242 /* Since a negative shift count is not well-defined,
11243 don't try to compute it in the compiler. */
11244 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11245 return NULL_TREE;
11247 prec = element_precision (type);
11249 /* If we have a rotate of a bit operation with the rotate count and
11250 the second operand of the bit operation both constant,
11251 permute the two operations. */
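/* Illustration (assuming a 32-bit type): (X & 0xff) ror 8 becomes
   (X ror 8) & 0xff000000, where the rotated mask folds to a
   constant. */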
11252 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11253 && (TREE_CODE (arg0) == BIT_AND_EXPR
11254 || TREE_CODE (arg0) == BIT_IOR_EXPR
11255 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11256 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11258 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11259 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11260 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11261 fold_build2_loc (loc, code, type,
11262 arg00, arg1),
11263 fold_build2_loc (loc, code, type,
11264 arg01, arg1));
11267 /* Two consecutive rotates adding up to some integer
11268 multiple of the precision of the type can be ignored. */
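/* Illustration (assuming a 32-bit type): (X ror 5) ror 27 is just
   X, since (5 + 27) % 32 == 0. */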
11269 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11270 && TREE_CODE (arg0) == RROTATE_EXPR
11271 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11272 && wi::umod_trunc (wi::to_wide (arg1)
11273 + wi::to_wide (TREE_OPERAND (arg0, 1)),
11274 prec) == 0)
11275 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11277 return NULL_TREE;
11279 case MIN_EXPR:
11280 case MAX_EXPR:
11281 goto associate;
11283 case TRUTH_ANDIF_EXPR:
11284 /* Note that the operands of this must be ints
11285 and their values must be 0 or 1.
11286 ("true" is a fixed value perhaps depending on the language.) */
11287 /* If first arg is constant zero, return it. */
11288 if (integer_zerop (arg0))
11289 return fold_convert_loc (loc, type, arg0);
11290 /* FALLTHRU */
11291 case TRUTH_AND_EXPR:
11292 /* If either arg is constant true, drop it. */
11293 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11294 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11295 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11296 /* Preserve sequence points. */
11297 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11298 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11299 /* If second arg is constant zero, result is zero, but first arg
11300 must be evaluated. */
11301 if (integer_zerop (arg1))
11302 return omit_one_operand_loc (loc, type, arg1, arg0);
11303 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11304 case will be handled here. */
11305 if (integer_zerop (arg0))
11306 return omit_one_operand_loc (loc, type, arg0, arg1);
11308 /* !X && X is always false. */
11309 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11310 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11311 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11312 /* X && !X is always false. */
11313 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11314 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11315 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11317 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11318 means A >= Y && A != MAX, but in this case we know that
11319 A < X <= MAX. */
11321 if (!TREE_SIDE_EFFECTS (arg0)
11322 && !TREE_SIDE_EFFECTS (arg1))
11324 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11325 if (tem && !operand_equal_p (tem, arg0, 0))
11326 return fold_build2_loc (loc, code, type, tem, arg1);
11328 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11329 if (tem && !operand_equal_p (tem, arg1, 0))
11330 return fold_build2_loc (loc, code, type, arg0, tem);
11333 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11334 != NULL_TREE)
11335 return tem;
11337 return NULL_TREE;
11339 case TRUTH_ORIF_EXPR:
11340 /* Note that the operands of this must be ints
11341 and their values must be 0 or true.
11342 ("true" is a fixed value perhaps depending on the language.) */
11343 /* If first arg is constant true, return it. */
11344 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11345 return fold_convert_loc (loc, type, arg0);
11346 /* FALLTHRU */
11347 case TRUTH_OR_EXPR:
11348 /* If either arg is constant zero, drop it. */
11349 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11350 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11351 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11352 /* Preserve sequence points. */
11353 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11354 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11355 /* If second arg is constant true, result is true, but we must
11356 evaluate first arg. */
11357 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11358 return omit_one_operand_loc (loc, type, arg1, arg0);
11359 /* Likewise for first arg, but note this only occurs here for
11360 TRUTH_OR_EXPR. */
11361 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11362 return omit_one_operand_loc (loc, type, arg0, arg1);
11364 /* !X || X is always true. */
11365 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11366 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11367 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11368 /* X || !X is always true. */
11369 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11370 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11371 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11373 /* (X && !Y) || (!X && Y) is X ^ Y */
11374 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
11375 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
11377 tree a0, a1, l0, l1, n0, n1;
11379 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11380 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11382 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11383 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11385 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
11386 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
11388 if ((operand_equal_p (n0, a0, 0)
11389 && operand_equal_p (n1, a1, 0))
11390 || (operand_equal_p (n0, a1, 0)
11391 && operand_equal_p (n1, a0, 0)))
11392 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
11395 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11396 != NULL_TREE)
11397 return tem;
11399 return NULL_TREE;
11401 case TRUTH_XOR_EXPR:
11402 /* If the second arg is constant zero, drop it. */
11403 if (integer_zerop (arg1))
11404 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11405 /* If the second arg is constant true, this is a logical inversion. */
11406 if (integer_onep (arg1))
11408 tem = invert_truthvalue_loc (loc, arg0);
11409 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
11411 /* Identical arguments cancel to zero. */
11412 if (operand_equal_p (arg0, arg1, 0))
11413 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11415 /* !X ^ X is always true. */
11416 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11417 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11418 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11420 /* X ^ !X is always true. */
11421 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11422 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11423 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11425 return NULL_TREE;
11427 case EQ_EXPR:
11428 case NE_EXPR:
11429 STRIP_NOPS (arg0);
11430 STRIP_NOPS (arg1);
11432 tem = fold_comparison (loc, code, type, op0, op1);
11433 if (tem != NULL_TREE)
11434 return tem;
11436 /* bool_var != 1 becomes !bool_var. */
11437 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11438 && code == NE_EXPR)
11439 return fold_convert_loc (loc, type,
11440 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11441 TREE_TYPE (arg0), arg0));
11443 /* bool_var == 0 becomes !bool_var. */
11444 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11445 && code == EQ_EXPR)
11446 return fold_convert_loc (loc, type,
11447 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11448 TREE_TYPE (arg0), arg0));
11450 /* !exp != 0 becomes !exp */
11451 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
11452 && code == NE_EXPR)
11453 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11455 /* If this is an EQ or NE comparison with zero and ARG0 is
11456 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11457 two operations, but the latter can be done in one less insn
11458 on machines that have only two-operand insns or on which a
11459 constant cannot be the first operand. */
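/* Illustration: ((1 << n) & flags) != 0 becomes
   ((flags >> n) & 1) != 0, which tests the same bit and can save
   an insn on two-operand machines. */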
11460 if (TREE_CODE (arg0) == BIT_AND_EXPR
11461 && integer_zerop (arg1))
11463 tree arg00 = TREE_OPERAND (arg0, 0);
11464 tree arg01 = TREE_OPERAND (arg0, 1);
11465 if (TREE_CODE (arg00) == LSHIFT_EXPR
11466 && integer_onep (TREE_OPERAND (arg00, 0)))
11468 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
11469 arg01, TREE_OPERAND (arg00, 1));
11470 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11471 build_int_cst (TREE_TYPE (arg0), 1));
11472 return fold_build2_loc (loc, code, type,
11473 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11474 arg1);
11476 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11477 && integer_onep (TREE_OPERAND (arg01, 0)))
11479 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
11480 arg00, TREE_OPERAND (arg01, 1));
11481 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11482 build_int_cst (TREE_TYPE (arg0), 1));
11483 return fold_build2_loc (loc, code, type,
11484 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11485 arg1);
11489 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11490 C1 is a valid shift constant, and C2 is a power of two, i.e.
11491 a single bit. */
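/* Illustration: ((X >> 3) & 4) != 0 becomes (X & 32) != 0, since
   C2 == 4 shifted left by C1 == 3 does not overflow. */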
11492 if (TREE_CODE (arg0) == BIT_AND_EXPR
11493 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11494 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11495 == INTEGER_CST
11496 && integer_pow2p (TREE_OPERAND (arg0, 1))
11497 && integer_zerop (arg1))
11499 tree itype = TREE_TYPE (arg0);
11500 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11501 prec = TYPE_PRECISION (itype);
11503 /* Check for a valid shift count. */
11504 if (wi::ltu_p (wi::to_wide (arg001), prec))
11506 tree arg01 = TREE_OPERAND (arg0, 1);
11507 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11508 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11509 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11510 can be rewritten as (X & (C2 << C1)) != 0. */
11511 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11513 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
11514 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
11515 return fold_build2_loc (loc, code, type, tem,
11516 fold_convert_loc (loc, itype, arg1));
11518 /* Otherwise, for signed (arithmetic) shifts,
11519 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11520 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11521 else if (!TYPE_UNSIGNED (itype))
11522 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11523 arg000, build_int_cst (itype, 0));
11524 /* Otherwise, for unsigned (logical) shifts,
11525 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11526 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11527 else
11528 return omit_one_operand_loc (loc, type,
11529 code == EQ_EXPR ? integer_one_node
11530 : integer_zero_node,
11531 arg000);
11535 /* If this is a comparison of a field, we may be able to simplify it. */
11536 if ((TREE_CODE (arg0) == COMPONENT_REF
11537 || TREE_CODE (arg0) == BIT_FIELD_REF)
11538 /* Handle the constant case even without -O
11539 to make sure the warnings are given. */
11540 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11542 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
11543 if (t1)
11544 return t1;
11547 /* Optimize comparisons of strlen vs zero to a compare of the
11548 first character of the string vs zero. To wit,
11549 strlen(ptr) == 0 => *ptr == 0
11550 strlen(ptr) != 0 => *ptr != 0
11551 Other cases should reduce to one of these two (or a constant)
11552 due to the return value of strlen being unsigned. */
11553 if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
11555 tree fndecl = get_callee_fndecl (arg0);
11557 if (fndecl
11558 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
11559 && call_expr_nargs (arg0) == 1
11560 && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
11561 == POINTER_TYPE))
11563 tree ptrtype
11564 = build_pointer_type (build_qualified_type (char_type_node,
11565 TYPE_QUAL_CONST));
11566 tree ptr = fold_convert_loc (loc, ptrtype,
11567 CALL_EXPR_ARG (arg0, 0));
11568 tree iref = build_fold_indirect_ref_loc (loc, ptr);
11569 return fold_build2_loc (loc, code, type, iref,
11570 build_int_cst (TREE_TYPE (iref), 0));
11574 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11575 of X. Similarly fold (X >> C) == 0 into X >= 0. */
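/* Illustration (assuming 32-bit int X): (X >> 31) != 0 is exactly
   X < 0, and (X >> 31) == 0 is X >= 0; an unsigned X is first
   converted to the corresponding signed type. */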
11576 if (TREE_CODE (arg0) == RSHIFT_EXPR
11577 && integer_zerop (arg1)
11578 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11580 tree arg00 = TREE_OPERAND (arg0, 0);
11581 tree arg01 = TREE_OPERAND (arg0, 1);
11582 tree itype = TREE_TYPE (arg00);
11583 if (wi::to_wide (arg01) == element_precision (itype) - 1)
11585 if (TYPE_UNSIGNED (itype))
11587 itype = signed_type_for (itype);
11588 arg00 = fold_convert_loc (loc, itype, arg00);
11590 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11591 type, arg00, build_zero_cst (itype));
11595 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11596 (X & C) == 0 when C is a single bit. */
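/* Illustration: (~X & 8) == 0 becomes (X & 8) != 0, since bit 3 of
   ~X is clear exactly when bit 3 of X is set. */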
11597 if (TREE_CODE (arg0) == BIT_AND_EXPR
11598 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11599 && integer_zerop (arg1)
11600 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11602 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
11603 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11604 TREE_OPERAND (arg0, 1));
11605 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11606 type, tem,
11607 fold_convert_loc (loc, TREE_TYPE (arg0),
11608 arg1));
11611 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11612 constant C is a power of two, i.e. a single bit. */
11613 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11614 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11615 && integer_zerop (arg1)
11616 && integer_pow2p (TREE_OPERAND (arg0, 1))
11617 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11618 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11620 tree arg00 = TREE_OPERAND (arg0, 0);
11621 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11622 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11625 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11626 when C is a power of two, i.e. a single bit. */
11627 if (TREE_CODE (arg0) == BIT_AND_EXPR
11628 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11629 && integer_zerop (arg1)
11630 && integer_pow2p (TREE_OPERAND (arg0, 1))
11631 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11632 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11634 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11635 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
11636 arg000, TREE_OPERAND (arg0, 1));
11637 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11638 tem, build_int_cst (TREE_TYPE (tem), 0));
11641 if (integer_zerop (arg1)
11642 && tree_expr_nonzero_p (arg0))
11644 tree res = constant_boolean_node (code==NE_EXPR, type);
11645 return omit_one_operand_loc (loc, type, res, arg0);
11648 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11649 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11651 tree arg00 = TREE_OPERAND (arg0, 0);
11652 tree arg01 = TREE_OPERAND (arg0, 1);
11653 tree arg10 = TREE_OPERAND (arg1, 0);
11654 tree arg11 = TREE_OPERAND (arg1, 1);
11655 tree itype = TREE_TYPE (arg0);
11657 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11658 operand_equal_p guarantees no side-effects so we don't need
11659 to use omit_one_operand on Z. */
11660 if (operand_equal_p (arg01, arg11, 0))
11661 return fold_build2_loc (loc, code, type, arg00,
11662 fold_convert_loc (loc, TREE_TYPE (arg00),
11663 arg10));
11664 if (operand_equal_p (arg01, arg10, 0))
11665 return fold_build2_loc (loc, code, type, arg00,
11666 fold_convert_loc (loc, TREE_TYPE (arg00),
11667 arg11));
11668 if (operand_equal_p (arg00, arg11, 0))
11669 return fold_build2_loc (loc, code, type, arg01,
11670 fold_convert_loc (loc, TREE_TYPE (arg01),
11671 arg10));
11672 if (operand_equal_p (arg00, arg10, 0))
11673 return fold_build2_loc (loc, code, type, arg01,
11674 fold_convert_loc (loc, TREE_TYPE (arg01),
11675 arg11));
11677 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11678 if (TREE_CODE (arg01) == INTEGER_CST
11679 && TREE_CODE (arg11) == INTEGER_CST)
11681 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11682 fold_convert_loc (loc, itype, arg11));
11683 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11684 return fold_build2_loc (loc, code, type, tem,
11685 fold_convert_loc (loc, itype, arg10));
11689 /* Attempt to simplify equality/inequality comparisons of complex
11690 values. Only lower the comparison if the result is known or
11691 can be simplified to a single scalar comparison. */
11692 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11693 || TREE_CODE (arg0) == COMPLEX_CST)
11694 && (TREE_CODE (arg1) == COMPLEX_EXPR
11695 || TREE_CODE (arg1) == COMPLEX_CST))
11697 tree real0, imag0, real1, imag1;
11698 tree rcond, icond;
11700 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11702 real0 = TREE_OPERAND (arg0, 0);
11703 imag0 = TREE_OPERAND (arg0, 1);
11705 else
11707 real0 = TREE_REALPART (arg0);
11708 imag0 = TREE_IMAGPART (arg0);
11711 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11713 real1 = TREE_OPERAND (arg1, 0);
11714 imag1 = TREE_OPERAND (arg1, 1);
11716 else
11718 real1 = TREE_REALPART (arg1);
11719 imag1 = TREE_IMAGPART (arg1);
11722 rcond = fold_binary_loc (loc, code, type, real0, real1);
11723 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11725 if (integer_zerop (rcond))
11727 if (code == EQ_EXPR)
11728 return omit_two_operands_loc (loc, type, boolean_false_node,
11729 imag0, imag1);
11730 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11732 else
11734 if (code == NE_EXPR)
11735 return omit_two_operands_loc (loc, type, boolean_true_node,
11736 imag0, imag1);
11737 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11741 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11742 if (icond && TREE_CODE (icond) == INTEGER_CST)
11744 if (integer_zerop (icond))
11746 if (code == EQ_EXPR)
11747 return omit_two_operands_loc (loc, type, boolean_false_node,
11748 real0, real1);
11749 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11751 else
11753 if (code == NE_EXPR)
11754 return omit_two_operands_loc (loc, type, boolean_true_node,
11755 real0, real1);
11756 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11761 return NULL_TREE;
11763 case LT_EXPR:
11764 case GT_EXPR:
11765 case LE_EXPR:
11766 case GE_EXPR:
11767 tem = fold_comparison (loc, code, type, op0, op1);
11768 if (tem != NULL_TREE)
11769 return tem;
11771 /* Transform comparisons of the form X +- C CMP X. */
11772 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11773 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11774 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11775 && !HONOR_SNANS (arg0))
11777 tree arg01 = TREE_OPERAND (arg0, 1);
11778 enum tree_code code0 = TREE_CODE (arg0);
11779 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11781 /* (X - c) > X becomes false. */
11782 if (code == GT_EXPR
11783 && ((code0 == MINUS_EXPR && is_positive >= 0)
11784 || (code0 == PLUS_EXPR && is_positive <= 0)))
11785 return constant_boolean_node (0, type);
11787 /* Likewise (X + c) < X becomes false. */
11788 if (code == LT_EXPR
11789 && ((code0 == PLUS_EXPR && is_positive >= 0)
11790 || (code0 == MINUS_EXPR && is_positive <= 0)))
11791 return constant_boolean_node (0, type);
11793 /* Convert (X - c) <= X to true. */
11794 if (!HONOR_NANS (arg1)
11795 && code == LE_EXPR
11796 && ((code0 == MINUS_EXPR && is_positive >= 0)
11797 || (code0 == PLUS_EXPR && is_positive <= 0)))
11798 return constant_boolean_node (1, type);
11800 /* Convert (X + c) >= X to true. */
11801 if (!HONOR_NANS (arg1)
11802 && code == GE_EXPR
11803 && ((code0 == PLUS_EXPR && is_positive >= 0)
11804 || (code0 == MINUS_EXPR && is_positive <= 0)))
11805 return constant_boolean_node (1, type);
11808 /* If we are comparing an ABS_EXPR with a constant, we can
11809 convert all the cases into explicit comparisons, but they may
11810 well not be faster than doing the ABS and one comparison.
11811 But ABS (X) <= C is a range comparison, which becomes a subtraction
11812 and a comparison, and is probably faster. */
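/* Illustration: ABS (X) <= 5 becomes X >= -5 && X <= 5, provided
   negating the bound does not overflow. */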
11813 if (code == LE_EXPR
11814 && TREE_CODE (arg1) == INTEGER_CST
11815 && TREE_CODE (arg0) == ABS_EXPR
11816 && ! TREE_SIDE_EFFECTS (arg0)
11817 && (tem = negate_expr (arg1)) != 0
11818 && TREE_CODE (tem) == INTEGER_CST
11819 && !TREE_OVERFLOW (tem))
11820 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11821 build2 (GE_EXPR, type,
11822 TREE_OPERAND (arg0, 0), tem),
11823 build2 (LE_EXPR, type,
11824 TREE_OPERAND (arg0, 0), arg1));
11826 /* Convert ABS_EXPR<x> >= 0 to true. */
11827 strict_overflow_p = false;
11828 if (code == GE_EXPR
11829 && (integer_zerop (arg1)
11830 || (! HONOR_NANS (arg0)
11831 && real_zerop (arg1)))
11832 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11834 if (strict_overflow_p)
11835 fold_overflow_warning (("assuming signed overflow does not occur "
11836 "when simplifying comparison of "
11837 "absolute value and zero"),
11838 WARN_STRICT_OVERFLOW_CONDITIONAL);
11839 return omit_one_operand_loc (loc, type,
11840 constant_boolean_node (true, type),
11841 arg0);
11844 /* Convert ABS_EXPR<x> < 0 to false. */
11845 strict_overflow_p = false;
11846 if (code == LT_EXPR
11847 && (integer_zerop (arg1) || real_zerop (arg1))
11848 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11850 if (strict_overflow_p)
11851 fold_overflow_warning (("assuming signed overflow does not occur "
11852 "when simplifying comparison of "
11853 "absolute value and zero"),
11854 WARN_STRICT_OVERFLOW_CONDITIONAL);
11855 return omit_one_operand_loc (loc, type,
11856 constant_boolean_node (false, type),
11857 arg0);
11860 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11861 and similarly for >= into !=. */
11862 if ((code == LT_EXPR || code == GE_EXPR)
11863 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11864 && TREE_CODE (arg1) == LSHIFT_EXPR
11865 && integer_onep (TREE_OPERAND (arg1, 0)))
11866 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11867 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11868 TREE_OPERAND (arg1, 1)),
11869 build_zero_cst (TREE_TYPE (arg0)));
11871 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
11872 otherwise Y might be >= # of bits in X's type and thus e.g.
11873 (unsigned char) (1 << Y) for Y 15 might be 0.
11874 If the cast is widening, then 1 << Y should have unsigned type,
11875 otherwise if Y is the number of bits in the signed shift type minus 1,
11876 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11877 31 might be 0xffffffff80000000. */
11878 if ((code == LT_EXPR || code == GE_EXPR)
11879 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11880 && CONVERT_EXPR_P (arg1)
11881 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11882 && (element_precision (TREE_TYPE (arg1))
11883 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11884 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11885 || (element_precision (TREE_TYPE (arg1))
11886 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11887 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11889 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11890 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11891 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11892 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11893 build_zero_cst (TREE_TYPE (arg0)));
11896 return NULL_TREE;
11898 case UNORDERED_EXPR:
11899 case ORDERED_EXPR:
11900 case UNLT_EXPR:
11901 case UNLE_EXPR:
11902 case UNGT_EXPR:
11903 case UNGE_EXPR:
11904 case UNEQ_EXPR:
11905 case LTGT_EXPR:
11906 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11908 tree targ0 = strip_float_extensions (arg0);
11909 tree targ1 = strip_float_extensions (arg1);
11910 tree newtype = TREE_TYPE (targ0);
11912 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11913 newtype = TREE_TYPE (targ1);
11915 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11916 return fold_build2_loc (loc, code, type,
11917 fold_convert_loc (loc, newtype, targ0),
11918 fold_convert_loc (loc, newtype, targ1));
11921 return NULL_TREE;
11923 case COMPOUND_EXPR:
11924 /* When pedantic, a compound expression can be neither an lvalue
11925 nor an integer constant expression. */
11926 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11927 return NULL_TREE;
11928 /* Don't let (0, 0) be a null pointer constant. */
11929 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11930 : fold_convert_loc (loc, type, arg1);
11931 return pedantic_non_lvalue_loc (loc, tem);
11933 case ASSERT_EXPR:
11934 /* An ASSERT_EXPR should never be passed to fold_binary. */
11935 gcc_unreachable ();
11937 default:
11938 return NULL_TREE;
11939 } /* switch (code) */
11942 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11943 ((A & N) + B) & M -> (A + B) & M
11944 Similarly if (N & M) == 0,
11945 ((A | N) + B) & M -> (A + B) & M
11946 and for - instead of + (or unary - instead of +)
11947 and/or ^ instead of |.
11948 If B is constant and (B & M) == 0, fold into A & M.
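For illustration: with M == 7, ((A & 15) + B) & 7 -> (A + B) & 7
because (15 & 7) == 7, and ((A | 8) + B) & 7 -> (A + B) & 7
because (8 & 7) == 0.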
11950 This function is a helper for match.pd patterns. Return a non-NULL
11951 type in which the simplified operation should be performed, but only
11952 if an optimization is possible.
11954 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
11955 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
11956 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
11957 +/-. */
11958 tree
11959 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
11960 tree arg00, enum tree_code code00, tree arg000, tree arg001,
11961 tree arg01, enum tree_code code01, tree arg010, tree arg011,
11962 tree *pmop)
11964 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
11965 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
11966 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11967 if (~cst1 == 0
11968 || (cst1 & (cst1 + 1)) != 0
11969 || !INTEGRAL_TYPE_P (type)
11970 || (!TYPE_OVERFLOW_WRAPS (type)
11971 && TREE_CODE (type) != INTEGER_TYPE)
11972 || (wi::max_value (type) & cst1) != cst1)
11973 return NULL_TREE;
11975 enum tree_code codes[2] = { code00, code01 };
11976 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
11977 int which = 0;
11978 wide_int cst0;
11980 /* Now we know that arg0 is (C + D) or (C - D) or -C and
11981 arg1 (M) is == (1LL << cst) - 1.
11982 Store C into PMOP[0] and D into PMOP[1]. */
11983 pmop[0] = arg00;
11984 pmop[1] = arg01;
11985 which = code != NEGATE_EXPR;
11987 for (; which >= 0; which--)
11988 switch (codes[which])
11990 case BIT_AND_EXPR:
11991 case BIT_IOR_EXPR:
11992 case BIT_XOR_EXPR:
11993 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
11994 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
11995 if (codes[which] == BIT_AND_EXPR)
11997 if (cst0 != cst1)
11998 break;
12000 else if (cst0 != 0)
12001 break;
12002 /* If C or D is of the form (A & N) where
12003 (N & M) == M, or of the form (A | N) or
12004 (A ^ N) where (N & M) == 0, replace it with A. */
12005 pmop[which] = arg0xx[2 * which];
12006 break;
12007 case ERROR_MARK:
12008 if (TREE_CODE (pmop[which]) != INTEGER_CST)
12009 break;
12010 /* If C or D is a N where (N & M) == 0, it can be
12011 omitted (replaced with 0). */
12012 if ((code == PLUS_EXPR
12013 || (code == MINUS_EXPR && which == 0))
12014 && (cst1 & wi::to_wide (pmop[which])) == 0)
12015 pmop[which] = build_int_cst (type, 0);
12016 /* Similarly, with C - N where (-N & M) == 0. */
12017 if (code == MINUS_EXPR
12018 && which == 1
12019 && (cst1 & -wi::to_wide (pmop[which])) == 0)
12020 pmop[which] = build_int_cst (type, 0);
12021 break;
12022 default:
12023 gcc_unreachable ();
12026 /* Only build anything new if we optimized one or both arguments above. */
12027 if (pmop[0] == arg00 && pmop[1] == arg01)
12028 return NULL_TREE;
12030 if (TYPE_OVERFLOW_WRAPS (type))
12031 return type;
12032 else
12033 return unsigned_type_for (type);
12036 /* Used by contains_label_[p1]. */
12038 struct contains_label_data
12040 hash_set<tree> *pset;
12041 bool inside_switch_p;
12044 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12045 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12046 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
12048 static tree
12049 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12051 contains_label_data *d = (contains_label_data *) data;
12052 switch (TREE_CODE (*tp))
12054 case LABEL_EXPR:
12055 return *tp;
12057 case CASE_LABEL_EXPR:
12058 if (!d->inside_switch_p)
12059 return *tp;
12060 return NULL_TREE;
12062 case SWITCH_EXPR:
12063 if (!d->inside_switch_p)
12065 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12066 return *tp;
12067 d->inside_switch_p = true;
12068 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12069 return *tp;
12070 d->inside_switch_p = false;
12071 *walk_subtrees = 0;
12073 return NULL_TREE;
12075 case GOTO_EXPR:
12076 *walk_subtrees = 0;
12077 return NULL_TREE;
12079 default:
12080 return NULL_TREE;
12084 /* Return whether the sub-tree ST contains a label which is accessible from
12085 outside the sub-tree. */
12087 static bool
12088 contains_label_p (tree st)
12090 hash_set<tree> pset;
12091 contains_label_data data = { &pset, false };
12092 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12095 /* Fold a ternary expression of code CODE and type TYPE with operands
12096 OP0, OP1, and OP2. Return the folded expression if folding is
12097 successful. Otherwise, return NULL_TREE. */
12099 tree
12100 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12101 tree op0, tree op1, tree op2)
12103 tree tem;
12104 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12105 enum tree_code_class kind = TREE_CODE_CLASS (code);
12107 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12108 && TREE_CODE_LENGTH (code) == 3);
12110 /* If this is a commutative operation, and OP0 is a constant, move it
12111 to OP1 to reduce the number of tests below. */
12112 if (commutative_ternary_tree_code (code)
12113 && tree_swap_operands_p (op0, op1))
12114 return fold_build3_loc (loc, code, type, op1, op0, op2);
12116 tem = generic_simplify (loc, code, type, op0, op1, op2);
12117 if (tem)
12118 return tem;
12120 /* Strip any conversions that don't change the mode. This is safe
12121 for every expression, except for a comparison expression because
12122 its signedness is derived from its operands. So, in the latter
12123 case, only strip conversions that don't change the signedness.
12125 Note that this is done as an internal manipulation within the
12126 constant folder, in order to find the simplest representation of
12127 the arguments so that their form can be studied. In any case,
12128 the appropriate type conversions should be put back in the tree
12129 that will get out of the constant folder. */
12130 if (op0)
12132 arg0 = op0;
12133 STRIP_NOPS (arg0);
12136 if (op1)
12138 arg1 = op1;
12139 STRIP_NOPS (arg1);
12142 if (op2)
12144 arg2 = op2;
12145 STRIP_NOPS (arg2);
12148 switch (code)
12150 case COMPONENT_REF:
12151 if (TREE_CODE (arg0) == CONSTRUCTOR
12152 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12154 unsigned HOST_WIDE_INT idx;
12155 tree field, value;
12156 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12157 if (field == arg1)
12158 return value;
12160 return NULL_TREE;
12162 case COND_EXPR:
12163 case VEC_COND_EXPR:
12164 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12165 so all simple results must be passed through pedantic_non_lvalue. */
12166 if (TREE_CODE (arg0) == INTEGER_CST)
12168 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12169 tem = integer_zerop (arg0) ? op2 : op1;
12170 /* Only optimize constant conditions when the selected branch
12171 has the same type as the COND_EXPR. This avoids optimizing
12172 away "c ? x : throw", where the throw has a void type.
12173 Avoid throwing away an operand which contains a label. */
12174 if ((!TREE_SIDE_EFFECTS (unused_op)
12175 || !contains_label_p (unused_op))
12176 && (! VOID_TYPE_P (TREE_TYPE (tem))
12177 || VOID_TYPE_P (type)))
12178 return pedantic_non_lvalue_loc (loc, tem);
12179 return NULL_TREE;
12181 else if (TREE_CODE (arg0) == VECTOR_CST)
12183 unsigned HOST_WIDE_INT nelts;
12184 if ((TREE_CODE (arg1) == VECTOR_CST
12185 || TREE_CODE (arg1) == CONSTRUCTOR)
12186 && (TREE_CODE (arg2) == VECTOR_CST
12187 || TREE_CODE (arg2) == CONSTRUCTOR)
12188 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12190 vec_perm_builder sel (nelts, nelts, 1);
12191 for (unsigned int i = 0; i < nelts; i++)
12193 tree val = VECTOR_CST_ELT (arg0, i);
12194 if (integer_all_onesp (val))
12195 sel.quick_push (i);
12196 else if (integer_zerop (val))
12197 sel.quick_push (nelts + i);
12198 else /* Currently unreachable. */
12199 return NULL_TREE;
12201 vec_perm_indices indices (sel, 2, nelts);
12202 tree t = fold_vec_perm (type, arg1, arg2, indices);
12203 if (t != NULL_TREE)
12204 return t;
12208 /* If we have A op B ? A : C, we may be able to convert this to a
12209 simpler expression, depending on the operation and the values
12210 of B and C. Signed zeros prevent all of these transformations,
12211 for reasons given above each one.
12213 Also try swapping the arguments and inverting the conditional. */
12214 if (COMPARISON_CLASS_P (arg0)
12215 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12216 && !HONOR_SIGNED_ZEROS (element_mode (op1)))
12218 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
12219 if (tem)
12220 return tem;
12223 if (COMPARISON_CLASS_P (arg0)
12224 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12225 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
12227 location_t loc0 = expr_location_or (arg0, loc);
12228 tem = fold_invert_truthvalue (loc0, arg0);
12229 if (tem && COMPARISON_CLASS_P (tem))
12231 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
12232 if (tem)
12233 return tem;
12237 /* If the second operand is simpler than the third, swap them
12238 since that produces better jump optimization results. */
12239 if (truth_value_p (TREE_CODE (arg0))
12240 && tree_swap_operands_p (op1, op2))
12242 location_t loc0 = expr_location_or (arg0, loc);
12243 /* See if this can be inverted. If it can't, possibly because
12244 it was a floating-point inequality comparison, don't do
12245 anything. */
12246 tem = fold_invert_truthvalue (loc0, arg0);
12247 if (tem)
12248 return fold_build3_loc (loc, code, type, tem, op2, op1);
12251 /* Convert A ? 1 : 0 to simply A. */
12252 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12253 : (integer_onep (op1)
12254 && !VECTOR_TYPE_P (type)))
12255 && integer_zerop (op2)
12256 /* If we try to convert OP0 to our type, the
12257 call to fold will try to move the conversion inside
12258 a COND, which will recurse. In that case, the COND_EXPR
12259 is probably the best choice, so leave it alone. */
12260 && type == TREE_TYPE (arg0))
12261 return pedantic_non_lvalue_loc (loc, arg0);
12263 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12264 over COND_EXPR in cases such as floating point comparisons. */
12265 if (integer_zerop (op1)
12266 && code == COND_EXPR
12267 && integer_onep (op2)
12268 && !VECTOR_TYPE_P (type)
12269 && truth_value_p (TREE_CODE (arg0)))
12270 return pedantic_non_lvalue_loc (loc,
12271 fold_convert_loc (loc, type,
12272 invert_truthvalue_loc (loc,
12273 arg0)));
12275 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
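/* Illustration (assuming 32-bit int A): A < 0 ? 0x80000000 : 0
   folds to A & 0x80000000. */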
12276 if (TREE_CODE (arg0) == LT_EXPR
12277 && integer_zerop (TREE_OPERAND (arg0, 1))
12278 && integer_zerop (op2)
12279 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12281 /* sign_bit_p looks through both zero and sign extensions,
12282 but for this optimization only sign extensions are
12283 usable. */
12284 tree tem2 = TREE_OPERAND (arg0, 0);
12285 while (tem != tem2)
12287 if (TREE_CODE (tem2) != NOP_EXPR
12288 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12290 tem = NULL_TREE;
12291 break;
12293 tem2 = TREE_OPERAND (tem2, 0);
12295 /* sign_bit_p only checks ARG1 bits within A's precision.
12296 If <sign bit of A> has wider type than A, bits outside
12297 of A's precision in <sign bit of A> need to be checked.
12298 If they are all 0, this optimization needs to be done
12299 in unsigned A's type; if they are all 1, in signed A's type;
12300 otherwise this can't be done. */
12301 if (tem
12302 && TYPE_PRECISION (TREE_TYPE (tem))
12303 < TYPE_PRECISION (TREE_TYPE (arg1))
12304 && TYPE_PRECISION (TREE_TYPE (tem))
12305 < TYPE_PRECISION (type))
12307 int inner_width, outer_width;
12308 tree tem_type;
12310 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12311 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12312 if (outer_width > TYPE_PRECISION (type))
12313 outer_width = TYPE_PRECISION (type);
12315 wide_int mask = wi::shifted_mask
12316 (inner_width, outer_width - inner_width, false,
12317 TYPE_PRECISION (TREE_TYPE (arg1)));
12319 wide_int common = mask & wi::to_wide (arg1);
12320 if (common == mask)
12322 tem_type = signed_type_for (TREE_TYPE (tem));
12323 tem = fold_convert_loc (loc, tem_type, tem);
12325 else if (common == 0)
12327 tem_type = unsigned_type_for (TREE_TYPE (tem));
12328 tem = fold_convert_loc (loc, tem_type, tem);
12330 else
12331 tem = NULL;
12334 if (tem)
12335 return
12336 fold_convert_loc (loc, type,
12337 fold_build2_loc (loc, BIT_AND_EXPR,
12338 TREE_TYPE (tem), tem,
12339 fold_convert_loc (loc,
12340 TREE_TYPE (tem),
12341 arg1)));
12344 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12345 already handled above. */
12346 if (TREE_CODE (arg0) == BIT_AND_EXPR
12347 && integer_onep (TREE_OPERAND (arg0, 1))
12348 && integer_zerop (op2)
12349 && integer_pow2p (arg1))
12351 tree tem = TREE_OPERAND (arg0, 0);
12352 STRIP_NOPS (tem);
12353 if (TREE_CODE (tem) == RSHIFT_EXPR
12354 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
12355 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
12356 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
12357 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12358 fold_convert_loc (loc, type,
12359 TREE_OPERAND (tem, 0)),
12360 op1);
12363 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12364 is probably obsolete because the first operand should be a
12365 truth value (that's why we have the two cases above), but let's
12366 leave it in until we can confirm this for all front-ends. */
12367 if (integer_zerop (op2)
12368 && TREE_CODE (arg0) == NE_EXPR
12369 && integer_zerop (TREE_OPERAND (arg0, 1))
12370 && integer_pow2p (arg1)
12371 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12372 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12373 arg1, OEP_ONLY_CONST)
12374 /* operand_equal_p compares just value, not precision, so e.g.
12375 arg1 could be 8-bit -128 and be a power of two, while the
12376 BIT_AND_EXPR's second operand is 32-bit -128, which is not a
12377 power of two (or vice versa). */
12378 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
12379 return pedantic_non_lvalue_loc (loc,
12380 fold_convert_loc (loc, type,
12381 TREE_OPERAND (arg0,
12382 0)));
12384 /* Disable the transformations below for vectors, since
12385 fold_binary_op_with_conditional_arg may undo them immediately,
12386 yielding an infinite loop. */
12387 if (code == VEC_COND_EXPR)
12388 return NULL_TREE;
12390 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12391 if (integer_zerop (op2)
12392 && truth_value_p (TREE_CODE (arg0))
12393 && truth_value_p (TREE_CODE (arg1))
12394 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12395 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
12396 : TRUTH_ANDIF_EXPR,
12397 type, fold_convert_loc (loc, type, arg0), op1);
12399 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12400 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
12401 && truth_value_p (TREE_CODE (arg0))
12402 && truth_value_p (TREE_CODE (arg1))
12403 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12405 location_t loc0 = expr_location_or (arg0, loc);
12406 /* Only perform transformation if ARG0 is easily inverted. */
12407 tem = fold_invert_truthvalue (loc0, arg0);
12408 if (tem)
12409 return fold_build2_loc (loc, code == VEC_COND_EXPR
12410 ? BIT_IOR_EXPR
12411 : TRUTH_ORIF_EXPR,
12412 type, fold_convert_loc (loc, type, tem),
12413 op1);
12416 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12417 if (integer_zerop (arg1)
12418 && truth_value_p (TREE_CODE (arg0))
12419 && truth_value_p (TREE_CODE (op2))
12420 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12422 location_t loc0 = expr_location_or (arg0, loc);
12423 /* Only perform transformation if ARG0 is easily inverted. */
12424 tem = fold_invert_truthvalue (loc0, arg0);
12425 if (tem)
12426 return fold_build2_loc (loc, code == VEC_COND_EXPR
12427 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
12428 type, fold_convert_loc (loc, type, tem),
12429 op2);
12432 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12433 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
12434 && truth_value_p (TREE_CODE (arg0))
12435 && truth_value_p (TREE_CODE (op2))
12436 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12437 return fold_build2_loc (loc, code == VEC_COND_EXPR
12438 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
12439 type, fold_convert_loc (loc, type, arg0), op2);
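/* Taken together, the four truth-value rewrites above map, for scalar
   booleans a and b:
     a ? b : 0  ->  a && b          a ? b : 1  ->  !a || b
     a ? 0 : b  ->  !a && b         a ? 1 : b  ->  a || b
   while VEC_COND_EXPR uses BIT_AND_EXPR/BIT_IOR_EXPR instead, since
   vector "true" is all-ones rather than 1.  */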
12441 return NULL_TREE;
12443 case CALL_EXPR:
12444 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12445 of fold_ternary on them. */
12446 gcc_unreachable ();
12448 case BIT_FIELD_REF:
12449 if (TREE_CODE (arg0) == VECTOR_CST
12450 && (type == TREE_TYPE (TREE_TYPE (arg0))
12451 || (VECTOR_TYPE_P (type)
12452 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
12453 && tree_fits_uhwi_p (op1)
12454 && tree_fits_uhwi_p (op2))
12456 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
12457 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
12458 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
12459 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
12461 if (n != 0
12462 && (idx % width) == 0
12463 && (n % width) == 0
12464 && known_le ((idx + n) / width,
12465 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
12467 idx = idx / width;
12468 n = n / width;
12470 if (TREE_CODE (arg0) == VECTOR_CST)
12472 if (n == 1)
12474 tem = VECTOR_CST_ELT (arg0, idx);
12475 if (VECTOR_TYPE_P (type))
12476 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
12477 return tem;
12480 tree_vector_builder vals (type, n, 1);
12481 for (unsigned i = 0; i < n; ++i)
12482 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
12483 return vals.build ();
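/* A hypothetical example: for a V4SI constant v = { 10, 20, 30, 40 },
   BIT_FIELD_REF <v, 32, 64> requests 32 bits at bit offset 64; with
   32-bit elements that gives idx = 2, n = 1, so the fold returns the
   single element 30.  */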
12488 /* On constants we can use native encode/interpret to constant
12489 fold (nearly) all BIT_FIELD_REFs. */
12490 if (CONSTANT_CLASS_P (arg0)
12491 && can_native_interpret_type_p (type)
12492 && BITS_PER_UNIT == 8
12493 && tree_fits_uhwi_p (op1)
12494 && tree_fits_uhwi_p (op2))
12496 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12497 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
12498 /* Limit ourselves to a reasonable amount of work. To relax the
12499 other limitations we would need bit-shifting of the buffer
12500 and rounding up the size. */
12501 if (bitpos % BITS_PER_UNIT == 0
12502 && bitsize % BITS_PER_UNIT == 0
12503 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
12505 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
12506 unsigned HOST_WIDE_INT len
12507 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
12508 bitpos / BITS_PER_UNIT);
12509 if (len > 0
12510 && len * BITS_PER_UNIT >= bitsize)
12512 tree v = native_interpret_expr (type, b,
12513 bitsize / BITS_PER_UNIT);
12514 if (v)
12515 return v;
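/* A sketch of this buffer-based path, assuming 8-bit units and a
   little-endian target: for the 32-bit constant 0x11223344,
   BIT_FIELD_REF <c, 16, 0> encodes the low two bytes { 0x44, 0x33 }
   into B and reinterprets them as the 16-bit value 0x3344.  */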
12520 return NULL_TREE;
12522 case VEC_PERM_EXPR:
12523 /* Perform constant folding of VEC_PERM_EXPR. */
12524 if (TREE_CODE (arg2) == VECTOR_CST
12525 && TREE_CODE (op0) == VECTOR_CST
12526 && TREE_CODE (op1) == VECTOR_CST)
12528 /* Build a vector of integers from the tree mask. */
12529 vec_perm_builder builder;
12530 if (!tree_to_vec_perm_builder (&builder, arg2))
12531 return NULL_TREE;
12533 /* Create a vec_perm_indices for the integer vector. */
12534 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
12535 bool single_arg = (op0 == op1);
12536 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
12537 return fold_vec_perm (type, op0, op1, sel);
12539 return NULL_TREE;
12541 case BIT_INSERT_EXPR:
12542 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
12543 if (TREE_CODE (arg0) == INTEGER_CST
12544 && TREE_CODE (arg1) == INTEGER_CST)
12546 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12547 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
12548 wide_int tem = (wi::to_wide (arg0)
12549 & wi::shifted_mask (bitpos, bitsize, true,
12550 TYPE_PRECISION (type)));
12551 wide_int tem2
12552 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
12553 bitsize), bitpos);
12554 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
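/* A worked (hypothetical) example of the wide_int arithmetic above:
   inserting the 8-bit value 0xab at bitpos 8 of the 32-bit constant
   0x11223344 computes
     tem  = 0x11223344 & ~(0xff << 8) = 0x11220044
     tem2 = zext (0xab, 8) << 8       = 0x0000ab00
   and returns tem | tem2 = 0x1122ab44.  */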
12556 else if (TREE_CODE (arg0) == VECTOR_CST
12557 && CONSTANT_CLASS_P (arg1)
12558 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
12559 TREE_TYPE (arg1)))
12561 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12562 unsigned HOST_WIDE_INT elsize
12563 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
12564 if (bitpos % elsize == 0)
12566 unsigned k = bitpos / elsize;
12567 unsigned HOST_WIDE_INT nelts;
12568 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
12569 return arg0;
12570 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
12572 tree_vector_builder elts (type, nelts, 1);
12573 elts.quick_grow (nelts);
12574 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
12575 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
12576 return elts.build ();
12580 return NULL_TREE;
12582 default:
12583 return NULL_TREE;
12584 } /* switch (code) */
12587 /* Get the element at ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
12588 of an array (or vector). If CTOR_IDX is non-NULL, *CTOR_IDX is updated
12589 with the constructor element index of the value returned. If the
12590 element is not found, NULL_TREE is returned and *CTOR_IDX is updated to
12591 the index of the element after the ACCESS_INDEX position (which
12592 may be outside of the CTOR array). */
12594 tree
12595 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
12596 unsigned *ctor_idx)
12598 tree index_type = NULL_TREE;
12599 signop index_sgn = UNSIGNED;
12600 offset_int low_bound = 0;
12602 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
12604 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
12605 if (domain_type && TYPE_MIN_VALUE (domain_type))
12607 /* Static constructors for variably sized objects make no sense. */
12608 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
12609 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
12610 /* ??? When it is obvious that the range is signed, treat it so. */
12611 if (TYPE_UNSIGNED (index_type)
12612 && TYPE_MAX_VALUE (domain_type)
12613 && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
12614 TYPE_MIN_VALUE (domain_type)))
12616 index_sgn = SIGNED;
12617 low_bound
12618 = offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
12619 SIGNED);
12621 else
12623 index_sgn = TYPE_SIGN (index_type);
12624 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
12629 if (index_type)
12630 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
12631 index_sgn);
12633 offset_int index = low_bound;
12634 if (index_type)
12635 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12637 offset_int max_index = index;
12638 unsigned cnt;
12639 tree cfield, cval;
12640 bool first_p = true;
12642 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
12644 /* An array constructor might explicitly set the index, specify a range,
12645 or leave the index NULL, meaning that it is the next index after the
12646 previous one. */
12647 if (cfield)
12649 if (TREE_CODE (cfield) == INTEGER_CST)
12650 max_index = index
12651 = offset_int::from (wi::to_wide (cfield), index_sgn);
12652 else
12654 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
12655 index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
12656 index_sgn);
12657 max_index
12658 = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
12659 index_sgn);
12660 gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
12663 else if (!first_p)
12665 index = max_index + 1;
12666 if (index_type)
12667 index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12668 gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
12669 max_index = index;
12671 else
12672 first_p = false;
12674 /* Do we have a match? */
12675 if (wi::cmp (access_index, index, index_sgn) >= 0)
12677 if (wi::cmp (access_index, max_index, index_sgn) <= 0)
12679 if (ctor_idx)
12680 *ctor_idx = cnt;
12681 return cval;
12684 else if (in_gimple_form)
12685 /* We're past the element we are searching for. Note that during
12686 parsing the elements might not be sorted.
12687 ??? We should use a binary search and a flag on the
12688 CONSTRUCTOR as to whether elements are sorted in declaration
12689 order. */
12690 break;
12692 if (ctor_idx)
12693 *ctor_idx = cnt;
12694 return NULL_TREE;
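/* For instance, given the hypothetical GNU C initializer
     int a[8] = { 1, [2 ... 4] = 7, 9 };
   the CONSTRUCTOR elements are { 0: 1, RANGE_EXPR (2, 4): 7, 5: 9 },
   so an ACCESS_INDEX of 3 lands inside the RANGE_EXPR and returns 7,
   while an ACCESS_INDEX of 6 returns NULL_TREE (an implicit zero).  */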
12697 /* Perform constant folding and related simplification of EXPR.
12698 The related simplifications include x*1 => x, x*0 => 0, etc.,
12699 and application of the associative law.
12700 NOP_EXPR conversions may be removed freely (as long as we
12701 are careful not to change the type of the overall expression).
12702 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12703 but we can constant-fold them if they have constant operands. */
12705 #ifdef ENABLE_FOLD_CHECKING
12706 # define fold(x) fold_1 (x)
12707 static tree fold_1 (tree);
12708 static
12709 #endif
12710 tree
12711 fold (tree expr)
12713 const tree t = expr;
12714 enum tree_code code = TREE_CODE (t);
12715 enum tree_code_class kind = TREE_CODE_CLASS (code);
12716 tree tem;
12717 location_t loc = EXPR_LOCATION (expr);
12719 /* Return right away if a constant. */
12720 if (kind == tcc_constant)
12721 return t;
12723 /* CALL_EXPR-like objects with variable numbers of operands are
12724 treated specially. */
12725 if (kind == tcc_vl_exp)
12727 if (code == CALL_EXPR)
12729 tem = fold_call_expr (loc, expr, false);
12730 return tem ? tem : expr;
12732 return expr;
12735 if (IS_EXPR_CODE_CLASS (kind))
12737 tree type = TREE_TYPE (t);
12738 tree op0, op1, op2;
12740 switch (TREE_CODE_LENGTH (code))
12742 case 1:
12743 op0 = TREE_OPERAND (t, 0);
12744 tem = fold_unary_loc (loc, code, type, op0);
12745 return tem ? tem : expr;
12746 case 2:
12747 op0 = TREE_OPERAND (t, 0);
12748 op1 = TREE_OPERAND (t, 1);
12749 tem = fold_binary_loc (loc, code, type, op0, op1);
12750 return tem ? tem : expr;
12751 case 3:
12752 op0 = TREE_OPERAND (t, 0);
12753 op1 = TREE_OPERAND (t, 1);
12754 op2 = TREE_OPERAND (t, 2);
12755 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12756 return tem ? tem : expr;
12757 default:
12758 break;
12762 switch (code)
12764 case ARRAY_REF:
12766 tree op0 = TREE_OPERAND (t, 0);
12767 tree op1 = TREE_OPERAND (t, 1);
12769 if (TREE_CODE (op1) == INTEGER_CST
12770 && TREE_CODE (op0) == CONSTRUCTOR
12771 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12773 tree val = get_array_ctor_element_at_index (op0,
12774 wi::to_offset (op1));
12775 if (val)
12776 return val;
12779 return t;
12782 /* Return a VECTOR_CST if possible. */
12783 case CONSTRUCTOR:
12785 tree type = TREE_TYPE (t);
12786 if (TREE_CODE (type) != VECTOR_TYPE)
12787 return t;
12789 unsigned i;
12790 tree val;
12791 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12792 if (! CONSTANT_CLASS_P (val))
12793 return t;
12795 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12798 case CONST_DECL:
12799 return fold (DECL_INITIAL (t));
12801 default:
12802 return t;
12803 } /* switch (code) */
12806 #ifdef ENABLE_FOLD_CHECKING
12807 #undef fold
12809 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12810 hash_table<nofree_ptr_hash<const tree_node> > *);
12811 static void fold_check_failed (const_tree, const_tree);
12812 void print_fold_checksum (const_tree);
12814 /* When GCC is configured with --enable-checking=fold, compute a digest
12815 of EXPR before and after the actual fold call to verify that fold
12816 did not accidentally change the original expr. */
12818 tree
12819 fold (tree expr)
12821 tree ret;
12822 struct md5_ctx ctx;
12823 unsigned char checksum_before[16], checksum_after[16];
12824 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12826 md5_init_ctx (&ctx);
12827 fold_checksum_tree (expr, &ctx, &ht);
12828 md5_finish_ctx (&ctx, checksum_before);
12829 ht.empty ();
12831 ret = fold_1 (expr);
12833 md5_init_ctx (&ctx);
12834 fold_checksum_tree (expr, &ctx, &ht);
12835 md5_finish_ctx (&ctx, checksum_after);
12837 if (memcmp (checksum_before, checksum_after, 16))
12838 fold_check_failed (expr, ret);
12840 return ret;
12843 void
12844 print_fold_checksum (const_tree expr)
12846 struct md5_ctx ctx;
12847 unsigned char checksum[16], cnt;
12848 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12850 md5_init_ctx (&ctx);
12851 fold_checksum_tree (expr, &ctx, &ht);
12852 md5_finish_ctx (&ctx, checksum);
12853 for (cnt = 0; cnt < 16; ++cnt)
12854 fprintf (stderr, "%02x", checksum[cnt]);
12855 putc ('\n', stderr);
12858 static void
12859 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12861 internal_error ("fold check: original tree changed by fold");
12864 static void
12865 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12866 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12868 const tree_node **slot;
12869 enum tree_code code;
12870 union tree_node *buf;
12871 int i, len;
12873 recursive_label:
12874 if (expr == NULL)
12875 return;
12876 slot = ht->find_slot (expr, INSERT);
12877 if (*slot != NULL)
12878 return;
12879 *slot = expr;
12880 code = TREE_CODE (expr);
12881 if (TREE_CODE_CLASS (code) == tcc_declaration
12882 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12884 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12885 size_t sz = tree_size (expr);
12886 buf = XALLOCAVAR (union tree_node, sz);
12887 memcpy ((char *) buf, expr, sz);
12888 SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
12889 buf->decl_with_vis.symtab_node = NULL;
12890 buf->base.nowarning_flag = 0;
12891 expr = (tree) buf;
12893 else if (TREE_CODE_CLASS (code) == tcc_type
12894 && (TYPE_POINTER_TO (expr)
12895 || TYPE_REFERENCE_TO (expr)
12896 || TYPE_CACHED_VALUES_P (expr)
12897 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12898 || TYPE_NEXT_VARIANT (expr)
12899 || TYPE_ALIAS_SET_KNOWN_P (expr)))
12901 /* Allow these fields to be modified. */
12902 tree tmp;
12903 size_t sz = tree_size (expr);
12904 buf = XALLOCAVAR (union tree_node, sz);
12905 memcpy ((char *) buf, expr, sz);
12906 expr = tmp = (tree) buf;
12907 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12908 TYPE_POINTER_TO (tmp) = NULL;
12909 TYPE_REFERENCE_TO (tmp) = NULL;
12910 TYPE_NEXT_VARIANT (tmp) = NULL;
12911 TYPE_ALIAS_SET (tmp) = -1;
12912 if (TYPE_CACHED_VALUES_P (tmp))
12914 TYPE_CACHED_VALUES_P (tmp) = 0;
12915 TYPE_CACHED_VALUES (tmp) = NULL;
12918 else if (TREE_NO_WARNING (expr) && (DECL_P (expr) || EXPR_P (expr)))
12920 /* Allow TREE_NO_WARNING to be set. Perhaps we shouldn't allow that
12921 and change builtins.c etc. instead - see PR89543. */
12922 size_t sz = tree_size (expr);
12923 buf = XALLOCAVAR (union tree_node, sz);
12924 memcpy ((char *) buf, expr, sz);
12925 buf->base.nowarning_flag = 0;
12926 expr = (tree) buf;
12928 md5_process_bytes (expr, tree_size (expr), ctx);
12929 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12930 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12931 if (TREE_CODE_CLASS (code) != tcc_type
12932 && TREE_CODE_CLASS (code) != tcc_declaration
12933 && code != TREE_LIST
12934 && code != SSA_NAME
12935 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12936 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12937 switch (TREE_CODE_CLASS (code))
12939 case tcc_constant:
12940 switch (code)
12942 case STRING_CST:
12943 md5_process_bytes (TREE_STRING_POINTER (expr),
12944 TREE_STRING_LENGTH (expr), ctx);
12945 break;
12946 case COMPLEX_CST:
12947 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12948 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12949 break;
12950 case VECTOR_CST:
12951 len = vector_cst_encoded_nelts (expr);
12952 for (i = 0; i < len; ++i)
12953 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
12954 break;
12955 default:
12956 break;
12958 break;
12959 case tcc_exceptional:
12960 switch (code)
12962 case TREE_LIST:
12963 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12964 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12965 expr = TREE_CHAIN (expr);
12966 goto recursive_label;
12967 break;
12968 case TREE_VEC:
12969 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12970 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12971 break;
12972 default:
12973 break;
12975 break;
12976 case tcc_expression:
12977 case tcc_reference:
12978 case tcc_comparison:
12979 case tcc_unary:
12980 case tcc_binary:
12981 case tcc_statement:
12982 case tcc_vl_exp:
12983 len = TREE_OPERAND_LENGTH (expr);
12984 for (i = 0; i < len; ++i)
12985 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12986 break;
12987 case tcc_declaration:
12988 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12989 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12990 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12992 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12993 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12994 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12995 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12996 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12999 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13001 if (TREE_CODE (expr) == FUNCTION_DECL)
13003 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13004 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13006 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13008 break;
13009 case tcc_type:
13010 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13011 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13012 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13013 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13014 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13015 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13016 if (INTEGRAL_TYPE_P (expr)
13017 || SCALAR_FLOAT_TYPE_P (expr))
13019 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13020 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13022 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13023 if (TREE_CODE (expr) == RECORD_TYPE
13024 || TREE_CODE (expr) == UNION_TYPE
13025 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13026 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13027 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13028 break;
13029 default:
13030 break;
13034 /* Helper function for outputting the checksum of a tree T. When
13035 debugging with gdb, you can "define mynext" to be "next" followed
13036 by "call debug_fold_checksum (op0)", then just trace down till the
13037 outputs differ. */
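/* A hypothetical gdb session following that recipe:
     (gdb) define mynext
     > next
     > call debug_fold_checksum (op0)
     > end
     (gdb) mynext
   then keep issuing "mynext" until the printed checksums change.  */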
13039 DEBUG_FUNCTION void
13040 debug_fold_checksum (const_tree t)
13042 int i;
13043 unsigned char checksum[16];
13044 struct md5_ctx ctx;
13045 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13047 md5_init_ctx (&ctx);
13048 fold_checksum_tree (t, &ctx, &ht);
13049 md5_finish_ctx (&ctx, checksum);
13050 ht.empty ();
13052 for (i = 0; i < 16; i++)
13053 fprintf (stderr, "%d ", checksum[i]);
13055 fprintf (stderr, "\n");
13058 #endif
13060 /* Fold a unary tree expression with code CODE of type TYPE with an
13061 operand OP0. LOC is the location of the resulting expression.
13062 Return a folded expression if successful. Otherwise, return a tree
13063 expression with code CODE of type TYPE with an operand OP0. */
13065 tree
13066 fold_build1_loc (location_t loc,
13067 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13069 tree tem;
13070 #ifdef ENABLE_FOLD_CHECKING
13071 unsigned char checksum_before[16], checksum_after[16];
13072 struct md5_ctx ctx;
13073 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13075 md5_init_ctx (&ctx);
13076 fold_checksum_tree (op0, &ctx, &ht);
13077 md5_finish_ctx (&ctx, checksum_before);
13078 ht.empty ();
13079 #endif
13081 tem = fold_unary_loc (loc, code, type, op0);
13082 if (!tem)
13083 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13085 #ifdef ENABLE_FOLD_CHECKING
13086 md5_init_ctx (&ctx);
13087 fold_checksum_tree (op0, &ctx, &ht);
13088 md5_finish_ctx (&ctx, checksum_after);
13090 if (memcmp (checksum_before, checksum_after, 16))
13091 fold_check_failed (op0, tem);
13092 #endif
13093 return tem;
13096 /* Fold a binary tree expression with code CODE of type TYPE with
13097 operands OP0 and OP1. LOC is the location of the resulting
13098 expression. Return a folded expression if successful. Otherwise,
13099 return a tree expression with code CODE of type TYPE with operands
13100 OP0 and OP1. */
13102 tree
13103 fold_build2_loc (location_t loc,
13104 enum tree_code code, tree type, tree op0, tree op1
13105 MEM_STAT_DECL)
13107 tree tem;
13108 #ifdef ENABLE_FOLD_CHECKING
13109 unsigned char checksum_before_op0[16],
13110 checksum_before_op1[16],
13111 checksum_after_op0[16],
13112 checksum_after_op1[16];
13113 struct md5_ctx ctx;
13114 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13116 md5_init_ctx (&ctx);
13117 fold_checksum_tree (op0, &ctx, &ht);
13118 md5_finish_ctx (&ctx, checksum_before_op0);
13119 ht.empty ();
13121 md5_init_ctx (&ctx);
13122 fold_checksum_tree (op1, &ctx, &ht);
13123 md5_finish_ctx (&ctx, checksum_before_op1);
13124 ht.empty ();
13125 #endif
13127 tem = fold_binary_loc (loc, code, type, op0, op1);
13128 if (!tem)
13129 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13131 #ifdef ENABLE_FOLD_CHECKING
13132 md5_init_ctx (&ctx);
13133 fold_checksum_tree (op0, &ctx, &ht);
13134 md5_finish_ctx (&ctx, checksum_after_op0);
13135 ht.empty ();
13137 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13138 fold_check_failed (op0, tem);
13140 md5_init_ctx (&ctx);
13141 fold_checksum_tree (op1, &ctx, &ht);
13142 md5_finish_ctx (&ctx, checksum_after_op1);
13144 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13145 fold_check_failed (op1, tem);
13146 #endif
13147 return tem;
13150 /* Fold a ternary tree expression with code CODE of type TYPE with
13151 operands OP0, OP1, and OP2. Return a folded expression if
13152 successful. Otherwise, return a tree expression with code CODE of
13153 type TYPE with operands OP0, OP1, and OP2. */
13155 tree
13156 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13157 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13159 tree tem;
13160 #ifdef ENABLE_FOLD_CHECKING
13161 unsigned char checksum_before_op0[16],
13162 checksum_before_op1[16],
13163 checksum_before_op2[16],
13164 checksum_after_op0[16],
13165 checksum_after_op1[16],
13166 checksum_after_op2[16];
13167 struct md5_ctx ctx;
13168 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13170 md5_init_ctx (&ctx);
13171 fold_checksum_tree (op0, &ctx, &ht);
13172 md5_finish_ctx (&ctx, checksum_before_op0);
13173 ht.empty ();
13175 md5_init_ctx (&ctx);
13176 fold_checksum_tree (op1, &ctx, &ht);
13177 md5_finish_ctx (&ctx, checksum_before_op1);
13178 ht.empty ();
13180 md5_init_ctx (&ctx);
13181 fold_checksum_tree (op2, &ctx, &ht);
13182 md5_finish_ctx (&ctx, checksum_before_op2);
13183 ht.empty ();
13184 #endif
13186 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13187 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13188 if (!tem)
13189 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13191 #ifdef ENABLE_FOLD_CHECKING
13192 md5_init_ctx (&ctx);
13193 fold_checksum_tree (op0, &ctx, &ht);
13194 md5_finish_ctx (&ctx, checksum_after_op0);
13195 ht.empty ();
13197 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13198 fold_check_failed (op0, tem);
13200 md5_init_ctx (&ctx);
13201 fold_checksum_tree (op1, &ctx, &ht);
13202 md5_finish_ctx (&ctx, checksum_after_op1);
13203 ht.empty ();
13205 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13206 fold_check_failed (op1, tem);
13208 md5_init_ctx (&ctx);
13209 fold_checksum_tree (op2, &ctx, &ht);
13210 md5_finish_ctx (&ctx, checksum_after_op2);
13212 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13213 fold_check_failed (op2, tem);
13214 #endif
13215 return tem;
13218 /* Fold a CALL_EXPR of type TYPE with function operand FN, NARGS
13219 arguments in ARGARRAY, and a null static chain.
13220 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13221 of type TYPE from the given operands as constructed by build_call_array. */
13223 tree
13224 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13225 int nargs, tree *argarray)
13227 tree tem;
13228 #ifdef ENABLE_FOLD_CHECKING
13229 unsigned char checksum_before_fn[16],
13230 checksum_before_arglist[16],
13231 checksum_after_fn[16],
13232 checksum_after_arglist[16];
13233 struct md5_ctx ctx;
13234 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13235 int i;
13237 md5_init_ctx (&ctx);
13238 fold_checksum_tree (fn, &ctx, &ht);
13239 md5_finish_ctx (&ctx, checksum_before_fn);
13240 ht.empty ();
13242 md5_init_ctx (&ctx);
13243 for (i = 0; i < nargs; i++)
13244 fold_checksum_tree (argarray[i], &ctx, &ht);
13245 md5_finish_ctx (&ctx, checksum_before_arglist);
13246 ht.empty ();
13247 #endif
13249 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13250 if (!tem)
13251 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13253 #ifdef ENABLE_FOLD_CHECKING
13254 md5_init_ctx (&ctx);
13255 fold_checksum_tree (fn, &ctx, &ht);
13256 md5_finish_ctx (&ctx, checksum_after_fn);
13257 ht.empty ();
13259 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13260 fold_check_failed (fn, tem);
13262 md5_init_ctx (&ctx);
13263 for (i = 0; i < nargs; i++)
13264 fold_checksum_tree (argarray[i], &ctx, &ht);
13265 md5_finish_ctx (&ctx, checksum_after_arglist);
13267 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13268 fold_check_failed (NULL_TREE, tem);
13269 #endif
13270 return tem;
13273 /* Perform constant folding and related simplification of initializer
13274 expression EXPR. These behave identically to "fold_buildN" but ignore
13275 potential run-time traps and exceptions that fold must preserve. */
13277 #define START_FOLD_INIT \
13278 int saved_signaling_nans = flag_signaling_nans;\
13279 int saved_trapping_math = flag_trapping_math;\
13280 int saved_rounding_math = flag_rounding_math;\
13281 int saved_trapv = flag_trapv;\
13282 int saved_folding_initializer = folding_initializer;\
13283 flag_signaling_nans = 0;\
13284 flag_trapping_math = 0;\
13285 flag_rounding_math = 0;\
13286 flag_trapv = 0;\
13287 folding_initializer = 1;
13289 #define END_FOLD_INIT \
13290 flag_signaling_nans = saved_signaling_nans;\
13291 flag_trapping_math = saved_trapping_math;\
13292 flag_rounding_math = saved_rounding_math;\
13293 flag_trapv = saved_trapv;\
13294 folding_initializer = saved_folding_initializer;
13296 tree
13297 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13298 tree type, tree op)
13300 tree result;
13301 START_FOLD_INIT;
13303 result = fold_build1_loc (loc, code, type, op);
13305 END_FOLD_INIT;
13306 return result;
13309 tree
13310 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13311 tree type, tree op0, tree op1)
13313 tree result;
13314 START_FOLD_INIT;
13316 result = fold_build2_loc (loc, code, type, op0, op1);
13318 END_FOLD_INIT;
13319 return result;
13322 tree
13323 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
13324 int nargs, tree *argarray)
13326 tree result;
13327 START_FOLD_INIT;
13329 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
13331 END_FOLD_INIT;
13332 return result;
13335 #undef START_FOLD_INIT
13336 #undef END_FOLD_INIT
13338 /* Determine if the first argument is a multiple of the second argument.
13339 Return 0 if it is not, or if we cannot easily determine it to be.
13341 An example of the sort of thing we care about (at this point; this routine
13342 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13343 fold cases do now) is discovering that
13345 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13347 is a multiple of
13349 SAVE_EXPR (J * 8)
13351 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13353 This code also handles discovering that
13355 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13357 is a multiple of 8 so we don't have to worry about dealing with a
13358 possible remainder.
13360 Note that we *look* inside a SAVE_EXPR only to determine how it was
13361 calculated; it is not safe for fold to do much of anything else with the
13362 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13363 at run time. For example, the latter example above *cannot* be implemented
13364 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13365 evaluation time of the original SAVE_EXPR is not necessarily the same at
13366 the time the new expression is evaluated. The only optimization of this
13367 sort that would be valid is changing
13369 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13371 divided by 8 to
13373 SAVE_EXPR (I) * SAVE_EXPR (J)
13375 (where the same SAVE_EXPR (J) is used in the original and the
13376 transformed version). */
13378 int
13379 multiple_of_p (tree type, const_tree top, const_tree bottom)
13381 gimple *stmt;
13382 tree t1, op1, op2;
13384 if (operand_equal_p (top, bottom, 0))
13385 return 1;
13387 if (TREE_CODE (type) != INTEGER_TYPE)
13388 return 0;
13390 switch (TREE_CODE (top))
13392 case BIT_AND_EXPR:
13393 /* A bitwise AND provides a power-of-two multiple. If the mask is
13394 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13395 if (!integer_pow2p (bottom))
13396 return 0;
13397 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13398 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13400 case MULT_EXPR:
13401 if (TREE_CODE (bottom) == INTEGER_CST)
13403 op1 = TREE_OPERAND (top, 0);
13404 op2 = TREE_OPERAND (top, 1);
13405 if (TREE_CODE (op1) == INTEGER_CST)
13406 std::swap (op1, op2);
13407 if (TREE_CODE (op2) == INTEGER_CST)
13409 if (multiple_of_p (type, op2, bottom))
13410 return 1;
13411 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
13412 if (multiple_of_p (type, bottom, op2))
13414 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
13415 wi::to_widest (op2));
13416 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
13418 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
13419 return multiple_of_p (type, op1, op2);
13422 return multiple_of_p (type, op1, bottom);
13425 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13426 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
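/* A worked example of the constant-by-constant case above:
   multiple_of_p ((x * 2 + 2) * 4, 8) first sees op2 = 4, which is not
   a multiple of 8, but 8 is a multiple of 4, so it reduces to
   multiple_of_p (x * 2 + 2, 8 / 4 = 2), which holds because both
   x * 2 and 2 are multiples of 2.  */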
13428 case MINUS_EXPR:
13429 /* It is impossible to prove precisely whether op0 - op1 is a multiple
13430 of bottom, so be conservative here and check whether both op0 and op1
13431 are multiples of bottom. Note we check the second operand first
13432 since it's usually simpler. */
13433 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13434 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13436 case PLUS_EXPR:
13437 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
13438 as op0 - 3 if the expression has unsigned type. For example,
13439 (X / 3) + 0xfffffffd is a multiple of 3, but 0xfffffffd is not. */
13440 op1 = TREE_OPERAND (top, 1);
13441 if (TYPE_UNSIGNED (type)
13442 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
13443 op1 = fold_build1 (NEGATE_EXPR, type, op1);
13444 return (multiple_of_p (type, op1, bottom)
13445 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
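/* The negation trick above relies on modular arithmetic: in a 32-bit
   unsigned type, 0xfffffffd == 2^32 - 3, so adding it is the same as
   subtracting 3 (mod 2^32), and X - 3 preserves divisibility by 3
   whenever X is divisible by 3.  */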
13447 case LSHIFT_EXPR:
13448 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13450 op1 = TREE_OPERAND (top, 1);
13451 /* const_binop may not detect overflow correctly,
13452 so check for it explicitly here. */
13453 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
13454 wi::to_wide (op1))
13455 && (t1 = fold_convert (type,
13456 const_binop (LSHIFT_EXPR, size_one_node,
13457 op1))) != 0
13458 && !TREE_OVERFLOW (t1))
13459 return multiple_of_p (type, t1, bottom);
13461 return 0;
13463 case NOP_EXPR:
13464 /* Can't handle conversions from non-integral or wider integral types. */
13465 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13466 || (TYPE_PRECISION (type)
13467 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13468 return 0;
13470 /* fall through */
13472 case SAVE_EXPR:
13473 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13475 case COND_EXPR:
13476 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13477 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
13479 case INTEGER_CST:
13480 if (TREE_CODE (bottom) != INTEGER_CST
13481 || integer_zerop (bottom)
13482 || (TYPE_UNSIGNED (type)
13483 && (tree_int_cst_sgn (top) < 0
13484 || tree_int_cst_sgn (bottom) < 0)))
13485 return 0;
13486 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
13487 SIGNED);
13489 case SSA_NAME:
13490 if (TREE_CODE (bottom) == INTEGER_CST
13491 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
13492 && gimple_code (stmt) == GIMPLE_ASSIGN)
13494 enum tree_code code = gimple_assign_rhs_code (stmt);
13496 /* Check for special cases to see if top is defined as multiple
13497 of bottom:
13499 top = (X & ~(bottom - 1)) ; bottom is power of 2
13501 or
13503 Y = X % bottom
13504 top = X - Y. */
13505 if (code == BIT_AND_EXPR
13506 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13507 && TREE_CODE (op2) == INTEGER_CST
13508 && integer_pow2p (bottom)
13509 && wi::multiple_of_p (wi::to_widest (op2),
13510 wi::to_widest (bottom), UNSIGNED))
13511 return 1;
13513 op1 = gimple_assign_rhs1 (stmt);
13514 if (code == MINUS_EXPR
13515 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13516 && TREE_CODE (op2) == SSA_NAME
13517 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
13518 && gimple_code (stmt) == GIMPLE_ASSIGN
13519 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
13520 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
13521 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
13522 return 1;
13525 /* fall through */
13527 default:
13528 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
13529 return multiple_p (wi::to_poly_widest (top),
13530 wi::to_poly_widest (bottom));
13532 return 0;
13536 #define tree_expr_nonnegative_warnv_p(X, Y) \
13537 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13539 #define RECURSE(X) \
13540 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
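/* The #define of tree_expr_nonnegative_warnv_p above deliberately
   expands to a compile-time #pragma error inside this file, so any
   direct recursive call fails to build; recursion must go through
   RECURSE, which increments DEPTH and keeps the query bounded.  */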
13542 /* Return true if CODE or TYPE is known to be non-negative. */
13544 static bool
13545 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13547 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13548 && truth_value_p (code))
13549 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13550 have a signed:1 type (where the values are -1 and 0). */
13551 return true;
13552 return false;
13555 /* Return true if (CODE OP0) is known to be non-negative. If the return
13556 value is based on the assumption that signed overflow is undefined,
13557 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13558 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13560 bool
13561 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13562 bool *strict_overflow_p, int depth)
13564 if (TYPE_UNSIGNED (type))
13565 return true;
13567 switch (code)
13569 case ABS_EXPR:
13570 /* We can't return 1 if flag_wrapv is set because
13571 ABS_EXPR<INT_MIN> = INT_MIN. */
13572 if (!ANY_INTEGRAL_TYPE_P (type))
13573 return true;
13574 if (TYPE_OVERFLOW_UNDEFINED (type))
13576 *strict_overflow_p = true;
13577 return true;
13579 break;
13581 case NON_LVALUE_EXPR:
13582 case FLOAT_EXPR:
13583 case FIX_TRUNC_EXPR:
13584 return RECURSE (op0);
13586 CASE_CONVERT:
13588 tree inner_type = TREE_TYPE (op0);
13589 tree outer_type = type;
13591 if (TREE_CODE (outer_type) == REAL_TYPE)
13593 if (TREE_CODE (inner_type) == REAL_TYPE)
13594 return RECURSE (op0);
13595 if (INTEGRAL_TYPE_P (inner_type))
13597 if (TYPE_UNSIGNED (inner_type))
13598 return true;
13599 return RECURSE (op0);
13602 else if (INTEGRAL_TYPE_P (outer_type))
13604 if (TREE_CODE (inner_type) == REAL_TYPE)
13605 return RECURSE (op0);
13606 if (INTEGRAL_TYPE_P (inner_type))
13607 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13608 && TYPE_UNSIGNED (inner_type);
13611 break;
13613 default:
13614 return tree_simple_nonnegative_warnv_p (code, type);
13617 /* We don't know the sign of `t', so be conservative and return false. */
13618 return false;
13621 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13622 value is based on the assumption that signed overflow is undefined,
13623 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13624 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13626 bool
13627 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13628 tree op1, bool *strict_overflow_p,
13629 int depth)
13631 if (TYPE_UNSIGNED (type))
13632 return true;
13634 switch (code)
13636 case POINTER_PLUS_EXPR:
13637 case PLUS_EXPR:
13638 if (FLOAT_TYPE_P (type))
13639 return RECURSE (op0) && RECURSE (op1);
13641 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13642 both unsigned and at least 2 bits shorter than the result. */
13643 if (TREE_CODE (type) == INTEGER_TYPE
13644 && TREE_CODE (op0) == NOP_EXPR
13645 && TREE_CODE (op1) == NOP_EXPR)
13647 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13648 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13649 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13650 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13652 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13653 TYPE_PRECISION (inner2)) + 1;
13654 return prec < TYPE_PRECISION (type);
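/* A hypothetical instance of the width check above: adding two values
   zero-extended from 8-bit types into a 32-bit int gives at most
   255 + 255 = 510, which needs only 9 bits; since
   MAX (8, 8) + 1 = 9 < 32, the sum can never reach the sign bit.  */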
13657 break;
13659 case MULT_EXPR:
13660 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
13662 /* x * x is always non-negative for floating point x
13663 or when signed overflow is undefined. */
13664 if (operand_equal_p (op0, op1, 0)
13665 || (RECURSE (op0) && RECURSE (op1)))
13667 if (ANY_INTEGRAL_TYPE_P (type)
13668 && TYPE_OVERFLOW_UNDEFINED (type))
13669 *strict_overflow_p = true;
13670 return true;
13674 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13675 both unsigned and their total bit count is less than the result's. */
13676 if (TREE_CODE (type) == INTEGER_TYPE
13677 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
13678 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
13680 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
13681 ? TREE_TYPE (TREE_OPERAND (op0, 0))
13682 : TREE_TYPE (op0);
13683 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
13684 ? TREE_TYPE (TREE_OPERAND (op1, 0))
13685 : TREE_TYPE (op1);
13687 bool unsigned0 = TYPE_UNSIGNED (inner0);
13688 bool unsigned1 = TYPE_UNSIGNED (inner1);
13690 if (TREE_CODE (op0) == INTEGER_CST)
13691 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
13693 if (TREE_CODE (op1) == INTEGER_CST)
13694 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
13696 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
13697 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
13699 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
13700 ? tree_int_cst_min_precision (op0, UNSIGNED)
13701 : TYPE_PRECISION (inner0);
13703 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
13704 ? tree_int_cst_min_precision (op1, UNSIGNED)
13705 : TYPE_PRECISION (inner1);
13707 return precision0 + precision1 < TYPE_PRECISION (type);
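/* For example (hypothetical): multiplying two values zero-extended
   from 8-bit types yields at most 255 * 255 = 65025, which fits in
   8 + 8 = 16 bits; since 16 < 32, the 32-bit product stays
   non-negative.  */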
13710 return false;
13712 case BIT_AND_EXPR:
13713 case MAX_EXPR:
13714 return RECURSE (op0) || RECURSE (op1);
13716 case BIT_IOR_EXPR:
13717 case BIT_XOR_EXPR:
13718 case MIN_EXPR:
13719 case RDIV_EXPR:
13720 case TRUNC_DIV_EXPR:
13721 case CEIL_DIV_EXPR:
13722 case FLOOR_DIV_EXPR:
13723 case ROUND_DIV_EXPR:
13724 return RECURSE (op0) && RECURSE (op1);
13726 case TRUNC_MOD_EXPR:
13727 return RECURSE (op0);
13729 case FLOOR_MOD_EXPR:
13730 return RECURSE (op1);
13732 case CEIL_MOD_EXPR:
13733 case ROUND_MOD_EXPR:
13734 default:
13735 return tree_simple_nonnegative_warnv_p (code, type);
13738 /* We don't know the sign of `t', so be conservative and return false. */
13739 return false;
13742 /* Return true if T is known to be non-negative. If the return
13743 value is based on the assumption that signed overflow is undefined,
13744 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13745 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13747 bool
13748 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13750 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13751 return true;
13753 switch (TREE_CODE (t))
13755 case INTEGER_CST:
13756 return tree_int_cst_sgn (t) >= 0;
13758 case REAL_CST:
13759 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13761 case FIXED_CST:
13762 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13764 case COND_EXPR:
13765 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13767 case SSA_NAME:
13768 /* Limit the depth of recursion to avoid quadratic behavior.
13769 This is expected to catch almost all occurrences in practice.
13770 If this code misses important cases that unbounded recursion
13771 would not, passes that need this information could be revised
13772 to provide it through dataflow propagation. */
13773 return (!name_registered_for_update_p (t)
13774 && depth < param_max_ssa_name_query_depth
13775 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
13776 strict_overflow_p, depth));
13778 default:
13779 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13783 /* Return true if T is known to be non-negative. If the return
13784 value is based on the assumption that signed overflow is undefined,
13785 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13786 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13788 bool
13789 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13790 bool *strict_overflow_p, int depth)
13792 switch (fn)
13794 CASE_CFN_ACOS:
13795 CASE_CFN_ACOSH:
13796 CASE_CFN_CABS:
13797 CASE_CFN_COSH:
13798 CASE_CFN_ERFC:
13799 CASE_CFN_EXP:
13800 CASE_CFN_EXP10:
13801 CASE_CFN_EXP2:
13802 CASE_CFN_FABS:
13803 CASE_CFN_FDIM:
13804 CASE_CFN_HYPOT:
13805 CASE_CFN_POW10:
13806 CASE_CFN_FFS:
13807 CASE_CFN_PARITY:
13808 CASE_CFN_POPCOUNT:
13809 CASE_CFN_CLZ:
13810 CASE_CFN_CLRSB:
13811 case CFN_BUILT_IN_BSWAP16:
13812 case CFN_BUILT_IN_BSWAP32:
13813 case CFN_BUILT_IN_BSWAP64:
13814 case CFN_BUILT_IN_BSWAP128:
13815 /* Always true. */
13816 return true;
13818 CASE_CFN_SQRT:
13819 CASE_CFN_SQRT_FN:
13820 /* sqrt(-0.0) is -0.0. */
13821 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13822 return true;
13823 return RECURSE (arg0);
13825 CASE_CFN_ASINH:
13826 CASE_CFN_ATAN:
13827 CASE_CFN_ATANH:
13828 CASE_CFN_CBRT:
13829 CASE_CFN_CEIL:
13830 CASE_CFN_CEIL_FN:
13831 CASE_CFN_ERF:
13832 CASE_CFN_EXPM1:
13833 CASE_CFN_FLOOR:
13834 CASE_CFN_FLOOR_FN:
13835 CASE_CFN_FMOD:
13836 CASE_CFN_FREXP:
13837 CASE_CFN_ICEIL:
13838 CASE_CFN_IFLOOR:
13839 CASE_CFN_IRINT:
13840 CASE_CFN_IROUND:
13841 CASE_CFN_LCEIL:
13842 CASE_CFN_LDEXP:
13843 CASE_CFN_LFLOOR:
13844 CASE_CFN_LLCEIL:
13845 CASE_CFN_LLFLOOR:
13846 CASE_CFN_LLRINT:
13847 CASE_CFN_LLROUND:
13848 CASE_CFN_LRINT:
13849 CASE_CFN_LROUND:
13850 CASE_CFN_MODF:
13851 CASE_CFN_NEARBYINT:
13852 CASE_CFN_NEARBYINT_FN:
13853 CASE_CFN_RINT:
13854 CASE_CFN_RINT_FN:
13855 CASE_CFN_ROUND:
13856 CASE_CFN_ROUND_FN:
13857 CASE_CFN_ROUNDEVEN:
13858 CASE_CFN_ROUNDEVEN_FN:
13859 CASE_CFN_SCALB:
13860 CASE_CFN_SCALBLN:
13861 CASE_CFN_SCALBN:
13862 CASE_CFN_SIGNBIT:
13863 CASE_CFN_SIGNIFICAND:
13864 CASE_CFN_SINH:
13865 CASE_CFN_TANH:
13866 CASE_CFN_TRUNC:
13867 CASE_CFN_TRUNC_FN:
13868 /* True if the 1st argument is nonnegative. */
13869 return RECURSE (arg0);
13871 CASE_CFN_FMAX:
13872 CASE_CFN_FMAX_FN:
13873 /* True if the 1st OR 2nd arguments are nonnegative. */
13874 return RECURSE (arg0) || RECURSE (arg1);
13876 CASE_CFN_FMIN:
13877 CASE_CFN_FMIN_FN:
13878 /* True if the 1st AND 2nd arguments are nonnegative. */
13879 return RECURSE (arg0) && RECURSE (arg1);
13881 CASE_CFN_COPYSIGN:
13882 CASE_CFN_COPYSIGN_FN:
13883 /* True if the 2nd argument is nonnegative. */
13884 return RECURSE (arg1);
13886 CASE_CFN_POWI:
13887 /* True if the 1st argument is nonnegative or the second
13888 argument is an even integer. */
13889 if (TREE_CODE (arg1) == INTEGER_CST
13890 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13891 return true;
13892 return RECURSE (arg0);
13894 CASE_CFN_POW:
13895 /* True if the 1st argument is nonnegative or the second
13896 argument is an even integer-valued real. */
13897 if (TREE_CODE (arg1) == REAL_CST)
13899 REAL_VALUE_TYPE c;
13900 HOST_WIDE_INT n;
13902 c = TREE_REAL_CST (arg1);
13903 n = real_to_integer (&c);
13904 if ((n & 1) == 0)
13906 REAL_VALUE_TYPE cint;
13907 real_from_integer (&cint, VOIDmode, n, SIGNED);
13908 if (real_identical (&c, &cint))
13909 return true;
13912 return RECURSE (arg0);
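/* E.g. pow (x, 2.0) is known non-negative for any x here because the
   exponent is an even integer-valued real: real_to_integer gives
   n = 2, (n & 1) == 0, and real_identical confirms that 2.0 was
   exactly an integer.  */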
13914 default:
13915 break;
13917 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13920 /* Return true if T is known to be non-negative. If the return
13921 value is based on the assumption that signed overflow is undefined,
13922 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13923 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13925 static bool
13926 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13928 enum tree_code code = TREE_CODE (t);
13929 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13930 return true;
13932 switch (code)
13934 case TARGET_EXPR:
13936 tree temp = TARGET_EXPR_SLOT (t);
13937 t = TARGET_EXPR_INITIAL (t);
13939 /* If the initializer is non-void, then it's a normal expression
13940 that will be assigned to the slot. */
13941 if (!VOID_TYPE_P (t))
13942 return RECURSE (t);
13944 /* Otherwise, the initializer sets the slot in some way. One common
13945 way is an assignment statement at the end of the initializer. */
13946 while (1)
13948 if (TREE_CODE (t) == BIND_EXPR)
13949 t = expr_last (BIND_EXPR_BODY (t));
13950 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13951 || TREE_CODE (t) == TRY_CATCH_EXPR)
13952 t = expr_last (TREE_OPERAND (t, 0));
13953 else if (TREE_CODE (t) == STATEMENT_LIST)
13954 t = expr_last (t);
13955 else
13956 break;
13958 if (TREE_CODE (t) == MODIFY_EXPR
13959 && TREE_OPERAND (t, 0) == temp)
13960 return RECURSE (TREE_OPERAND (t, 1));
13962 return false;
13965 case CALL_EXPR:
13967 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13968 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13970 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13971 get_call_combined_fn (t),
13972 arg0,
13973 arg1,
13974 strict_overflow_p, depth);
13976 case COMPOUND_EXPR:
13977 case MODIFY_EXPR:
13978 return RECURSE (TREE_OPERAND (t, 1));
13980 case BIND_EXPR:
13981 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13983 case SAVE_EXPR:
13984 return RECURSE (TREE_OPERAND (t, 0));
13986 default:
13987 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13991 #undef RECURSE
13992 #undef tree_expr_nonnegative_warnv_p
13994 /* Return true if T is known to be non-negative. If the return
13995 value is based on the assumption that signed overflow is undefined,
13996 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13997 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13999 bool
14000 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14002 enum tree_code code;
14003 if (t == error_mark_node)
14004 return false;
14006 code = TREE_CODE (t);
14007 switch (TREE_CODE_CLASS (code))
14009 case tcc_binary:
14010 case tcc_comparison:
14011 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14012 TREE_TYPE (t),
14013 TREE_OPERAND (t, 0),
14014 TREE_OPERAND (t, 1),
14015 strict_overflow_p, depth);
14017 case tcc_unary:
14018 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14019 TREE_TYPE (t),
14020 TREE_OPERAND (t, 0),
14021 strict_overflow_p, depth);
14023 case tcc_constant:
14024 case tcc_declaration:
14025 case tcc_reference:
14026 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14028 default:
14029 break;
14032 switch (code)
14034 case TRUTH_AND_EXPR:
14035 case TRUTH_OR_EXPR:
14036 case TRUTH_XOR_EXPR:
14037 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14038 TREE_TYPE (t),
14039 TREE_OPERAND (t, 0),
14040 TREE_OPERAND (t, 1),
14041 strict_overflow_p, depth);
14042 case TRUTH_NOT_EXPR:
14043 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14044 TREE_TYPE (t),
14045 TREE_OPERAND (t, 0),
14046 strict_overflow_p, depth);
14048 case COND_EXPR:
14049 case CONSTRUCTOR:
14050 case OBJ_TYPE_REF:
14051 case ASSERT_EXPR:
14052 case ADDR_EXPR:
14053 case WITH_SIZE_EXPR:
14054 case SSA_NAME:
14055 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14057 default:
14058 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
14062 /* Return true if `t' is known to be non-negative. Handle warnings
14063 about undefined signed overflow. */
14065 bool
14066 tree_expr_nonnegative_p (tree t)
14068 bool ret, strict_overflow_p;
14070 strict_overflow_p = false;
14071 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14072 if (strict_overflow_p)
14073 fold_overflow_warning (("assuming signed overflow does not occur when "
14074 "determining that expression is always "
14075 "non-negative"),
14076 WARN_STRICT_OVERFLOW_MISC);
14077 return ret;
14081 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14082 For floating point we further ensure that T is not denormal.
14083 Similar logic is present in nonzero_address in rtlanal.c.
14085 If the return value is based on the assumption that signed overflow
14086 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14087 change *STRICT_OVERFLOW_P. */
14089 bool
14090 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14091 bool *strict_overflow_p)
14093 switch (code)
14095 case ABS_EXPR:
14096 return tree_expr_nonzero_warnv_p (op0,
14097 strict_overflow_p);
14099 case NOP_EXPR:
14101 tree inner_type = TREE_TYPE (op0);
14102 tree outer_type = type;
14104 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14105 && tree_expr_nonzero_warnv_p (op0,
14106 strict_overflow_p));
14108 break;
14110 case NON_LVALUE_EXPR:
14111 return tree_expr_nonzero_warnv_p (op0,
14112 strict_overflow_p);
14114 default:
14115 break;
14118 return false;
14121 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14122 For floating point we further ensure that T is not denormal.
14123 Similar logic is present in nonzero_address in rtlanal.c.
14125 If the return value is based on the assumption that signed overflow
14126 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14127 change *STRICT_OVERFLOW_P. */
14129 bool
14130 tree_binary_nonzero_warnv_p (enum tree_code code,
14131 tree type,
14132 tree op0,
14133 tree op1, bool *strict_overflow_p)
14135 bool sub_strict_overflow_p;
14136 switch (code)
14138 case POINTER_PLUS_EXPR:
14139 case PLUS_EXPR:
14140 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
14142 /* In the presence of negative values it is hard
14143 to say anything definite. */
14144 sub_strict_overflow_p = false;
14145 if (!tree_expr_nonnegative_warnv_p (op0,
14146 &sub_strict_overflow_p)
14147 || !tree_expr_nonnegative_warnv_p (op1,
14148 &sub_strict_overflow_p))
14149 return false;
14150 /* One of the operands must be positive and the other non-negative. */
14151 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14152 overflows, on a two's-complement machine the sum of two
14153 nonnegative numbers can never be zero. */
14154 return (tree_expr_nonzero_warnv_p (op0,
14155 strict_overflow_p)
14156 || tree_expr_nonzero_warnv_p (op1,
14157 strict_overflow_p));
14159 break;
14161 case MULT_EXPR:
14162 if (TYPE_OVERFLOW_UNDEFINED (type))
14164 if (tree_expr_nonzero_warnv_p (op0,
14165 strict_overflow_p)
14166 && tree_expr_nonzero_warnv_p (op1,
14167 strict_overflow_p))
14169 *strict_overflow_p = true;
14170 return true;
14173 break;
14175 case MIN_EXPR:
14176 sub_strict_overflow_p = false;
14177 if (tree_expr_nonzero_warnv_p (op0,
14178 &sub_strict_overflow_p)
14179 && tree_expr_nonzero_warnv_p (op1,
14180 &sub_strict_overflow_p))
14182 if (sub_strict_overflow_p)
14183 *strict_overflow_p = true;
14185 break;
14187 case MAX_EXPR:
14188 sub_strict_overflow_p = false;
14189 if (tree_expr_nonzero_warnv_p (op0,
14190 &sub_strict_overflow_p))
14192 if (sub_strict_overflow_p)
14193 *strict_overflow_p = true;
14195 /* When both operands are nonzero, then MAX must be too. */
14196 if (tree_expr_nonzero_warnv_p (op1,
14197 strict_overflow_p))
14198 return true;
14200 /* MAX where operand 0 is positive is positive. */
14201 return tree_expr_nonnegative_warnv_p (op0,
14202 strict_overflow_p);
14204 /* MAX where operand 1 is positive is positive. */
14205 else if (tree_expr_nonzero_warnv_p (op1,
14206 &sub_strict_overflow_p)
14207 && tree_expr_nonnegative_warnv_p (op1,
14208 &sub_strict_overflow_p))
14210 if (sub_strict_overflow_p)
14211 *strict_overflow_p = true;
14212 return true;
14214 break;
14216 case BIT_IOR_EXPR:
14217 return (tree_expr_nonzero_warnv_p (op1,
14218 strict_overflow_p)
14219 || tree_expr_nonzero_warnv_p (op0,
14220 strict_overflow_p));
14222 default:
14223 break;
14226 return false;
14229 /* Return true when T is an address and is known to be nonzero.
14230 For floating point we further ensure that T is not denormal.
14231 Similar logic is present in nonzero_address in rtlanal.c.
14233 If the return value is based on the assumption that signed overflow
14234 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14235 change *STRICT_OVERFLOW_P. */
14237 bool
14238 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14240 bool sub_strict_overflow_p;
14241 switch (TREE_CODE (t))
14243 case INTEGER_CST:
14244 return !integer_zerop (t);
14246 case ADDR_EXPR:
14248 tree base = TREE_OPERAND (t, 0);
14250 if (!DECL_P (base))
14251 base = get_base_address (base);
14253 if (base && TREE_CODE (base) == TARGET_EXPR)
14254 base = TARGET_EXPR_SLOT (base);
14256 if (!base)
14257 return false;
14259 /* For objects in the symbol table, check whether we know they are
14260 nonzero. Don't do anything for variables and functions before the
14261 symtab is built; it is quite possible that they will be declared weak later. */
14262 int nonzero_addr = maybe_nonzero_address (base);
14263 if (nonzero_addr >= 0)
14264 return nonzero_addr;
14266 /* Constants are never weak. */
14267 if (CONSTANT_CLASS_P (base))
14268 return true;
14270 return false;
14273 case COND_EXPR:
14274 sub_strict_overflow_p = false;
14275 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14276 &sub_strict_overflow_p)
14277 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14278 &sub_strict_overflow_p))
14280 if (sub_strict_overflow_p)
14281 *strict_overflow_p = true;
14282 return true;
14284 break;
14286 case SSA_NAME:
14287 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
14288 break;
14289 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
14291 default:
14292 break;
14294 return false;
14297 #define integer_valued_real_p(X) \
14298 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14300 #define RECURSE(X) \
14301 ((integer_valued_real_p) (X, depth + 1))
14303 /* Return true if the floating point result of (CODE OP0) has an
14304 integer value. We also allow +Inf, -Inf and NaN to be considered
14305 integer values. Return false for signaling NaN.
14307 DEPTH is the current nesting depth of the query. */
14309 bool
14310 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
14312 switch (code)
14314 case FLOAT_EXPR:
14315 return true;
14317 case ABS_EXPR:
14318 return RECURSE (op0);
14320 CASE_CONVERT:
14322 tree type = TREE_TYPE (op0);
14323 if (TREE_CODE (type) == INTEGER_TYPE)
14324 return true;
14325 if (TREE_CODE (type) == REAL_TYPE)
14326 return RECURSE (op0);
14327 break;
14330 default:
14331 break;
14333 return false;
14336 /* Return true if the floating point result of (CODE OP0 OP1) has an
14337 integer value. We also allow +Inf, -Inf and NaN to be considered
14338 integer values. Return false for signaling NaN.
14340 DEPTH is the current nesting depth of the query. */
14342 bool
14343 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
14345 switch (code)
14347 case PLUS_EXPR:
14348 case MINUS_EXPR:
14349 case MULT_EXPR:
14350 case MIN_EXPR:
14351 case MAX_EXPR:
14352 return RECURSE (op0) && RECURSE (op1);
14354 default:
14355 break;
14357 return false;
14360 /* Return true if the floating point result of calling FNDECL with arguments
14361 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
14362 considered integer values. Return false for signaling NaN. If FNDECL
14363 takes fewer than 2 arguments, the remaining ARGn are null.
14365 DEPTH is the current nesting depth of the query. */
14367 bool
14368 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
14370 switch (fn)
14372 CASE_CFN_CEIL:
14373 CASE_CFN_CEIL_FN:
14374 CASE_CFN_FLOOR:
14375 CASE_CFN_FLOOR_FN:
14376 CASE_CFN_NEARBYINT:
14377 CASE_CFN_NEARBYINT_FN:
14378 CASE_CFN_RINT:
14379 CASE_CFN_RINT_FN:
14380 CASE_CFN_ROUND:
14381 CASE_CFN_ROUND_FN:
14382 CASE_CFN_ROUNDEVEN:
14383 CASE_CFN_ROUNDEVEN_FN:
14384 CASE_CFN_TRUNC:
14385 CASE_CFN_TRUNC_FN:
14386 return true;
14388 CASE_CFN_FMIN:
14389 CASE_CFN_FMIN_FN:
14390 CASE_CFN_FMAX:
14391 CASE_CFN_FMAX_FN:
14392 return RECURSE (arg0) && RECURSE (arg1);
14394 default:
14395 break;
14397 return false;
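/* Illustration: the rounding functions handled above (ceil, floor,
   nearbyint, rint, round, roundeven, trunc) produce integer values by
   definition, whereas fmin/fmax only do so when both of their arguments
   are known to be integer valued. */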
14400 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
14401 has an integer value. We also allow +Inf, -Inf and NaN to be
14402 considered integer values. Return false for signaling NaN.
14404 DEPTH is the current nesting depth of the query. */
14406 bool
14407 integer_valued_real_single_p (tree t, int depth)
14409 switch (TREE_CODE (t))
14411 case REAL_CST:
14412 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
14414 case COND_EXPR:
14415 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14417 case SSA_NAME:
14418 /* Limit the depth of recursion to avoid quadratic behavior.
14419 This is expected to catch almost all occurrences in practice.
14420 If this code misses important cases that unbounded recursion
14421 would not, passes that need this information could be revised
14422 to provide it through dataflow propagation. */
14423 return (!name_registered_for_update_p (t)
14424 && depth < param_max_ssa_name_query_depth
14425 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
14426 depth));
14428 default:
14429 break;
14431 return false;
14434 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
14435 has an integer value. We also allow +Inf, -Inf and NaN to be
14436 considered integer values. Return false for signaling NaN.
14438 DEPTH is the current nesting depth of the query. */
14440 static bool
14441 integer_valued_real_invalid_p (tree t, int depth)
14443 switch (TREE_CODE (t))
14445 case COMPOUND_EXPR:
14446 case MODIFY_EXPR:
14447 case BIND_EXPR:
14448 return RECURSE (TREE_OPERAND (t, 1));
14450 case SAVE_EXPR:
14451 return RECURSE (TREE_OPERAND (t, 0));
14453 default:
14454 break;
14456 return false;
14459 #undef RECURSE
14460 #undef integer_valued_real_p
14462 /* Return true if the floating point expression T has an integer value.
14463 We also allow +Inf, -Inf and NaN to be considered integer values.
14464 Return false for signaling NaN.
14466 DEPTH is the current nesting depth of the query. */
14468 bool
14469 integer_valued_real_p (tree t, int depth)
14471 if (t == error_mark_node)
14472 return false;
14474 STRIP_ANY_LOCATION_WRAPPER (t);
14476 tree_code code = TREE_CODE (t);
14477 switch (TREE_CODE_CLASS (code))
14479 case tcc_binary:
14480 case tcc_comparison:
14481 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
14482 TREE_OPERAND (t, 1), depth);
14484 case tcc_unary:
14485 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
14487 case tcc_constant:
14488 case tcc_declaration:
14489 case tcc_reference:
14490 return integer_valued_real_single_p (t, depth);
14492 default:
14493 break;
14496 switch (code)
14498 case COND_EXPR:
14499 case SSA_NAME:
14500 return integer_valued_real_single_p (t, depth);
14502 case CALL_EXPR:
14504 tree arg0 = (call_expr_nargs (t) > 0
14505 ? CALL_EXPR_ARG (t, 0)
14506 : NULL_TREE);
14507 tree arg1 = (call_expr_nargs (t) > 1
14508 ? CALL_EXPR_ARG (t, 1)
14509 : NULL_TREE);
14510 return integer_valued_real_call_p (get_call_combined_fn (t),
14511 arg0, arg1, depth);
14514 default:
14515 return integer_valued_real_invalid_p (t, depth);
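/* An outermost query starts with a DEPTH of 0, e.g.
   integer_valued_real_p (t, 0); the helpers above then use RECURSE to
   increment DEPTH so that the SSA_NAME case can cut the walk off at
   param_max_ssa_name_query_depth. */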
14519 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14520 attempt to fold the expression to a constant without modifying TYPE,
14521 OP0 or OP1.
14523 If the expression could be simplified to a constant, then return
14524 the constant. If the expression would not be simplified to a
14525 constant, then return NULL_TREE. */
14527 tree
14528 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14530 tree tem = fold_binary (code, type, op0, op1);
14531 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
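/* Illustrative example: with C2 and C3 denoting INTEGER_CSTs of value 2
   and 3, fold_binary_to_constant (PLUS_EXPR, integer_type_node, c2, c3)
   returns the INTEGER_CST 5, while passing a non-constant operand makes
   the TREE_CONSTANT check fail and yields NULL_TREE. */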
14534 /* Given the components of a unary expression CODE, TYPE and OP0,
14535 attempt to fold the expression to a constant without modifying
14536 TYPE or OP0.
14538 If the expression could be simplified to a constant, then return
14539 the constant. If the expression would not be simplified to a
14540 constant, then return NULL_TREE. */
14542 tree
14543 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14545 tree tem = fold_unary (code, type, op0);
14546 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14549 /* If EXP represents referencing an element in a constant string
14550 (either via pointer arithmetic or array indexing), return the
14551 tree representing the value accessed, otherwise return NULL. */
14553 tree
14554 fold_read_from_constant_string (tree exp)
14556 if ((TREE_CODE (exp) == INDIRECT_REF
14557 || TREE_CODE (exp) == ARRAY_REF)
14558 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14560 tree exp1 = TREE_OPERAND (exp, 0);
14561 tree index;
14562 tree string;
14563 location_t loc = EXPR_LOCATION (exp);
14565 if (TREE_CODE (exp) == INDIRECT_REF)
14566 string = string_constant (exp1, &index, NULL, NULL);
14567 else
14569 tree low_bound = array_ref_low_bound (exp);
14570 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
14572 /* Optimize the special-case of a zero lower bound.
14574 We convert the low_bound to sizetype to avoid some problems
14575 with constant folding. (E.g. suppose the lower bound is 1,
14576 and its mode is QI. Without the conversion, (ARRAY
14577 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14578 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14579 if (! integer_zerop (low_bound))
14580 index = size_diffop_loc (loc, index,
14581 fold_convert_loc (loc, sizetype, low_bound));
14583 string = exp1;
14586 scalar_int_mode char_mode;
14587 if (string
14588 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14589 && TREE_CODE (string) == STRING_CST
14590 && TREE_CODE (index) == INTEGER_CST
14591 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14592 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
14593 &char_mode)
14594 && GET_MODE_SIZE (char_mode) == 1)
14595 return build_int_cst_type (TREE_TYPE (exp),
14596 (TREE_STRING_POINTER (string)
14597 [TREE_INT_CST_LOW (index)]));
14599 return NULL;
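/* For example, "abc"[1], i.e. an ARRAY_REF of a STRING_CST with the
   constant index 1, folds to the character constant 'b'. Reads with an
   out-of-range index, a non-constant index, or an element mode wider
   than one byte are left alone and NULL is returned. */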
14602 /* Folds a read from vector element at IDX of vector ARG. */
14604 tree
14605 fold_read_from_vector (tree arg, poly_uint64 idx)
14607 unsigned HOST_WIDE_INT i;
14608 if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
14609 && known_ge (idx, 0u)
14610 && idx.is_constant (&i))
14612 if (TREE_CODE (arg) == VECTOR_CST)
14613 return VECTOR_CST_ELT (arg, i);
14614 else if (TREE_CODE (arg) == CONSTRUCTOR)
14616 if (i >= CONSTRUCTOR_NELTS (arg))
14617 return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
14618 return CONSTRUCTOR_ELT (arg, i)->value;
14621 return NULL_TREE;
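/* For example, reading element 2 of the VECTOR_CST { 1, 2, 3, 4 } yields
   the INTEGER_CST 3, and reading past the explicit elements of a
   CONSTRUCTOR yields a zero of the element type, since trailing
   constructor elements are implicitly zero. */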
14624 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14625 an integer constant, real, or fixed-point constant.
14627 TYPE is the type of the result. */
14629 static tree
14630 fold_negate_const (tree arg0, tree type)
14632 tree t = NULL_TREE;
14634 switch (TREE_CODE (arg0))
14636 case REAL_CST:
14637 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14638 break;
14640 case FIXED_CST:
14642 FIXED_VALUE_TYPE f;
14643 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14644 &(TREE_FIXED_CST (arg0)), NULL,
14645 TYPE_SATURATING (type));
14646 t = build_fixed (type, f);
14647 /* Propagate overflow flags. */
14648 if (overflow_p | TREE_OVERFLOW (arg0))
14649 TREE_OVERFLOW (t) = 1;
14650 break;
14653 default:
14654 if (poly_int_tree_p (arg0))
14656 wi::overflow_type overflow;
14657 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
14658 t = force_fit_type (type, res, 1,
14659 (overflow && ! TYPE_UNSIGNED (type))
14660 || TREE_OVERFLOW (arg0));
14661 break;
14664 gcc_unreachable ();
14667 return t;
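/* Overflow note: negating the most negative value of a signed type wraps
   back to itself, so in that case force_fit_type is told to set
   TREE_OVERFLOW on the result; for unsigned TYPE the negation overflow is
   deliberately ignored, though an existing TREE_OVERFLOW on ARG0 still
   propagates. */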
14670 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14671 an integer constant or real constant.
14673 TYPE is the type of the result. */
14675 tree
14676 fold_abs_const (tree arg0, tree type)
14678 tree t = NULL_TREE;
14680 switch (TREE_CODE (arg0))
14682 case INTEGER_CST:
14684 /* If the value is unsigned or non-negative, then the absolute value
14685 is the same as the ordinary value. */
14686 wide_int val = wi::to_wide (arg0);
14687 wi::overflow_type overflow = wi::OVF_NONE;
14688 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
14689 ;
14691 /* If the value is negative, then the absolute value is
14692 its negation. */
14693 else
14694 val = wi::neg (val, &overflow);
14696 /* Force to the destination type, set TREE_OVERFLOW for signed
14697 TYPE only. */
14698 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
14700 break;
14702 case REAL_CST:
14703 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14704 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14705 else
14706 t = arg0;
14707 break;
14709 default:
14710 gcc_unreachable ();
14713 return t;
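/* As with fold_negate_const, the delicate input is the most negative
   value of a signed type: its negation is not representable, so wi::neg
   reports overflow and the result ends up flagged with TREE_OVERFLOW. */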
14716 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14717 constant. TYPE is the type of the result. */
14719 static tree
14720 fold_not_const (const_tree arg0, tree type)
14722 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14724 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
14727 /* Given CODE, a relational operator, the target type, TYPE and two
14728 constant operands OP0 and OP1, return the result of the
14729 relational operation. If the result is not a compile time
14730 constant, then return NULL_TREE. */
14732 static tree
14733 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14735 int result, invert;
14737 /* From here on, the only cases we handle are when the result is
14738 known to be a constant. */
14740 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14742 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14743 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14745 /* Handle the cases where either operand is a NaN. */
14746 if (real_isnan (c0) || real_isnan (c1))
14748 switch (code)
14750 case EQ_EXPR:
14751 case ORDERED_EXPR:
14752 result = 0;
14753 break;
14755 case NE_EXPR:
14756 case UNORDERED_EXPR:
14757 case UNLT_EXPR:
14758 case UNLE_EXPR:
14759 case UNGT_EXPR:
14760 case UNGE_EXPR:
14761 case UNEQ_EXPR:
14762 result = 1;
14763 break;
14765 case LT_EXPR:
14766 case LE_EXPR:
14767 case GT_EXPR:
14768 case GE_EXPR:
14769 case LTGT_EXPR:
14770 if (flag_trapping_math)
14771 return NULL_TREE;
14772 result = 0;
14773 break;
14775 default:
14776 gcc_unreachable ();
14779 return constant_boolean_node (result, type);
14782 return constant_boolean_node (real_compare (code, c0, c1), type);
14785 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14787 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14788 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14789 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14792 /* Handle equality/inequality of complex constants. */
14793 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14795 tree rcond = fold_relational_const (code, type,
14796 TREE_REALPART (op0),
14797 TREE_REALPART (op1));
14798 tree icond = fold_relational_const (code, type,
14799 TREE_IMAGPART (op0),
14800 TREE_IMAGPART (op1));
14801 if (code == EQ_EXPR)
14802 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14803 else if (code == NE_EXPR)
14804 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14805 else
14806 return NULL_TREE;
14809 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14811 if (!VECTOR_TYPE_P (type))
14813 /* Have vector comparison with scalar boolean result. */
14814 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
14815 && known_eq (VECTOR_CST_NELTS (op0),
14816 VECTOR_CST_NELTS (op1)));
14817 unsigned HOST_WIDE_INT nunits;
14818 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
14819 return NULL_TREE;
14820 for (unsigned i = 0; i < nunits; i++)
14822 tree elem0 = VECTOR_CST_ELT (op0, i);
14823 tree elem1 = VECTOR_CST_ELT (op1, i);
14824 tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
14825 if (tmp == NULL_TREE)
14826 return NULL_TREE;
14827 if (integer_zerop (tmp))
14828 return constant_boolean_node (code == NE_EXPR, type);
14830 return constant_boolean_node (code == EQ_EXPR, type);
14832 tree_vector_builder elts;
14833 if (!elts.new_binary_operation (type, op0, op1, false))
14834 return NULL_TREE;
14835 unsigned int count = elts.encoded_nelts ();
14836 for (unsigned i = 0; i < count; i++)
14838 tree elem_type = TREE_TYPE (type);
14839 tree elem0 = VECTOR_CST_ELT (op0, i);
14840 tree elem1 = VECTOR_CST_ELT (op1, i);
14842 tree tem = fold_relational_const (code, elem_type,
14843 elem0, elem1);
14845 if (tem == NULL_TREE)
14846 return NULL_TREE;
14848 elts.quick_push (build_int_cst (elem_type,
14849 integer_zerop (tem) ? 0 : -1));
14852 return elts.build ();
14855 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14857 To compute GT, swap the arguments and do LT.
14858 To compute GE, do LT and invert the result.
14859 To compute LE, swap the arguments, do LT and invert the result.
14860 To compute NE, do EQ and invert the result.
14862 Therefore, the code below must handle only EQ and LT. */
14864 if (code == LE_EXPR || code == GT_EXPR)
14866 std::swap (op0, op1);
14867 code = swap_tree_comparison (code);
14870 /* Note that it is safe to invert for real values here because we
14871 have already handled the one case that it matters. */
14873 invert = 0;
14874 if (code == NE_EXPR || code == GE_EXPR)
14876 invert = 1;
14877 code = invert_tree_comparison (code, false);
14880 /* Compute a result for LT or EQ if args permit;
14881 otherwise return NULL_TREE. */
14882 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14884 if (code == EQ_EXPR)
14885 result = tree_int_cst_equal (op0, op1);
14886 else
14887 result = tree_int_cst_lt (op0, op1);
14889 else
14890 return NULL_TREE;
14892 if (invert)
14893 result ^= 1;
14894 return constant_boolean_node (result, type);
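/* Worked example of the canonicalization above: 3 > 2 is evaluated by
   swapping the operands and testing 2 < 3, and 3 >= 2 is evaluated as
   the inversion of 3 < 2, so only tree_int_cst_equal and tree_int_cst_lt
   are ever needed for integer constants. */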
14897 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14898 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14899 itself. */
14901 tree
14902 fold_build_cleanup_point_expr (tree type, tree expr)
14904 /* If the expression does not have side effects then we don't have to wrap
14905 it with a cleanup point expression. */
14906 if (!TREE_SIDE_EFFECTS (expr))
14907 return expr;
14909 /* If the expression is a return, check whether the expression inside the
14910 return, or the right-hand side of the modify expression inside the
14911 return, is free of side effects. If either is, we don't need to wrap
14912 the expression in a cleanup point expression. Note we don't check the
14913 left hand side of the modify because it should always be a return decl. */
14914 if (TREE_CODE (expr) == RETURN_EXPR)
14916 tree op = TREE_OPERAND (expr, 0);
14917 if (!op || !TREE_SIDE_EFFECTS (op))
14918 return expr;
14919 op = TREE_OPERAND (op, 1);
14920 if (!TREE_SIDE_EFFECTS (op))
14921 return expr;
14924 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
14927 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14928 of an indirection through OP0, or NULL_TREE if no simplification is
14929 possible. */
14931 tree
14932 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14934 tree sub = op0;
14935 tree subtype;
14936 poly_uint64 const_op01;
14938 STRIP_NOPS (sub);
14939 subtype = TREE_TYPE (sub);
14940 if (!POINTER_TYPE_P (subtype)
14941 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
14942 return NULL_TREE;
14944 if (TREE_CODE (sub) == ADDR_EXPR)
14946 tree op = TREE_OPERAND (sub, 0);
14947 tree optype = TREE_TYPE (op);
14949 /* *&CONST_DECL -> to the value of the const decl. */
14950 if (TREE_CODE (op) == CONST_DECL)
14951 return DECL_INITIAL (op);
14952 /* *&p => p; make sure to handle *&"str"[cst] here. */
14953 if (type == optype)
14955 tree fop = fold_read_from_constant_string (op);
14956 if (fop)
14957 return fop;
14958 else
14959 return op;
14961 /* *(foo *)&fooarray => fooarray[0] */
14962 else if (TREE_CODE (optype) == ARRAY_TYPE
14963 && type == TREE_TYPE (optype)
14964 && (!in_gimple_form
14965 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14967 tree type_domain = TYPE_DOMAIN (optype);
14968 tree min_val = size_zero_node;
14969 if (type_domain && TYPE_MIN_VALUE (type_domain))
14970 min_val = TYPE_MIN_VALUE (type_domain);
14971 if (in_gimple_form
14972 && TREE_CODE (min_val) != INTEGER_CST)
14973 return NULL_TREE;
14974 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14975 NULL_TREE, NULL_TREE);
14977 /* *(foo *)&complexfoo => __real__ complexfoo */
14978 else if (TREE_CODE (optype) == COMPLEX_TYPE
14979 && type == TREE_TYPE (optype))
14980 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14981 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14982 else if (VECTOR_TYPE_P (optype)
14983 && type == TREE_TYPE (optype))
14985 tree part_width = TYPE_SIZE (type);
14986 tree index = bitsize_int (0);
14987 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
14988 index);
14992 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14993 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
14995 tree op00 = TREE_OPERAND (sub, 0);
14996 tree op01 = TREE_OPERAND (sub, 1);
14998 STRIP_NOPS (op00);
14999 if (TREE_CODE (op00) == ADDR_EXPR)
15001 tree op00type;
15002 op00 = TREE_OPERAND (op00, 0);
15003 op00type = TREE_TYPE (op00);
15005 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15006 if (VECTOR_TYPE_P (op00type)
15007 && type == TREE_TYPE (op00type)
15008 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
15009 but we want to treat offsets with MSB set as negative.
15010 For the code below negative offsets are invalid and
15011 TYPE_SIZE of the element is something unsigned, so
15012 check whether op01 fits into poly_int64, which implies
15013 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
15014 then just use poly_uint64 because we want to treat the
15015 value as unsigned. */
15016 && tree_fits_poly_int64_p (op01))
15018 tree part_width = TYPE_SIZE (type);
15019 poly_uint64 max_offset
15020 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
15021 * TYPE_VECTOR_SUBPARTS (op00type));
15022 if (known_lt (const_op01, max_offset))
15024 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
15025 return fold_build3_loc (loc,
15026 BIT_FIELD_REF, type, op00,
15027 part_width, index);
15030 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15031 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15032 && type == TREE_TYPE (op00type))
15034 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
15035 const_op01))
15036 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15038 /* ((foo *)&fooarray)[1] => fooarray[1] */
15039 else if (TREE_CODE (op00type) == ARRAY_TYPE
15040 && type == TREE_TYPE (op00type))
15042 tree type_domain = TYPE_DOMAIN (op00type);
15043 tree min_val = size_zero_node;
15044 if (type_domain && TYPE_MIN_VALUE (type_domain))
15045 min_val = TYPE_MIN_VALUE (type_domain);
15046 poly_uint64 type_size, index;
15047 if (poly_int_tree_p (min_val)
15048 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
15049 && multiple_p (const_op01, type_size, &index))
15051 poly_offset_int off = index + wi::to_poly_offset (min_val);
15052 op01 = wide_int_to_tree (sizetype, off);
15053 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15054 NULL_TREE, NULL_TREE);
15060 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15061 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15062 && type == TREE_TYPE (TREE_TYPE (subtype))
15063 && (!in_gimple_form
15064 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15066 tree type_domain;
15067 tree min_val = size_zero_node;
15068 sub = build_fold_indirect_ref_loc (loc, sub);
15069 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15070 if (type_domain && TYPE_MIN_VALUE (type_domain))
15071 min_val = TYPE_MIN_VALUE (type_domain);
15072 if (in_gimple_form
15073 && TREE_CODE (min_val) != INTEGER_CST)
15074 return NULL_TREE;
15075 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15076 NULL_TREE);
15079 return NULL_TREE;
15082 /* Builds an expression for an indirection through T, simplifying some
15083 cases. */
15085 tree
15086 build_fold_indirect_ref_loc (location_t loc, tree t)
15088 tree type = TREE_TYPE (TREE_TYPE (t));
15089 tree sub = fold_indirect_ref_1 (loc, type, t);
15091 if (sub)
15092 return sub;
15094 return build1_loc (loc, INDIRECT_REF, type, t);
15097 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15099 tree
15100 fold_indirect_ref_loc (location_t loc, tree t)
15102 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15104 if (sub)
15105 return sub;
15106 else
15107 return t;
15110 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15111 whose result is ignored. The type of the returned tree need not be
15112 the same as the original expression. */
15114 tree
15115 fold_ignored_result (tree t)
15117 if (!TREE_SIDE_EFFECTS (t))
15118 return integer_zero_node;
15120 for (;;)
15121 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15123 case tcc_unary:
15124 t = TREE_OPERAND (t, 0);
15125 break;
15127 case tcc_binary:
15128 case tcc_comparison:
15129 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15130 t = TREE_OPERAND (t, 0);
15131 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15132 t = TREE_OPERAND (t, 1);
15133 else
15134 return t;
15135 break;
15137 case tcc_expression:
15138 switch (TREE_CODE (t))
15140 case COMPOUND_EXPR:
15141 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15142 return t;
15143 t = TREE_OPERAND (t, 0);
15144 break;
15146 case COND_EXPR:
15147 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15148 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15149 return t;
15150 t = TREE_OPERAND (t, 0);
15151 break;
15153 default:
15154 return t;
15156 break;
15158 default:
15159 return t;
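/* For example, for T = x + f () where only the call has side effects,
   the addition is stripped and f () is returned; likewise, for a
   COND_EXPR whose arms are free of side effects, only the condition is
   kept. */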
15163 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15165 tree
15166 round_up_loc (location_t loc, tree value, unsigned int divisor)
15168 tree div = NULL_TREE;
15170 if (divisor == 1)
15171 return value;
15173 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15174 have to do anything. Only do this when we are not given a const,
15175 because in that case, this check is more expensive than just
15176 doing it. */
15177 if (TREE_CODE (value) != INTEGER_CST)
15179 div = build_int_cst (TREE_TYPE (value), divisor);
15181 if (multiple_of_p (TREE_TYPE (value), value, div))
15182 return value;
15185 /* If divisor is a power of two, simplify this to bit manipulation. */
15186 if (pow2_or_zerop (divisor))
15188 if (TREE_CODE (value) == INTEGER_CST)
15190 wide_int val = wi::to_wide (value);
15191 bool overflow_p;
15193 if ((val & (divisor - 1)) == 0)
15194 return value;
15196 overflow_p = TREE_OVERFLOW (value);
15197 val += divisor - 1;
15198 val &= (int) -divisor;
15199 if (val == 0)
15200 overflow_p = true;
15202 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
15204 else
15206 tree t;
15208 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15209 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15210 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
15211 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15214 else
15216 if (!div)
15217 div = build_int_cst (TREE_TYPE (value), divisor);
15218 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15219 value = size_binop_loc (loc, MULT_EXPR, value, div);
15222 return value;
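/* A small worked example, assuming a power-of-two divisor: rounding 37
   up to a multiple of 8 computes (37 + 7) & -8 == 40. A divisor such as
   12 instead goes through the CEIL_DIV_EXPR / MULT_EXPR path. */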
15225 /* Likewise, but round down. */
15227 tree
15228 round_down_loc (location_t loc, tree value, int divisor)
15230 tree div = NULL_TREE;
15232 gcc_assert (divisor > 0);
15233 if (divisor == 1)
15234 return value;
15236 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15237 have to do anything. Only do this when we are not given a const,
15238 because in that case, this check is more expensive than just
15239 doing it. */
15240 if (TREE_CODE (value) != INTEGER_CST)
15242 div = build_int_cst (TREE_TYPE (value), divisor);
15244 if (multiple_of_p (TREE_TYPE (value), value, div))
15245 return value;
15248 /* If divisor is a power of two, simplify this to bit manipulation. */
15249 if (pow2_or_zerop (divisor))
15251 tree t;
15253 t = build_int_cst (TREE_TYPE (value), -divisor);
15254 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15256 else
15258 if (!div)
15259 div = build_int_cst (TREE_TYPE (value), divisor);
15260 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15261 value = size_binop_loc (loc, MULT_EXPR, value, div);
15264 return value;
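/* The power-of-two case here needs no addition: rounding 37 down to a
   multiple of 8 is simply 37 & -8 == 32. */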
15267 /* Returns the pointer to the base of the object addressed by EXP and
15268 extracts the information about the offset of the access, storing it
15269 to PBITPOS and POFFSET. */
15271 static tree
15272 split_address_to_core_and_offset (tree exp,
15273 poly_int64_pod *pbitpos, tree *poffset)
15275 tree core;
15276 machine_mode mode;
15277 int unsignedp, reversep, volatilep;
15278 poly_int64 bitsize;
15279 location_t loc = EXPR_LOCATION (exp);
15281 if (TREE_CODE (exp) == ADDR_EXPR)
15283 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15284 poffset, &mode, &unsignedp, &reversep,
15285 &volatilep);
15286 core = build_fold_addr_expr_loc (loc, core);
15288 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
15290 core = TREE_OPERAND (exp, 0);
15291 STRIP_NOPS (core);
15292 *pbitpos = 0;
15293 *poffset = TREE_OPERAND (exp, 1);
15294 if (poly_int_tree_p (*poffset))
15296 poly_offset_int tem
15297 = wi::sext (wi::to_poly_offset (*poffset),
15298 TYPE_PRECISION (TREE_TYPE (*poffset)));
15299 tem <<= LOG2_BITS_PER_UNIT;
15300 if (tem.to_shwi (pbitpos))
15301 *poffset = NULL_TREE;
15304 else
15306 core = exp;
15307 *pbitpos = 0;
15308 *poffset = NULL_TREE;
15311 return core;
15314 /* Returns true if addresses of E1 and E2 differ by a constant, false
15315 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15317 bool
15318 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
15320 tree core1, core2;
15321 poly_int64 bitpos1, bitpos2;
15322 tree toffset1, toffset2, tdiff, type;
15324 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15325 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15327 poly_int64 bytepos1, bytepos2;
15328 if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
15329 || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
15330 || !operand_equal_p (core1, core2, 0))
15331 return false;
15333 if (toffset1 && toffset2)
15335 type = TREE_TYPE (toffset1);
15336 if (type != TREE_TYPE (toffset2))
15337 toffset2 = fold_convert (type, toffset2);
15339 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15340 if (!cst_and_fits_in_hwi (tdiff))
15341 return false;
15343 *diff = int_cst_value (tdiff);
15345 else if (toffset1 || toffset2)
15347 /* If only one of the offsets is non-constant, the difference cannot
15348 be a constant. */
15349 return false;
15351 else
15352 *diff = 0;
15354 *diff += bytepos1 - bytepos2;
15355 return true;
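/* For example, for E1 = &a[3] and E2 = &a[1] with 4-byte int elements
   (an illustrative target assumption), both addresses share the core A
   and *DIFF is set to 8. */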
15358 /* Return OFF converted to a pointer offset type suitable as offset for
15359 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
15360 tree
15361 convert_to_ptrofftype_loc (location_t loc, tree off)
15363 return fold_convert_loc (loc, sizetype, off);
15366 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15367 tree
15368 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
15370 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15371 ptr, convert_to_ptrofftype_loc (loc, off));
15374 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15375 tree
15376 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
15378 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15379 ptr, size_int (off));
15382 /* Return a pointer P to a NUL-terminated string representing the sequence
15383 of constant characters referred to by SRC (or a subsequence of such
15384 characters within it if SRC is a reference to a string plus some
15385 constant offset). If STRLEN is non-null, store the number of bytes
15386 in the string constant including the terminating NUL char. *STRLEN is
15387 typically strlen(P) + 1 in the absence of embedded NUL characters. */
15389 const char *
15390 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen /* = NULL */)
15392 tree offset_node;
15393 tree mem_size;
15395 if (strlen)
15396 *strlen = 0;
15398 src = string_constant (src, &offset_node, &mem_size, NULL);
15399 if (src == 0)
15400 return NULL;
15402 unsigned HOST_WIDE_INT offset = 0;
15403 if (offset_node != NULL_TREE)
15405 if (!tree_fits_uhwi_p (offset_node))
15406 return NULL;
15407 else
15408 offset = tree_to_uhwi (offset_node);
15411 if (!tree_fits_uhwi_p (mem_size))
15412 return NULL;
15414 /* STRING_LENGTH is the size of the string literal, including any
15415 embedded NULs. STRING_SIZE is the size of the array the string
15416 literal is stored in. */
15417 unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
15418 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size);
15420 /* Ideally this would turn into a gcc_checking_assert over time. */
15421 if (string_length > string_size)
15422 string_length = string_size;
15424 const char *string = TREE_STRING_POINTER (src);
15430 if (string_length == 0
15431 || offset >= string_size)
15432 return NULL;
15434 if (strlen)
15436 /* Compute and store the length of the substring at OFFSET.
15437 All offsets past the initial length refer to null strings. */
15438 if (offset < string_length)
15439 *strlen = string_length - offset;
15440 else
15441 *strlen = 1;
15443 else
15445 tree eltype = TREE_TYPE (TREE_TYPE (src));
15446 /* Support only properly NUL-terminated single byte strings. */
15447 if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
15448 return NULL;
15449 if (string[string_length - 1] != '\0')
15450 return NULL;
15453 return offset < string_length ? string + offset : "";
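/* Example use: for SRC = &"hello"[1], string_constant finds the
   STRING_CST "hello" with offset 1, so c_getstr returns a pointer to
   "ello" and, if requested, stores 5 in *STRLEN (four characters plus
   the terminating NUL). */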
15456 /* Given a tree T, compute which bits in T may be nonzero. */
15458 wide_int
15459 tree_nonzero_bits (const_tree t)
15461 switch (TREE_CODE (t))
15463 case INTEGER_CST:
15464 return wi::to_wide (t);
15465 case SSA_NAME:
15466 return get_nonzero_bits (t);
15467 case NON_LVALUE_EXPR:
15468 case SAVE_EXPR:
15469 return tree_nonzero_bits (TREE_OPERAND (t, 0));
15470 case BIT_AND_EXPR:
15471 return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15472 tree_nonzero_bits (TREE_OPERAND (t, 1)));
15473 case BIT_IOR_EXPR:
15474 case BIT_XOR_EXPR:
15475 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15476 tree_nonzero_bits (TREE_OPERAND (t, 1)));
15477 case COND_EXPR:
15478 return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
15479 tree_nonzero_bits (TREE_OPERAND (t, 2)));
15480 CASE_CONVERT:
15481 return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
15482 TYPE_PRECISION (TREE_TYPE (t)),
15483 TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
15484 case PLUS_EXPR:
15485 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
15487 wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
15488 wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
15489 if (wi::bit_and (nzbits1, nzbits2) == 0)
15490 return wi::bit_or (nzbits1, nzbits2);
15492 break;
15493 case LSHIFT_EXPR:
15494 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
15496 tree type = TREE_TYPE (t);
15497 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
15498 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
15499 TYPE_PRECISION (type));
15500 return wi::neg_p (arg1)
15501 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
15502 : wi::lshift (nzbits, arg1);
15504 break;
15505 case RSHIFT_EXPR:
15506 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
15508 tree type = TREE_TYPE (t);
15509 wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
15510 wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
15511 TYPE_PRECISION (type));
15512 return wi::neg_p (arg1)
15513 ? wi::lshift (nzbits, -arg1)
15514 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
15516 break;
15517 default:
15518 break;
15521 return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
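/* For example, X & 0xff can have at most the low eight bits set, and in
   (X & 4) + (Y & 1) the operands' possible bits are disjoint, so the sum
   cannot carry and the result is the union 0x5. Anything unrecognized
   conservatively reports all bits as possibly nonzero. */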
15524 #if CHECKING_P
15526 namespace selftest {
15528 /* Helper functions for writing tests of folding trees. */
15530 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
15532 static void
15533 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
15534 tree constant)
15536 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
15539 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
15540 wrapping WRAPPED_EXPR. */
15542 static void
15543 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
15544 tree wrapped_expr)
15546 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
15547 ASSERT_NE (wrapped_expr, result);
15548 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
15549 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
15552 /* Verify that various arithmetic binary operations are folded
15553 correctly. */
15555 static void
15556 test_arithmetic_folding ()
15558 tree type = integer_type_node;
15559 tree x = create_tmp_var_raw (type, "x");
15560 tree zero = build_zero_cst (type);
15561 tree one = build_int_cst (type, 1);
15563 /* Addition. */
15564 /* 1 <-- (0 + 1) */
15565 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
15566 one);
15567 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
15568 one);
15570 /* (nonlvalue)x <-- (x + 0) */
15571 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
15572 x);
15574 /* Subtraction. */
15575 /* 0 <-- (x - x) */
15576 assert_binop_folds_to_const (x, MINUS_EXPR, x,
15577 zero);
15578 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
15579 x);
15581 /* Multiplication. */
15582 /* 0 <-- (x * 0) */
15583 assert_binop_folds_to_const (x, MULT_EXPR, zero,
15584 zero);
15586 /* (nonlvalue)x <-- (x * 1) */
15587 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
15588 x);
15591 /* Verify that various binary operations on vectors are folded
15592 correctly. */
15594 static void
15595 test_vector_folding ()
15597 tree inner_type = integer_type_node;
15598 tree type = build_vector_type (inner_type, 4);
15599 tree zero = build_zero_cst (type);
15600 tree one = build_one_cst (type);
15601 tree index = build_index_vector (type, 0, 1);
15603 /* Verify equality tests that return a scalar boolean result. */
15604 tree res_type = boolean_type_node;
15605 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
15606 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
15607 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
15608 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
15609 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
15610 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
15611 index, one)));
15612 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
15613 index, index)));
15614 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
15615 index, index)));
15618 /* Verify folding of VEC_DUPLICATE_EXPRs. */
15620 static void
15621 test_vec_duplicate_folding ()
15623 scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
15624 machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
15625 /* This will be 1 if VEC_MODE isn't a vector mode. */
15626 poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
15628 tree type = build_vector_type (ssizetype, nunits);
15629 tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
15630 tree dup5_cst = build_vector_from_val (type, ssize_int (5));
15631 ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
15634 /* Run all of the selftests within this file. */
15636 void
15637 fold_const_c_tests ()
15639 test_arithmetic_folding ();
15640 test_vector_folding ();
15641 test_vec_duplicate_folding ();
15644 } // namespace selftest
15646 #endif /* CHECKING_P */