/* gcc/fold-const.c (official-gcc.git) */
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "params.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "attribs.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
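
/* Editor's illustration (not part of the original file): the four bits
   of the encoding stand for LT, EQ, GT and UNORD, so ORing two codes
   yields the code of the disjunction and ANDing yields the conjunction.
   A minimal sketch, assuming STATIC_ASSERT from system.h:  */
#if 0
/* (a < b) || (a == b)  is  (a <= b).  */
STATIC_ASSERT ((COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE);
/* (a <= b) && (a >= b)  is  (a == b).  */
STATIC_ASSERT ((COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ);
/* The ordered and unordered codes partition COMPCODE_TRUE.  */
STATIC_ASSERT ((COMPCODE_ORD | COMPCODE_UNORD) == COMPCODE_TRUE);
#endif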

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
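
/* Editor's usage sketch (not part of the original file): exact division
   succeeds only when the remainder is zero.  */
#if 0
tree twelve = build_int_cst (integer_type_node, 12);
tree four = build_int_cst (integer_type_node, 4);
tree five = build_int_cst (integer_type_node, 5);
tree q1 = div_if_zero_remainder (twelve, four);  /* INTEGER_CST 3 */
tree q2 = div_if_zero_remainder (twelve, five);  /* NULL_TREE */
#endif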

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
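
/* Editor's usage sketch (not part of the original file): a caller that
   folds speculatively brackets the work with defer/undefer so that
   -Wstrict-overflow fires only if the folded result is actually used.
   RESULT_IS_USED_P and STMT are hypothetical stand-ins for the
   caller's own logic.  */
#if 0
fold_defer_overflow_warnings ();
tree folded = fold (expr);	/* may queue a strict-overflow warning */
bool used = result_is_used_p (folded);
fold_undefer_overflow_warnings (used, stmt, 0);
#endif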

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
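
/* Editor's illustration (not part of the original file): sin is odd, so
   -sin(x) may be rewritten as sin(-x); cos is even, so it may not.
   rint is only treated as odd under !flag_rounding_math because a
   directed rounding mode such as FE_DOWNWARD is not symmetric about
   zero.  */
#if 0
gcc_checking_assert (negate_mathfn_p (CFN_SIN));
gcc_checking_assert (!negate_mathfn_p (CFN_COS));
#endif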

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
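
/* Editor's illustration (not part of the original file): in a signed
   type the only value whose negation overflows is the minimum, which
   is exactly the "only the sign bit set" pattern tested above.  */
#if 0
gcc_checking_assert
  (!may_negate_without_overflow_p (TYPE_MIN_VALUE (integer_type_node)));
gcc_checking_assert
  (may_negate_without_overflow_p (TYPE_MAX_VALUE (integer_type_node)));
#endif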

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* Negating INT_MIN/n * n doesn't overflow, but negating one
	 operand does if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
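
/* Editor's usage sketch (not part of the original file): when signed
   zeros and sign-dependent rounding need not be honored, negate_expr
   folds -(a - b) into b - a instead of wrapping the subtraction in a
   NEGATE_EXPR.  A and B are assumed to be integer-typed trees already
   in scope.  */
#if 0
tree diff = fold_build2 (MINUS_EXPR, integer_type_node, a, b);
tree neg = negate_expr (diff);	/* b - a rather than -(a - b) */
#endif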

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
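
/* Editor's usage sketch (not part of the original file): splitting
   x - 4 with code == PLUS_EXPR puts the literal on the MINUS side so
   callers can reassociate constants across a chain of additions and
   subtractions.  EXPR and TYPE are assumed in scope.  */
#if 0
tree minus_var, con, minus_con, lit, minus_lit;
tree var = split_tree (expr /* x - 4 */, type, PLUS_EXPR,
		       &minus_var, &con, &minus_con,
		       &lit, &minus_lit, /*negate_p=*/0);
/* Result: var == x, minus_lit == 4, all other parts NULL_TREE.  */
#endif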

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Subroutine of int_const_binop_1 that handles two INTEGER_CSTs.  */

static tree
int_const_binop_2 (enum tree_code code, const_tree parg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (parg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  wi::tree_to_wide_ref arg1 = wi::to_wide (parg1);
  wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (parg1) | TREE_OVERFLOW (parg2)));

  return t;
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
		   int overflowable)
{
  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    return int_const_binop_2 (code, arg1, arg2, overflowable);

  gcc_assert (NUM_POLY_INT_COEFFS != 1);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      poly_wide_int res;
      wi::overflow_type overflow;
      tree type = TREE_TYPE (arg1);
      signop sign = TYPE_SIGN (type);
      switch (code)
	{
	case PLUS_EXPR:
	  res = wi::add (wi::to_poly_wide (arg1),
			 wi::to_poly_wide (arg2), sign, &overflow);
	  break;

	case MINUS_EXPR:
	  res = wi::sub (wi::to_poly_wide (arg1),
			 wi::to_poly_wide (arg2), sign, &overflow);
	  break;

	case MULT_EXPR:
	  if (TREE_CODE (arg2) == INTEGER_CST)
	    res = wi::mul (wi::to_poly_wide (arg1),
			   wi::to_wide (arg2), sign, &overflow);
	  else if (TREE_CODE (arg1) == INTEGER_CST)
	    res = wi::mul (wi::to_poly_wide (arg2),
			   wi::to_wide (arg1), sign, &overflow);
	  else
	    return NULL_TREE;
	  break;

	case LSHIFT_EXPR:
	  if (TREE_CODE (arg2) == INTEGER_CST)
	    res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
	  else
	    return NULL_TREE;
	  break;

	case BIT_IOR_EXPR:
	  if (TREE_CODE (arg2) != INTEGER_CST
	      || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			     &res))
	    return NULL_TREE;
	  break;

	default:
	  return NULL_TREE;
	}
      return force_fit_type (type, res, overflowable,
			     (((sign == SIGNED || overflowable == -1)
			       && overflow)
			      | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
    }

  return NULL_TREE;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
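
/* Editor's usage sketch (not part of the original file): compile-time
   evaluation of 2 + 3 over int constants.  */
#if 0
tree two = build_int_cst (integer_type_node, 2);
tree three = build_int_cst (integer_type_node, 3);
tree five = int_const_binop (PLUS_EXPR, two, three);
gcc_checking_assert (tree_to_shwi (five) == 5);
#endif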

/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	   a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
	   implies
	   (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	   a3 - a2 == a2 - a1
	   implies:
	   (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
      else
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);

      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}
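
/* Editor's worked example (not part of the original file): with
   flag_complex_method == 0 the straight algorithm above evaluates
   (1 + 2i) / (3 + 4i) as t = 3*3 + 4*4 = 25,
   real = (1*3 + 2*4) / 25 = 11/25, imag = (2*3 - 1*4) / 25 = 2/25.  */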

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case VEC_SERIES_EXPR:
      if (CONSTANT_CLASS_P (arg1)
	  && CONSTANT_CLASS_P (arg2))
	return build_vec_series (type, arg1, arg2);
      return NULL_TREE;

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
	{
	  poly_offset_int res = (wi::to_poly_offset (arg1)
				 - wi::to_poly_offset (arg2));
	  return force_fit_type (type, res, 1,
				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
	}
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;

	out_nelts = in_nelts * 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      ? NOP_EXPR
				      : code == VEC_PACK_FLOAT_EXPR
				      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	tree_vector_builder elts (type, out_nelts, 1);
	for (out = 0; out < out_nelts; out++)
	  {
	    unsigned int in = (out << scale) + ofs;
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}

/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR
      && code != ABSU_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
    case ABSU_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (POLY_INT_CST_P (arg0))
	return wide_int_to_tree (type, -poly_int_cst_value (arg0));
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elem;

	  /* This can cope with stepped encodings because ~x == -1 - x.  */
	  tree_vector_builder elements;
	  elements.new_unary_operation (type, arg0, true);
	  unsigned int i, count = elements.encoded_nelts ();
	  for (i = 0; i < count; ++i)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements.quick_push (elem);
	    }
	  if (i == count)
	    return elements.build ();
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	unsigned int offset = 0;
	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR
				   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
	  offset = out_nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else if (code == VEC_UNPACK_FLOAT_LO_EXPR
		 || code == VEC_UNPACK_FLOAT_HI_EXPR)
	  subcode = FLOAT_EXPR;
	else
	  subcode = FIX_TRUNC_EXPR;

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
					   VECTOR_CST_ELT (arg0, i + offset));
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_DUPLICATE_EXPR:
      if (CONSTANT_CLASS_P (arg0))
	return build_vector_from_val (type, arg0);
      return NULL_TREE;

    default:
      break;
    }

  return NULL_TREE;
}
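
/* Editor's usage sketch (not part of the original file): unary folding
   of ~5 at compile time.  */
#if 0
tree five = build_int_cst (integer_type_node, 5);
tree not5 = const_unop (BIT_NOT_EXPR, integer_type_node, five);
gcc_checking_assert (tree_to_shwi (not5) == -6);	/* ~5 == -6 */
#endif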

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (poly_int64 number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two poly_int constants faster.  */
  if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      tree res = int_const_binop_1 (code, arg0, arg1, -1);
      if (res != NULL_TREE)
	return res;
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
1936 /* Given two values, either both of sizetype or both of bitsizetype,
1937 compute the difference between the two values. Return the value
1938 in signed type corresponding to the type of the operands. */
1940 tree
1941 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1943 tree type = TREE_TYPE (arg0);
1944 tree ctype;
1946 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1947 TREE_TYPE (arg1)));
1949 /* If the type is already signed, just do the simple thing. */
1950 if (!TYPE_UNSIGNED (type))
1951 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1953 if (type == sizetype)
1954 ctype = ssizetype;
1955 else if (type == bitsizetype)
1956 ctype = sbitsizetype;
1957 else
1958 ctype = signed_type_for (type);
1960 /* If either operand is not a constant, do the conversions to the signed
1961 type and subtract. The hardware will do the right thing with any
1962 overflow in the subtraction. */
1963 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1964 return size_binop_loc (loc, MINUS_EXPR,
1965 fold_convert_loc (loc, ctype, arg0),
1966 fold_convert_loc (loc, ctype, arg1));
1968 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1969 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1970 overflow) and negate (which can't either). Special-case a result
1971 of zero while we're here. */
1972 if (tree_int_cst_equal (arg0, arg1))
1973 return build_int_cst (ctype, 0);
1974 else if (tree_int_cst_lt (arg1, arg0))
1975 return fold_convert_loc (loc, ctype,
1976 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1977 else
1978 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1979 fold_convert_loc (loc, ctype,
1980 size_binop_loc (loc,
1981 MINUS_EXPR,
1982 arg1, arg0)));
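/* Illustrative sketch (editor's addition, hypothetical helper): sizetype
   is unsigned, so the point of size_diffop is that the difference comes
   back in the signed counterpart instead of wrapping.  */
#if 0
static tree
example_size_diffop (void)
{
  tree d = size_diffop (size_int (4), size_int (12));
  gcc_assert (TREE_TYPE (d) == ssizetype && tree_to_shwi (d) == -8);
  return d;
}
#endif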
1985 /* A subroutine of fold_convert_const handling conversions of an
1986 INTEGER_CST to another integer type. */
1988 static tree
1989 fold_convert_const_int_from_int (tree type, const_tree arg1)
1991 /* Given an integer constant, make a new constant with the new type,
1992 appropriately sign-extended or truncated. Use widest_int
1993 so that any extension is done according to ARG1's type. */
1994 return force_fit_type (type, wi::to_widest (arg1),
1995 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1996 TREE_OVERFLOW (arg1));
1999 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2000 to an integer type. */
2002 static tree
2003 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2005 bool overflow = false;
2006 tree t;
2008 /* The following code implements the floating point to integer
2009 conversion rules required by the Java Language Specification, namely
2010 that IEEE NaNs are mapped to zero and values that overflow
2011 the target precision saturate, i.e. values greater than
2012 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2013 are mapped to INT_MIN. These semantics are allowed by the
2014 C and C++ standards that simply state that the behavior of
2015 FP-to-integer conversion is unspecified upon overflow. */
2017 wide_int val;
2018 REAL_VALUE_TYPE r;
2019 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2021 switch (code)
2023 case FIX_TRUNC_EXPR:
2024 real_trunc (&r, VOIDmode, &x);
2025 break;
2027 default:
2028 gcc_unreachable ();
2031 /* If R is NaN, return zero and show we have an overflow. */
2032 if (REAL_VALUE_ISNAN (r))
2034 overflow = true;
2035 val = wi::zero (TYPE_PRECISION (type));
2038 /* See if R is less than the lower bound or greater than the
2039 upper bound. */
2041 if (! overflow)
2043 tree lt = TYPE_MIN_VALUE (type);
2044 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2045 if (real_less (&r, &l))
2047 overflow = true;
2048 val = wi::to_wide (lt);
2052 if (! overflow)
2054 tree ut = TYPE_MAX_VALUE (type);
2055 if (ut)
2057 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2058 if (real_less (&u, &r))
2060 overflow = true;
2061 val = wi::to_wide (ut);
2066 if (! overflow)
2067 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2069 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2070 return t;
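/* Illustrative sketch (editor's addition, hypothetical helper): this
   routine is normally reached when folding FIX_TRUNC_EXPR of a REAL_CST,
   e.g. via fold_build1 as below; 3.7 folds to 3, while an out-of-range
   value would saturate and set TREE_OVERFLOW on the result.  */
#if 0
static tree
example_fix_trunc (void)
{
  REAL_VALUE_TYPE d;
  real_from_string (&d, "3.7");
  return fold_build1 (FIX_TRUNC_EXPR, integer_type_node,
		      build_real (double_type_node, d));
}
#endif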
2073 /* A subroutine of fold_convert_const handling conversions of a
2074 FIXED_CST to an integer type. */
2076 static tree
2077 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2079 tree t;
2080 double_int temp, temp_trunc;
2081 scalar_mode mode;
2083 /* Right shift FIXED_CST to temp by fbit. */
2084 temp = TREE_FIXED_CST (arg1).data;
2085 mode = TREE_FIXED_CST (arg1).mode;
2086 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2088 temp = temp.rshift (GET_MODE_FBIT (mode),
2089 HOST_BITS_PER_DOUBLE_INT,
2090 SIGNED_FIXED_POINT_MODE_P (mode));
2092 /* Left shift temp to temp_trunc by fbit. */
2093 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2094 HOST_BITS_PER_DOUBLE_INT,
2095 SIGNED_FIXED_POINT_MODE_P (mode));
2097 else
2099 temp = double_int_zero;
2100 temp_trunc = double_int_zero;
2103 /* If FIXED_CST is negative, we need to round the value toward 0:
2104 if any of the fractional bits are nonzero, add 1 to temp. */
2105 if (SIGNED_FIXED_POINT_MODE_P (mode)
2106 && temp_trunc.is_negative ()
2107 && TREE_FIXED_CST (arg1).data != temp_trunc)
2108 temp += double_int_one;
2110 /* Given a fixed-point constant, make a new constant with the new type,
2111 appropriately sign-extended or truncated. */
2112 t = force_fit_type (type, temp, -1,
2113 (temp.is_negative ()
2114 && (TYPE_UNSIGNED (type)
2115 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2116 | TREE_OVERFLOW (arg1));
2118 return t;
2121 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2122 to another floating point type. */
2124 static tree
2125 fold_convert_const_real_from_real (tree type, const_tree arg1)
2127 REAL_VALUE_TYPE value;
2128 tree t;
2130 /* Don't perform the operation if flag_signaling_nans is on
2131 and the operand is a signaling NaN. */
2132 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2133 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2134 return NULL_TREE;
2136 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2137 t = build_real (type, value);
2139 /* If converting an infinity or NAN to a representation that doesn't
2140 have one, set the overflow bit so that we can produce some kind of
2141 error message at the appropriate point if necessary. It's not the
2142 most user-friendly message, but it's better than nothing. */
2143 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2144 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2145 TREE_OVERFLOW (t) = 1;
2146 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2147 && !MODE_HAS_NANS (TYPE_MODE (type)))
2148 TREE_OVERFLOW (t) = 1;
2149 /* Regular overflow: the conversion produced an infinity in a mode
2150 that can't represent infinities. */
2151 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2152 && REAL_VALUE_ISINF (value)
2153 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2154 TREE_OVERFLOW (t) = 1;
2155 else
2156 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2157 return t;
2160 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2161 to a floating point type. */
2163 static tree
2164 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2166 REAL_VALUE_TYPE value;
2167 tree t;
2169 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2170 &TREE_FIXED_CST (arg1));
2171 t = build_real (type, value);
2173 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2174 return t;
2177 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2178 to another fixed-point type. */
2180 static tree
2181 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2183 FIXED_VALUE_TYPE value;
2184 tree t;
2185 bool overflow_p;
2187 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2188 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2189 t = build_fixed (type, value);
2191 /* Propagate overflow flags. */
2192 if (overflow_p | TREE_OVERFLOW (arg1))
2193 TREE_OVERFLOW (t) = 1;
2194 return t;
2197 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2198 to a fixed-point type. */
2200 static tree
2201 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2203 FIXED_VALUE_TYPE value;
2204 tree t;
2205 bool overflow_p;
2206 double_int di;
2208 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2210 di.low = TREE_INT_CST_ELT (arg1, 0);
2211 if (TREE_INT_CST_NUNITS (arg1) == 1)
2212 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2213 else
2214 di.high = TREE_INT_CST_ELT (arg1, 1);
2216 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2217 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2218 TYPE_SATURATING (type));
2219 t = build_fixed (type, value);
2221 /* Propagate overflow flags. */
2222 if (overflow_p | TREE_OVERFLOW (arg1))
2223 TREE_OVERFLOW (t) = 1;
2224 return t;
2227 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2228 to a fixed-point type. */
2230 static tree
2231 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2233 FIXED_VALUE_TYPE value;
2234 tree t;
2235 bool overflow_p;
2237 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2238 &TREE_REAL_CST (arg1),
2239 TYPE_SATURATING (type));
2240 t = build_fixed (type, value);
2242 /* Propagate overflow flags. */
2243 if (overflow_p | TREE_OVERFLOW (arg1))
2244 TREE_OVERFLOW (t) = 1;
2245 return t;
2248 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2249 type TYPE. If no simplification can be done return NULL_TREE. */
2251 static tree
2252 fold_convert_const (enum tree_code code, tree type, tree arg1)
2254 tree arg_type = TREE_TYPE (arg1);
2255 if (arg_type == type)
2256 return arg1;
2258 /* We can't widen types, since the runtime value could overflow the
2259 original type before being extended to the new type. */
2260 if (POLY_INT_CST_P (arg1)
2261 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2262 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2263 return build_poly_int_cst (type,
2264 poly_wide_int::from (poly_int_cst_value (arg1),
2265 TYPE_PRECISION (type),
2266 TYPE_SIGN (arg_type)));
2268 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2269 || TREE_CODE (type) == OFFSET_TYPE)
2271 if (TREE_CODE (arg1) == INTEGER_CST)
2272 return fold_convert_const_int_from_int (type, arg1);
2273 else if (TREE_CODE (arg1) == REAL_CST)
2274 return fold_convert_const_int_from_real (code, type, arg1);
2275 else if (TREE_CODE (arg1) == FIXED_CST)
2276 return fold_convert_const_int_from_fixed (type, arg1);
2278 else if (TREE_CODE (type) == REAL_TYPE)
2280 if (TREE_CODE (arg1) == INTEGER_CST)
2281 return build_real_from_int_cst (type, arg1);
2282 else if (TREE_CODE (arg1) == REAL_CST)
2283 return fold_convert_const_real_from_real (type, arg1);
2284 else if (TREE_CODE (arg1) == FIXED_CST)
2285 return fold_convert_const_real_from_fixed (type, arg1);
2287 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2289 if (TREE_CODE (arg1) == FIXED_CST)
2290 return fold_convert_const_fixed_from_fixed (type, arg1);
2291 else if (TREE_CODE (arg1) == INTEGER_CST)
2292 return fold_convert_const_fixed_from_int (type, arg1);
2293 else if (TREE_CODE (arg1) == REAL_CST)
2294 return fold_convert_const_fixed_from_real (type, arg1);
2296 else if (TREE_CODE (type) == VECTOR_TYPE)
2298 if (TREE_CODE (arg1) == VECTOR_CST
2299 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2301 tree elttype = TREE_TYPE (type);
2302 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2303 /* We can't handle steps directly when extending, since the
2304 values need to wrap at the original precision first. */
2305 bool step_ok_p
2306 = (INTEGRAL_TYPE_P (elttype)
2307 && INTEGRAL_TYPE_P (arg1_elttype)
2308 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2309 tree_vector_builder v;
2310 if (!v.new_unary_operation (type, arg1, step_ok_p))
2311 return NULL_TREE;
2312 unsigned int len = v.encoded_nelts ();
2313 for (unsigned int i = 0; i < len; ++i)
2315 tree elt = VECTOR_CST_ELT (arg1, i);
2316 tree cvt = fold_convert_const (code, elttype, elt);
2317 if (cvt == NULL_TREE)
2318 return NULL_TREE;
2319 v.quick_push (cvt);
2321 return v.build ();
2324 return NULL_TREE;
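/* Illustrative sketch (editor's addition, hypothetical helper): a
   narrowing integer conversion folds by truncating; 260 becomes 4 in an
   8-bit unsigned type.  */
#if 0
static void
example_fold_convert_const (void)
{
  tree wide = build_int_cst (integer_type_node, 260);
  tree narrow = fold_convert_const (NOP_EXPR, unsigned_char_type_node, wide);
  gcc_assert (narrow != NULL_TREE && tree_to_uhwi (narrow) == 4);
}
#endif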
2327 /* Construct a vector of zero elements of vector type TYPE. */
2329 static tree
2330 build_zero_vector (tree type)
2332 tree t;
2334 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2335 return build_vector_from_val (type, t);
2338 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2340 bool
2341 fold_convertible_p (const_tree type, const_tree arg)
2343 tree orig = TREE_TYPE (arg);
2345 if (type == orig)
2346 return true;
2348 if (TREE_CODE (arg) == ERROR_MARK
2349 || TREE_CODE (type) == ERROR_MARK
2350 || TREE_CODE (orig) == ERROR_MARK)
2351 return false;
2353 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2354 return true;
2356 switch (TREE_CODE (type))
2358 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2359 case POINTER_TYPE: case REFERENCE_TYPE:
2360 case OFFSET_TYPE:
2361 return (INTEGRAL_TYPE_P (orig)
2362 || (POINTER_TYPE_P (orig)
2363 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2364 || TREE_CODE (orig) == OFFSET_TYPE);
2366 case REAL_TYPE:
2367 case FIXED_POINT_TYPE:
2368 case VECTOR_TYPE:
2369 case VOID_TYPE:
2370 return TREE_CODE (type) == TREE_CODE (orig);
2372 default:
2373 return false;
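/* Illustrative sketch (editor's addition, hypothetical helper):
   fold_convertible_p is deliberately narrower than fold_convert_loc; any
   integral source works for an integer target, but a REAL_TYPE target
   only accepts another REAL_TYPE here.  */
#if 0
static void
example_fold_convertible (void)
{
  tree one = build_int_cst (integer_type_node, 1);
  gcc_assert (fold_convertible_p (long_integer_type_node, one));
  gcc_assert (!fold_convertible_p (double_type_node, one));
}
#endif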
2377 /* Convert expression ARG to type TYPE. Used by the middle-end for
2378 simple conversions in preference to calling the front-end's convert. */
2380 tree
2381 fold_convert_loc (location_t loc, tree type, tree arg)
2383 tree orig = TREE_TYPE (arg);
2384 tree tem;
2386 if (type == orig)
2387 return arg;
2389 if (TREE_CODE (arg) == ERROR_MARK
2390 || TREE_CODE (type) == ERROR_MARK
2391 || TREE_CODE (orig) == ERROR_MARK)
2392 return error_mark_node;
2394 switch (TREE_CODE (type))
2396 case POINTER_TYPE:
2397 case REFERENCE_TYPE:
2398 /* Handle conversions between pointers to different address spaces. */
2399 if (POINTER_TYPE_P (orig)
2400 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2401 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2402 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2403 /* fall through */
2405 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2406 case OFFSET_TYPE:
2407 if (TREE_CODE (arg) == INTEGER_CST)
2409 tem = fold_convert_const (NOP_EXPR, type, arg);
2410 if (tem != NULL_TREE)
2411 return tem;
2413 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2414 || TREE_CODE (orig) == OFFSET_TYPE)
2415 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2416 if (TREE_CODE (orig) == COMPLEX_TYPE)
2417 return fold_convert_loc (loc, type,
2418 fold_build1_loc (loc, REALPART_EXPR,
2419 TREE_TYPE (orig), arg));
2420 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2421 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2422 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2424 case REAL_TYPE:
2425 if (TREE_CODE (arg) == INTEGER_CST)
2427 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2428 if (tem != NULL_TREE)
2429 return tem;
2431 else if (TREE_CODE (arg) == REAL_CST)
2433 tem = fold_convert_const (NOP_EXPR, type, arg);
2434 if (tem != NULL_TREE)
2435 return tem;
2437 else if (TREE_CODE (arg) == FIXED_CST)
2439 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2440 if (tem != NULL_TREE)
2441 return tem;
2444 switch (TREE_CODE (orig))
2446 case INTEGER_TYPE:
2447 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2448 case POINTER_TYPE: case REFERENCE_TYPE:
2449 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2451 case REAL_TYPE:
2452 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2454 case FIXED_POINT_TYPE:
2455 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2457 case COMPLEX_TYPE:
2458 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2459 return fold_convert_loc (loc, type, tem);
2461 default:
2462 gcc_unreachable ();
2465 case FIXED_POINT_TYPE:
2466 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2467 || TREE_CODE (arg) == REAL_CST)
2469 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2470 if (tem != NULL_TREE)
2471 goto fold_convert_exit;
2474 switch (TREE_CODE (orig))
2476 case FIXED_POINT_TYPE:
2477 case INTEGER_TYPE:
2478 case ENUMERAL_TYPE:
2479 case BOOLEAN_TYPE:
2480 case REAL_TYPE:
2481 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2483 case COMPLEX_TYPE:
2484 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2485 return fold_convert_loc (loc, type, tem);
2487 default:
2488 gcc_unreachable ();
2491 case COMPLEX_TYPE:
2492 switch (TREE_CODE (orig))
2494 case INTEGER_TYPE:
2495 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2496 case POINTER_TYPE: case REFERENCE_TYPE:
2497 case REAL_TYPE:
2498 case FIXED_POINT_TYPE:
2499 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2500 fold_convert_loc (loc, TREE_TYPE (type), arg),
2501 fold_convert_loc (loc, TREE_TYPE (type),
2502 integer_zero_node));
2503 case COMPLEX_TYPE:
2505 tree rpart, ipart;
2507 if (TREE_CODE (arg) == COMPLEX_EXPR)
2509 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2510 TREE_OPERAND (arg, 0));
2511 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2512 TREE_OPERAND (arg, 1));
2513 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2516 arg = save_expr (arg);
2517 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2518 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2519 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2520 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2521 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2524 default:
2525 gcc_unreachable ();
2528 case VECTOR_TYPE:
2529 if (integer_zerop (arg))
2530 return build_zero_vector (type);
2531 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2532 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2533 || TREE_CODE (orig) == VECTOR_TYPE);
2534 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2536 case VOID_TYPE:
2537 tem = fold_ignored_result (arg);
2538 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2540 default:
2541 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2542 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2543 gcc_unreachable ();
2545 fold_convert_exit:
2546 protected_set_expr_location_unshare (tem, loc);
2547 return tem;
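/* Illustrative sketch (editor's addition, hypothetical helper):
   converting a complex value to its component type keeps only the real
   part, as the COMPLEX_TYPE arm above shows.  ARG is assumed to have
   complex double type.  */
#if 0
static tree
example_complex_to_real (location_t loc, tree arg)
{
  /* Folds to REALPART_EXPR <arg> converted to double.  */
  return fold_convert_loc (loc, double_type_node, arg);
}
#endif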
2550 /* Return false if expr can be assumed not to be an lvalue, true
2551 otherwise. */
2553 static bool
2554 maybe_lvalue_p (const_tree x)
2556 /* We only need to wrap lvalue tree codes. */
2557 switch (TREE_CODE (x))
2559 case VAR_DECL:
2560 case PARM_DECL:
2561 case RESULT_DECL:
2562 case LABEL_DECL:
2563 case FUNCTION_DECL:
2564 case SSA_NAME:
2566 case COMPONENT_REF:
2567 case MEM_REF:
2568 case INDIRECT_REF:
2569 case ARRAY_REF:
2570 case ARRAY_RANGE_REF:
2571 case BIT_FIELD_REF:
2572 case OBJ_TYPE_REF:
2574 case REALPART_EXPR:
2575 case IMAGPART_EXPR:
2576 case PREINCREMENT_EXPR:
2577 case PREDECREMENT_EXPR:
2578 case SAVE_EXPR:
2579 case TRY_CATCH_EXPR:
2580 case WITH_CLEANUP_EXPR:
2581 case COMPOUND_EXPR:
2582 case MODIFY_EXPR:
2583 case TARGET_EXPR:
2584 case COND_EXPR:
2585 case BIND_EXPR:
2586 break;
2588 default:
2589 /* Assume the worst for front-end tree codes. */
2590 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2591 break;
2592 return false;
2595 return true;
2598 /* Return an expr equal to X but certainly not valid as an lvalue. */
2600 tree
2601 non_lvalue_loc (location_t loc, tree x)
2603 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2604 us. */
2605 if (in_gimple_form)
2606 return x;
2608 if (! maybe_lvalue_p (x))
2609 return x;
2610 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2613 /* When pedantic, return an expr equal to X but certainly not valid as a
2614 pedantic lvalue. Otherwise, return X. */
2616 static tree
2617 pedantic_non_lvalue_loc (location_t loc, tree x)
2619 return protected_set_expr_location_unshare (x, loc);
2622 /* Given a tree comparison code, return the code that is the logical inverse.
2623 It is generally not safe to do this for floating-point comparisons, except
2624 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2625 ERROR_MARK in this case. */
2627 enum tree_code
2628 invert_tree_comparison (enum tree_code code, bool honor_nans)
2630 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2631 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2632 return ERROR_MARK;
2634 switch (code)
2636 case EQ_EXPR:
2637 return NE_EXPR;
2638 case NE_EXPR:
2639 return EQ_EXPR;
2640 case GT_EXPR:
2641 return honor_nans ? UNLE_EXPR : LE_EXPR;
2642 case GE_EXPR:
2643 return honor_nans ? UNLT_EXPR : LT_EXPR;
2644 case LT_EXPR:
2645 return honor_nans ? UNGE_EXPR : GE_EXPR;
2646 case LE_EXPR:
2647 return honor_nans ? UNGT_EXPR : GT_EXPR;
2648 case LTGT_EXPR:
2649 return UNEQ_EXPR;
2650 case UNEQ_EXPR:
2651 return LTGT_EXPR;
2652 case UNGT_EXPR:
2653 return LE_EXPR;
2654 case UNGE_EXPR:
2655 return LT_EXPR;
2656 case UNLT_EXPR:
2657 return GE_EXPR;
2658 case UNLE_EXPR:
2659 return GT_EXPR;
2660 case ORDERED_EXPR:
2661 return UNORDERED_EXPR;
2662 case UNORDERED_EXPR:
2663 return ORDERED_EXPR;
2664 default:
2665 gcc_unreachable ();
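/* Illustrative sketch (editor's addition, hypothetical helper): the
   NaN-aware inversions in action.  */
#if 0
static void
example_invert_comparison (void)
{
  /* Without NaNs, !(a < b) is (a >= b).  */
  gcc_assert (invert_tree_comparison (LT_EXPR, false) == GE_EXPR);
  /* With NaNs it becomes UNGE_EXPR, unless -ftrapping-math forces us
     to give up and return ERROR_MARK.  */
  if (!flag_trapping_math)
    gcc_assert (invert_tree_comparison (LT_EXPR, true) == UNGE_EXPR);
}
#endif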
2669 /* Similar, but return the comparison that results if the operands are
2670 swapped. This is safe for floating-point. */
2672 enum tree_code
2673 swap_tree_comparison (enum tree_code code)
2675 switch (code)
2677 case EQ_EXPR:
2678 case NE_EXPR:
2679 case ORDERED_EXPR:
2680 case UNORDERED_EXPR:
2681 case LTGT_EXPR:
2682 case UNEQ_EXPR:
2683 return code;
2684 case GT_EXPR:
2685 return LT_EXPR;
2686 case GE_EXPR:
2687 return LE_EXPR;
2688 case LT_EXPR:
2689 return GT_EXPR;
2690 case LE_EXPR:
2691 return GE_EXPR;
2692 case UNGT_EXPR:
2693 return UNLT_EXPR;
2694 case UNGE_EXPR:
2695 return UNLE_EXPR;
2696 case UNLT_EXPR:
2697 return UNGT_EXPR;
2698 case UNLE_EXPR:
2699 return UNGE_EXPR;
2700 default:
2701 gcc_unreachable ();
2706 /* Convert a comparison tree code from an enum tree_code representation
2707 into a compcode bit-based encoding. This function is the inverse of
2708 compcode_to_comparison. */
2710 static enum comparison_code
2711 comparison_to_compcode (enum tree_code code)
2713 switch (code)
2715 case LT_EXPR:
2716 return COMPCODE_LT;
2717 case EQ_EXPR:
2718 return COMPCODE_EQ;
2719 case LE_EXPR:
2720 return COMPCODE_LE;
2721 case GT_EXPR:
2722 return COMPCODE_GT;
2723 case NE_EXPR:
2724 return COMPCODE_NE;
2725 case GE_EXPR:
2726 return COMPCODE_GE;
2727 case ORDERED_EXPR:
2728 return COMPCODE_ORD;
2729 case UNORDERED_EXPR:
2730 return COMPCODE_UNORD;
2731 case UNLT_EXPR:
2732 return COMPCODE_UNLT;
2733 case UNEQ_EXPR:
2734 return COMPCODE_UNEQ;
2735 case UNLE_EXPR:
2736 return COMPCODE_UNLE;
2737 case UNGT_EXPR:
2738 return COMPCODE_UNGT;
2739 case LTGT_EXPR:
2740 return COMPCODE_LTGT;
2741 case UNGE_EXPR:
2742 return COMPCODE_UNGE;
2743 default:
2744 gcc_unreachable ();
2748 /* Convert a compcode bit-based encoding of a comparison operator back
2749 to GCC's enum tree_code representation. This function is the
2750 inverse of comparison_to_compcode. */
2752 static enum tree_code
2753 compcode_to_comparison (enum comparison_code code)
2755 switch (code)
2757 case COMPCODE_LT:
2758 return LT_EXPR;
2759 case COMPCODE_EQ:
2760 return EQ_EXPR;
2761 case COMPCODE_LE:
2762 return LE_EXPR;
2763 case COMPCODE_GT:
2764 return GT_EXPR;
2765 case COMPCODE_NE:
2766 return NE_EXPR;
2767 case COMPCODE_GE:
2768 return GE_EXPR;
2769 case COMPCODE_ORD:
2770 return ORDERED_EXPR;
2771 case COMPCODE_UNORD:
2772 return UNORDERED_EXPR;
2773 case COMPCODE_UNLT:
2774 return UNLT_EXPR;
2775 case COMPCODE_UNEQ:
2776 return UNEQ_EXPR;
2777 case COMPCODE_UNLE:
2778 return UNLE_EXPR;
2779 case COMPCODE_UNGT:
2780 return UNGT_EXPR;
2781 case COMPCODE_LTGT:
2782 return LTGT_EXPR;
2783 case COMPCODE_UNGE:
2784 return UNGE_EXPR;
2785 default:
2786 gcc_unreachable ();
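/* Illustrative sketch (editor's addition, hypothetical helper): the point
   of the bit encoding is that combining comparisons becomes bit
   arithmetic.  */
#if 0
static void
example_compcode_bits (void)
{
  /* (x < y) or (x == y) is (x <= y): LT|EQ == LE.  */
  gcc_assert ((COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE);
  /* (x <= y) and (x >= y) is (x == y): LE&GE == EQ.  */
  gcc_assert ((COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ);
}
#endif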
2790 /* Return true if COND1 tests the opposite condition of COND2. */
2792 bool
2793 inverse_conditions_p (const_tree cond1, const_tree cond2)
2795 return (COMPARISON_CLASS_P (cond1)
2796 && COMPARISON_CLASS_P (cond2)
2797 && (invert_tree_comparison
2798 (TREE_CODE (cond1),
2799 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2800 && operand_equal_p (TREE_OPERAND (cond1, 0),
2801 TREE_OPERAND (cond2, 0), 0)
2802 && operand_equal_p (TREE_OPERAND (cond1, 1),
2803 TREE_OPERAND (cond2, 1), 0));
2806 /* Return a tree for the comparison which is the combination of
2807 doing the AND or OR (depending on CODE) of the two operations LCODE
2808 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2809 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2810 if this makes the transformation invalid. */
2812 tree
2813 combine_comparisons (location_t loc,
2814 enum tree_code code, enum tree_code lcode,
2815 enum tree_code rcode, tree truth_type,
2816 tree ll_arg, tree lr_arg)
2818 bool honor_nans = HONOR_NANS (ll_arg);
2819 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2820 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2821 int compcode;
2823 switch (code)
2825 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2826 compcode = lcompcode & rcompcode;
2827 break;
2829 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2830 compcode = lcompcode | rcompcode;
2831 break;
2833 default:
2834 return NULL_TREE;
2837 if (!honor_nans)
2839 /* Eliminate unordered comparisons, as well as LTGT and ORD
2840 which are not used unless the mode has NaNs. */
2841 compcode &= ~COMPCODE_UNORD;
2842 if (compcode == COMPCODE_LTGT)
2843 compcode = COMPCODE_NE;
2844 else if (compcode == COMPCODE_ORD)
2845 compcode = COMPCODE_TRUE;
2847 else if (flag_trapping_math)
2849 /* Check that the original operation and the optimized ones will trap
2850 under the same condition. */
2851 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2852 && (lcompcode != COMPCODE_EQ)
2853 && (lcompcode != COMPCODE_ORD);
2854 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2855 && (rcompcode != COMPCODE_EQ)
2856 && (rcompcode != COMPCODE_ORD);
2857 bool trap = (compcode & COMPCODE_UNORD) == 0
2858 && (compcode != COMPCODE_EQ)
2859 && (compcode != COMPCODE_ORD);
2861 /* In a short-circuited boolean expression the LHS might be
2862 such that the RHS, if evaluated, will never trap. For
2863 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2864 if neither x nor y is NaN. (This is a mixed blessing: for
2865 example, the expression above will never trap, hence
2866 optimizing it to x < y would be invalid). */
2867 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2868 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2869 rtrap = false;
2871 /* If the comparison was short-circuited, and only the RHS
2872 trapped, we may now generate a spurious trap. */
2873 if (rtrap && !ltrap
2874 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2875 return NULL_TREE;
2877 /* If we changed the conditions that cause a trap, we lose. */
2878 if ((ltrap || rtrap) != trap)
2879 return NULL_TREE;
2882 if (compcode == COMPCODE_TRUE)
2883 return constant_boolean_node (true, truth_type);
2884 else if (compcode == COMPCODE_FALSE)
2885 return constant_boolean_node (false, truth_type);
2886 else
2888 enum tree_code tcode;
2890 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2891 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
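/* Illustrative sketch (editor's addition, hypothetical helper): a caller
   folding (x < y) || (x == y) down to x <= y via the compcode OR; X and Y
   are assumed to be integral operands so no NaN bookkeeping applies.  */
#if 0
static tree
example_combine_comparisons (location_t loc, tree x, tree y)
{
  return combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
			      boolean_type_node, x, y);
}
#endif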
2895 /* Return nonzero if two operands (typically of the same tree node)
2896 are necessarily equal. FLAGS modifies behavior as follows:
2898 If OEP_ONLY_CONST is set, only return nonzero for constants.
2899 This function tests whether the operands are indistinguishable;
2900 it does not test whether they are equal using C's == operation.
2901 The distinction is important for IEEE floating point, because
2902 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2903 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2905 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2906 even though it may hold multiple values during a function.
2907 This is because a GCC tree node guarantees that nothing else is
2908 executed between the evaluation of its "operands" (which may often
2909 be evaluated in arbitrary order). Hence if the operands themselves
2910 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2911 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2912 unset means assuming isochronic (or instantaneous) tree equivalence.
2913 Unless comparing arbitrary expression trees, such as from different
2914 statements, this flag can usually be left unset.
2916 If OEP_PURE_SAME is set, then pure functions with identical arguments
2917 are considered the same. It is used when the caller has other ways
2918 to ensure that global memory is unchanged in between.
2920 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2921 not values of expressions.
2923 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2924 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2926 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2927 any operand with side effects. This is unnecessarily conservative in the
2928 case where we know that arg0 and arg1 are in disjoint code paths (such
2929 as in the ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2930 addresses with TREE_CONSTANT flag set so we know that &var == &var
2931 even if var is volatile. */
2933 int
2934 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2936 /* When checking, verify at the outermost operand_equal_p call that
2937 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
2938 hash value. */
2939 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2941 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2943 if (arg0 != arg1)
2945 inchash::hash hstate0 (0), hstate1 (0);
2946 inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2947 inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2948 hashval_t h0 = hstate0.end ();
2949 hashval_t h1 = hstate1.end ();
2950 gcc_assert (h0 == h1);
2952 return 1;
2954 else
2955 return 0;
2958 /* If either is ERROR_MARK, they aren't equal. */
2959 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2960 || TREE_TYPE (arg0) == error_mark_node
2961 || TREE_TYPE (arg1) == error_mark_node)
2962 return 0;
2964 /* Similar, if either does not have a type (like a released SSA name),
2965 they aren't equal. */
2966 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2967 return 0;
2969 /* We cannot consider pointers to different address space equal. */
2970 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2971 && POINTER_TYPE_P (TREE_TYPE (arg1))
2972 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2973 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2974 return 0;
2976 /* Check equality of integer constants before bailing out due to
2977 precision differences. */
2978 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2980 /* Address of INTEGER_CST is not defined; check that we did not forget
2981 to drop the OEP_ADDRESS_OF flags. */
2982 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2983 return tree_int_cst_equal (arg0, arg1);
2986 if (!(flags & OEP_ADDRESS_OF))
2988 /* If the two types don't have the same signedness, then we can't consider
2989 them equal. We must check this before the STRIP_NOPS calls
2990 because they may change the signedness of the arguments. As pointers
2991 strictly don't have a signedness, require either two pointers or
2992 two non-pointers as well. */
2993 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2994 || POINTER_TYPE_P (TREE_TYPE (arg0))
2995 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2996 return 0;
2998 /* If the two types don't have the same precision, then it is not safe
2999 to strip NOPs. */
3000 if (element_precision (TREE_TYPE (arg0))
3001 != element_precision (TREE_TYPE (arg1)))
3002 return 0;
3004 STRIP_NOPS (arg0);
3005 STRIP_NOPS (arg1);
3007 #if 0
3008 /* FIXME: the Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
3009 sanity check once the issue is solved. */
3010 else
3011 /* Addresses of conversions and SSA_NAMEs (and many other things)
3012 are not defined. Check that we did not forget to drop the
3013 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3014 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3015 && TREE_CODE (arg0) != SSA_NAME);
3016 #endif
3018 /* In case both args are comparisons but with different comparison
3019 code, try to swap the comparison operands of one arg to produce
3020 a match and compare that variant. */
3021 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3022 && COMPARISON_CLASS_P (arg0)
3023 && COMPARISON_CLASS_P (arg1))
3025 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3027 if (TREE_CODE (arg0) == swap_code)
3028 return operand_equal_p (TREE_OPERAND (arg0, 0),
3029 TREE_OPERAND (arg1, 1), flags)
3030 && operand_equal_p (TREE_OPERAND (arg0, 1),
3031 TREE_OPERAND (arg1, 0), flags);
3034 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3036 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3037 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3039 else if (flags & OEP_ADDRESS_OF)
3041 /* If we are interested in comparing addresses, ignore
3042 MEM_REF wrappings of the base that can appear just for
3043 TBAA reasons. */
3044 if (TREE_CODE (arg0) == MEM_REF
3045 && DECL_P (arg1)
3046 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3047 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3048 && integer_zerop (TREE_OPERAND (arg0, 1)))
3049 return 1;
3050 else if (TREE_CODE (arg1) == MEM_REF
3051 && DECL_P (arg0)
3052 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3053 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3054 && integer_zerop (TREE_OPERAND (arg1, 1)))
3055 return 1;
3056 return 0;
3058 else
3059 return 0;
3062 /* When not checking addresses, this is needed for conversions and for
3063 COMPONENT_REF. Might as well play it safe and always test this. */
3064 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3065 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3066 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3067 && !(flags & OEP_ADDRESS_OF)))
3068 return 0;
3070 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3071 We don't care about side effects in that case because the SAVE_EXPR
3072 takes care of that for us. In all other cases, two expressions are
3073 equal if they have no side effects. If we have two identical
3074 expressions with side effects that should be treated the same due
3075 to the only side effects being identical SAVE_EXPR's, that will
3076 be detected in the recursive calls below.
3077 If we are taking an invariant address of two identical objects
3078 they are necessarily equal as well. */
3079 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3080 && (TREE_CODE (arg0) == SAVE_EXPR
3081 || (flags & OEP_MATCH_SIDE_EFFECTS)
3082 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3083 return 1;
3085 /* Next handle constant cases, those for which we can return 1 even
3086 if ONLY_CONST is set. */
3087 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3088 switch (TREE_CODE (arg0))
3090 case INTEGER_CST:
3091 return tree_int_cst_equal (arg0, arg1);
3093 case FIXED_CST:
3094 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3095 TREE_FIXED_CST (arg1));
3097 case REAL_CST:
3098 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3099 return 1;
3102 if (!HONOR_SIGNED_ZEROS (arg0))
3104 /* If we do not distinguish between signed and unsigned zero,
3105 consider them equal. */
3106 if (real_zerop (arg0) && real_zerop (arg1))
3107 return 1;
3109 return 0;
3111 case VECTOR_CST:
3113 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3114 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3115 return 0;
3117 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3118 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3119 return 0;
3121 unsigned int count = vector_cst_encoded_nelts (arg0);
3122 for (unsigned int i = 0; i < count; ++i)
3123 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3124 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3125 return 0;
3126 return 1;
3129 case COMPLEX_CST:
3130 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3131 flags)
3132 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3133 flags));
3135 case STRING_CST:
3136 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3137 && ! memcmp (TREE_STRING_POINTER (arg0),
3138 TREE_STRING_POINTER (arg1),
3139 TREE_STRING_LENGTH (arg0)));
3141 case ADDR_EXPR:
3142 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3143 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3144 flags | OEP_ADDRESS_OF
3145 | OEP_MATCH_SIDE_EFFECTS);
3146 case CONSTRUCTOR:
3147 /* In GIMPLE empty constructors are allowed in initializers of
3148 aggregates. */
3149 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3150 default:
3151 break;
3154 if (flags & OEP_ONLY_CONST)
3155 return 0;
3157 /* Define macros to test an operand from arg0 and arg1 for equality and a
3158 variant that allows null and views null as being different from any
3159 non-null value. In the latter case, if either is null, they both
3160 must be; otherwise, do the normal comparison. */
3161 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3162 TREE_OPERAND (arg1, N), flags)
3164 #define OP_SAME_WITH_NULL(N) \
3165 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3166 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3168 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3170 case tcc_unary:
3171 /* Two conversions are equal only if signedness and modes match. */
3172 switch (TREE_CODE (arg0))
3174 CASE_CONVERT:
3175 case FIX_TRUNC_EXPR:
3176 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3177 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3178 return 0;
3179 break;
3180 default:
3181 break;
3184 return OP_SAME (0);
3187 case tcc_comparison:
3188 case tcc_binary:
3189 if (OP_SAME (0) && OP_SAME (1))
3190 return 1;
3192 /* For commutative ops, allow the other order. */
3193 return (commutative_tree_code (TREE_CODE (arg0))
3194 && operand_equal_p (TREE_OPERAND (arg0, 0),
3195 TREE_OPERAND (arg1, 1), flags)
3196 && operand_equal_p (TREE_OPERAND (arg0, 1),
3197 TREE_OPERAND (arg1, 0), flags));
3199 case tcc_reference:
3200 /* If either of the pointer (or reference) expressions we are
3201 dereferencing contains a side effect, these cannot be equal,
3202 but their addresses can be. */
3203 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3204 && (TREE_SIDE_EFFECTS (arg0)
3205 || TREE_SIDE_EFFECTS (arg1)))
3206 return 0;
3208 switch (TREE_CODE (arg0))
3210 case INDIRECT_REF:
3211 if (!(flags & OEP_ADDRESS_OF)
3212 && (TYPE_ALIGN (TREE_TYPE (arg0))
3213 != TYPE_ALIGN (TREE_TYPE (arg1))))
3214 return 0;
3215 flags &= ~OEP_ADDRESS_OF;
3216 return OP_SAME (0);
3218 case IMAGPART_EXPR:
3219 /* Require the same offset. */
3220 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3221 TYPE_SIZE (TREE_TYPE (arg1)),
3222 flags & ~OEP_ADDRESS_OF))
3223 return 0;
3225 /* Fallthru. */
3226 case REALPART_EXPR:
3227 case VIEW_CONVERT_EXPR:
3228 return OP_SAME (0);
3230 case TARGET_MEM_REF:
3231 case MEM_REF:
3232 if (!(flags & OEP_ADDRESS_OF))
3234 /* Require equal access sizes */
3235 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3236 && (!TYPE_SIZE (TREE_TYPE (arg0))
3237 || !TYPE_SIZE (TREE_TYPE (arg1))
3238 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3239 TYPE_SIZE (TREE_TYPE (arg1)),
3240 flags)))
3241 return 0;
3242 /* Verify that access happens in similar types. */
3243 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3244 return 0;
3245 /* Verify that accesses are TBAA compatible. */
3246 if (!alias_ptr_types_compatible_p
3247 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3248 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3249 || (MR_DEPENDENCE_CLIQUE (arg0)
3250 != MR_DEPENDENCE_CLIQUE (arg1))
3251 || (MR_DEPENDENCE_BASE (arg0)
3252 != MR_DEPENDENCE_BASE (arg1)))
3253 return 0;
3254 /* Verify that alignment is compatible. */
3255 if (TYPE_ALIGN (TREE_TYPE (arg0))
3256 != TYPE_ALIGN (TREE_TYPE (arg1)))
3257 return 0;
3259 flags &= ~OEP_ADDRESS_OF;
3260 return (OP_SAME (0) && OP_SAME (1)
3261 /* TARGET_MEM_REF requires equal extra operands. */
3262 && (TREE_CODE (arg0) != TARGET_MEM_REF
3263 || (OP_SAME_WITH_NULL (2)
3264 && OP_SAME_WITH_NULL (3)
3265 && OP_SAME_WITH_NULL (4))));
3267 case ARRAY_REF:
3268 case ARRAY_RANGE_REF:
3269 if (!OP_SAME (0))
3270 return 0;
3271 flags &= ~OEP_ADDRESS_OF;
3272 /* Compare the array index by value first if it is constant, as we
3273 may have operands with different types but the same value here. */
3274 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3275 TREE_OPERAND (arg1, 1))
3276 || OP_SAME (1))
3277 && OP_SAME_WITH_NULL (2)
3278 && OP_SAME_WITH_NULL (3)
3279 /* Compare low bound and element size as with OEP_ADDRESS_OF
3280 we have to account for the offset of the ref. */
3281 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3282 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3283 || (operand_equal_p (array_ref_low_bound
3284 (CONST_CAST_TREE (arg0)),
3285 array_ref_low_bound
3286 (CONST_CAST_TREE (arg1)), flags)
3287 && operand_equal_p (array_ref_element_size
3288 (CONST_CAST_TREE (arg0)),
3289 array_ref_element_size
3290 (CONST_CAST_TREE (arg1)),
3291 flags))));
3293 case COMPONENT_REF:
3294 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3295 may be NULL when we're called to compare MEM_EXPRs. */
3296 if (!OP_SAME_WITH_NULL (0)
3297 || !OP_SAME (1))
3298 return 0;
3299 flags &= ~OEP_ADDRESS_OF;
3300 return OP_SAME_WITH_NULL (2);
3302 case BIT_FIELD_REF:
3303 if (!OP_SAME (0))
3304 return 0;
3305 flags &= ~OEP_ADDRESS_OF;
3306 return OP_SAME (1) && OP_SAME (2);
3308 default:
3309 return 0;
3312 case tcc_expression:
3313 switch (TREE_CODE (arg0))
3315 case ADDR_EXPR:
3316 /* Be sure we pass the right ADDRESS_OF flag. */
3317 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3318 return operand_equal_p (TREE_OPERAND (arg0, 0),
3319 TREE_OPERAND (arg1, 0),
3320 flags | OEP_ADDRESS_OF);
3322 case TRUTH_NOT_EXPR:
3323 return OP_SAME (0);
3325 case TRUTH_ANDIF_EXPR:
3326 case TRUTH_ORIF_EXPR:
3327 return OP_SAME (0) && OP_SAME (1);
3329 case WIDEN_MULT_PLUS_EXPR:
3330 case WIDEN_MULT_MINUS_EXPR:
3331 if (!OP_SAME (2))
3332 return 0;
3333 /* The multiplication operands are commutative. */
3334 /* FALLTHRU */
3336 case TRUTH_AND_EXPR:
3337 case TRUTH_OR_EXPR:
3338 case TRUTH_XOR_EXPR:
3339 if (OP_SAME (0) && OP_SAME (1))
3340 return 1;
3342 /* Otherwise take into account that this is a commutative operation. */
3343 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3344 TREE_OPERAND (arg1, 1), flags)
3345 && operand_equal_p (TREE_OPERAND (arg0, 1),
3346 TREE_OPERAND (arg1, 0), flags));
3348 case COND_EXPR:
3349 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3350 return 0;
3351 flags &= ~OEP_ADDRESS_OF;
3352 return OP_SAME (0);
3354 case BIT_INSERT_EXPR:
3355 /* BIT_INSERT_EXPR has an implicit operand, the type precision
3356 of op1; we need to check that it is the same for both args. */
3357 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3358 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3359 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3360 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3361 return false;
3362 /* FALLTHRU */
3364 case VEC_COND_EXPR:
3365 case DOT_PROD_EXPR:
3366 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3368 case MODIFY_EXPR:
3369 case INIT_EXPR:
3370 case COMPOUND_EXPR:
3371 case PREDECREMENT_EXPR:
3372 case PREINCREMENT_EXPR:
3373 case POSTDECREMENT_EXPR:
3374 case POSTINCREMENT_EXPR:
3375 if (flags & OEP_LEXICOGRAPHIC)
3376 return OP_SAME (0) && OP_SAME (1);
3377 return 0;
3379 case CLEANUP_POINT_EXPR:
3380 case EXPR_STMT:
3381 case SAVE_EXPR:
3382 if (flags & OEP_LEXICOGRAPHIC)
3383 return OP_SAME (0);
3384 return 0;
3386 default:
3387 return 0;
3390 case tcc_vl_exp:
3391 switch (TREE_CODE (arg0))
3393 case CALL_EXPR:
3394 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3395 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3396 /* If the two CALL_EXPRs are not both internal or both normal
3397 function calls, then they are not equal. */
3398 return 0;
3399 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3401 /* If the CALL_EXPRs call different internal functions, then they
3402 are not equal. */
3403 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3404 return 0;
3406 else
3408 /* If the CALL_EXPRs call different functions, then they are not
3409 equal. */
3410 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3411 flags))
3412 return 0;
3415 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3417 unsigned int cef = call_expr_flags (arg0);
3418 if (flags & OEP_PURE_SAME)
3419 cef &= ECF_CONST | ECF_PURE;
3420 else
3421 cef &= ECF_CONST;
3422 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3423 return 0;
3426 /* Now see if all the arguments are the same. */
3428 const_call_expr_arg_iterator iter0, iter1;
3429 const_tree a0, a1;
3430 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3431 a1 = first_const_call_expr_arg (arg1, &iter1);
3432 a0 && a1;
3433 a0 = next_const_call_expr_arg (&iter0),
3434 a1 = next_const_call_expr_arg (&iter1))
3435 if (! operand_equal_p (a0, a1, flags))
3436 return 0;
3438 /* If we get here and both argument lists are exhausted
3439 then the CALL_EXPRs are equal. */
3440 return ! (a0 || a1);
3442 default:
3443 return 0;
3446 case tcc_declaration:
3447 /* Consider __builtin_sqrt equal to sqrt. */
3448 return (TREE_CODE (arg0) == FUNCTION_DECL
3449 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3450 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3451 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3453 case tcc_exceptional:
3454 if (TREE_CODE (arg0) == CONSTRUCTOR)
3456 /* In GIMPLE constructors are used only to build vectors from
3457 elements. Individual elements in the constructor must be
3458 indexed in increasing order and form an initial sequence.
3460 We make no effort to compare constructors in GENERIC.
3461 (see sem_variable::equals in ipa-icf which can do so for
3462 constants). */
3463 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3464 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3465 return 0;
3467 /* Be sure that the vectors constructed have the same representation.
3468 So far we have only tested that the element precisions and modes match.
3469 Vectors may be BLKmode, so also check that the numbers of
3470 parts match. */
3471 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3472 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3473 return 0;
3475 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3476 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3477 unsigned int len = vec_safe_length (v0);
3479 if (len != vec_safe_length (v1))
3480 return 0;
3482 for (unsigned int i = 0; i < len; i++)
3484 constructor_elt *c0 = &(*v0)[i];
3485 constructor_elt *c1 = &(*v1)[i];
3487 if (!operand_equal_p (c0->value, c1->value, flags)
3488 /* In GIMPLE the indexes can be either NULL or matching i.
3489 Double check this so we won't get false
3490 positives for GENERIC. */
3491 || (c0->index
3492 && (TREE_CODE (c0->index) != INTEGER_CST
3493 || !compare_tree_int (c0->index, i)))
3494 || (c1->index
3495 && (TREE_CODE (c1->index) != INTEGER_CST
3496 || !compare_tree_int (c1->index, i))))
3497 return 0;
3499 return 1;
3501 else if (TREE_CODE (arg0) == STATEMENT_LIST
3502 && (flags & OEP_LEXICOGRAPHIC))
3504 /* Compare the STATEMENT_LISTs. */
3505 tree_stmt_iterator tsi1, tsi2;
3506 tree body1 = CONST_CAST_TREE (arg0);
3507 tree body2 = CONST_CAST_TREE (arg1);
3508 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3509 tsi_next (&tsi1), tsi_next (&tsi2))
3511 /* The lists don't have the same number of statements. */
3512 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3513 return 0;
3514 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3515 return 1;
3516 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3517 flags & (OEP_LEXICOGRAPHIC
3518 | OEP_NO_HASH_CHECK)))
3519 return 0;
3522 return 0;
3524 case tcc_statement:
3525 switch (TREE_CODE (arg0))
3527 case RETURN_EXPR:
3528 if (flags & OEP_LEXICOGRAPHIC)
3529 return OP_SAME_WITH_NULL (0);
3530 return 0;
3531 case DEBUG_BEGIN_STMT:
3532 if (flags & OEP_LEXICOGRAPHIC)
3533 return 1;
3534 return 0;
3535 default:
3536 return 0;
3539 default:
3540 return 0;
3543 #undef OP_SAME
3544 #undef OP_SAME_WITH_NULL
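/* Illustrative sketch (editor's addition, hypothetical helper): typical
   uses of operand_equal_p and its flags; A is assumed to be a valid,
   side-effect-free tree.  */
#if 0
static void
example_operand_equal (tree a)
{
  /* A side-effect-free tree always equals itself.  */
  gcc_assert (operand_equal_p (a, a, 0));
  /* Constants still compare equal under OEP_ONLY_CONST.  */
  gcc_assert (operand_equal_p (size_int (7), size_int (7), OEP_ONLY_CONST));
}
#endif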
3547 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3548 with a different signedness or a narrower precision. */
3550 static bool
3551 operand_equal_for_comparison_p (tree arg0, tree arg1)
3553 if (operand_equal_p (arg0, arg1, 0))
3554 return true;
3556 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3557 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3558 return false;
3560 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3561 and see if the inner values are the same. This removes any
3562 signedness comparison, which doesn't matter here. */
3563 tree op0 = arg0;
3564 tree op1 = arg1;
3565 STRIP_NOPS (op0);
3566 STRIP_NOPS (op1);
3567 if (operand_equal_p (op0, op1, 0))
3568 return true;
3570 /* Discard a single widening conversion from ARG1 and see if the inner
3571 value is the same as ARG0. */
3572 if (CONVERT_EXPR_P (arg1)
3573 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3574 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3575 < TYPE_PRECISION (TREE_TYPE (arg1))
3576 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3577 return true;
3579 return false;
3582 /* See if ARG is an expression that is either a comparison or is performing
3583 arithmetic on comparisons. The comparisons must only be comparing
3584 two different values, which will be stored in *CVAL1 and *CVAL2; if
3585 they are nonzero it means that some operands have already been found.
3586 No variables may be used anywhere else in the expression except in the
3587 comparisons.
3589 If this is true, return 1. Otherwise, return zero. */
3591 static int
3592 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
3594 enum tree_code code = TREE_CODE (arg);
3595 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3597 /* We can handle some of the tcc_expression cases here. */
3598 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3599 tclass = tcc_unary;
3600 else if (tclass == tcc_expression
3601 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3602 || code == COMPOUND_EXPR))
3603 tclass = tcc_binary;
3605 switch (tclass)
3607 case tcc_unary:
3608 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
3610 case tcc_binary:
3611 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3612 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
3614 case tcc_constant:
3615 return 1;
3617 case tcc_expression:
3618 if (code == COND_EXPR)
3619 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3620 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
3621 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
3622 return 0;
3624 case tcc_comparison:
3625 /* First see if we can handle the first operand, then the second. For
3626 the second operand, we know *CVAL1 can't be zero. Each of the
3627 values must appear on one side of the comparison; test for the
3628 case where this isn't true by failing if the two operands
3629 are the same. */
3631 if (operand_equal_p (TREE_OPERAND (arg, 0),
3632 TREE_OPERAND (arg, 1), 0))
3633 return 0;
3635 if (*cval1 == 0)
3636 *cval1 = TREE_OPERAND (arg, 0);
3637 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3639 else if (*cval2 == 0)
3640 *cval2 = TREE_OPERAND (arg, 0);
3641 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3643 else
3644 return 0;
3646 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3648 else if (*cval2 == 0)
3649 *cval2 = TREE_OPERAND (arg, 1);
3650 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3652 else
3653 return 0;
3655 return 1;
3657 default:
3658 return 0;
3662 /* ARG is a tree that is known to contain just arithmetic operations and
3663 comparisons. Evaluate the operations in the tree substituting NEW0 for
3664 any occurrence of OLD0 as an operand of a comparison and likewise for
3665 NEW1 and OLD1. */
3667 static tree
3668 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3669 tree old1, tree new1)
3671 tree type = TREE_TYPE (arg);
3672 enum tree_code code = TREE_CODE (arg);
3673 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3675 /* We can handle some of the tcc_expression cases here. */
3676 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3677 tclass = tcc_unary;
3678 else if (tclass == tcc_expression
3679 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3680 tclass = tcc_binary;
3682 switch (tclass)
3684 case tcc_unary:
3685 return fold_build1_loc (loc, code, type,
3686 eval_subst (loc, TREE_OPERAND (arg, 0),
3687 old0, new0, old1, new1));
3689 case tcc_binary:
3690 return fold_build2_loc (loc, code, type,
3691 eval_subst (loc, TREE_OPERAND (arg, 0),
3692 old0, new0, old1, new1),
3693 eval_subst (loc, TREE_OPERAND (arg, 1),
3694 old0, new0, old1, new1));
3696 case tcc_expression:
3697 switch (code)
3699 case SAVE_EXPR:
3700 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3701 old1, new1);
3703 case COMPOUND_EXPR:
3704 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3705 old1, new1);
3707 case COND_EXPR:
3708 return fold_build3_loc (loc, code, type,
3709 eval_subst (loc, TREE_OPERAND (arg, 0),
3710 old0, new0, old1, new1),
3711 eval_subst (loc, TREE_OPERAND (arg, 1),
3712 old0, new0, old1, new1),
3713 eval_subst (loc, TREE_OPERAND (arg, 2),
3714 old0, new0, old1, new1));
3715 default:
3716 break;
3718 /* Fall through - ??? */
3720 case tcc_comparison:
3722 tree arg0 = TREE_OPERAND (arg, 0);
3723 tree arg1 = TREE_OPERAND (arg, 1);
3725 /* We need to check both for exact equality and tree equality. The
3726 former will be true if the operand has a side-effect. In that
3727 case, we know the operand occurred exactly once. */
3729 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3730 arg0 = new0;
3731 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3732 arg0 = new1;
3734 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3735 arg1 = new0;
3736 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3737 arg1 = new1;
3739 return fold_build2_loc (loc, code, type, arg0, arg1);
3742 default:
3743 return arg;
3747 /* Return a tree for the case when the result of an expression is RESULT
3748 converted to TYPE and OMITTED was previously an operand of the expression
3749 but is now not needed (e.g., we folded OMITTED * 0).
3751 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3752 the conversion of RESULT to TYPE. */
3754 tree
3755 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3757 tree t = fold_convert_loc (loc, type, result);
3759 /* If the resulting operand is an empty statement, just return the omitted
3760 statement cast to void. */
3761 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3762 return build1_loc (loc, NOP_EXPR, void_type_node,
3763 fold_ignored_result (omitted));
3765 if (TREE_SIDE_EFFECTS (omitted))
3766 return build2_loc (loc, COMPOUND_EXPR, type,
3767 fold_ignored_result (omitted), t);
3769 return non_lvalue_loc (loc, t);
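/* Illustrative sketch (editor's addition, hypothetical helper): when
   folding X * 0 to 0, X must still be evaluated if it has side effects,
   so the result becomes the COMPOUND_EXPR (X, 0).  */
#if 0
static tree
example_omit_one_operand (location_t loc, tree type, tree x)
{
  return omit_one_operand_loc (loc, type, build_int_cst (type, 0), x);
}
#endif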
3772 /* Return a tree for the case when the result of an expression is RESULT
3773 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3774 of the expression but are now not needed.
3776 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3777 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3778 evaluated before OMITTED2. Otherwise, if neither has side effects,
3779 just do the conversion of RESULT to TYPE. */
3781 tree
3782 omit_two_operands_loc (location_t loc, tree type, tree result,
3783 tree omitted1, tree omitted2)
3785 tree t = fold_convert_loc (loc, type, result);
3787 if (TREE_SIDE_EFFECTS (omitted2))
3788 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3789 if (TREE_SIDE_EFFECTS (omitted1))
3790 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3792 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3796 /* Return a simplified tree node for the truth-negation of ARG. This
3797 never alters ARG itself. We assume that ARG is an operation that
3798 returns a truth value (0 or 1).
3800 FIXME: one would think we would fold the result, but it causes
3801 problems with the dominator optimizer. */
3803 static tree
3804 fold_truth_not_expr (location_t loc, tree arg)
3806 tree type = TREE_TYPE (arg);
3807 enum tree_code code = TREE_CODE (arg);
3808 location_t loc1, loc2;
3810 /* If this is a comparison, we can simply invert it, except for
3811 floating-point non-equality comparisons, in which case we just
3812 enclose a TRUTH_NOT_EXPR around what we have. */
3814 if (TREE_CODE_CLASS (code) == tcc_comparison)
3816 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3817 if (FLOAT_TYPE_P (op_type)
3818 && flag_trapping_math
3819 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3820 && code != NE_EXPR && code != EQ_EXPR)
3821 return NULL_TREE;
3823 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3824 if (code == ERROR_MARK)
3825 return NULL_TREE;
3827 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3828 TREE_OPERAND (arg, 1));
3829 if (TREE_NO_WARNING (arg))
3830 TREE_NO_WARNING (ret) = 1;
3831 return ret;
3834 switch (code)
3836 case INTEGER_CST:
3837 return constant_boolean_node (integer_zerop (arg), type);
3839 case TRUTH_AND_EXPR:
3840 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3841 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3842 return build2_loc (loc, TRUTH_OR_EXPR, type,
3843 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3844 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3846 case TRUTH_OR_EXPR:
3847 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3848 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3849 return build2_loc (loc, TRUTH_AND_EXPR, type,
3850 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3851 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3853 case TRUTH_XOR_EXPR:
3854 /* Here we can invert either operand. We invert the first operand
3855 unless the second operand is a TRUTH_NOT_EXPR in which case our
3856 result is the XOR of the first operand with the inside of the
3857 negation of the second operand. */
3859 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3860 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3861 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3862 else
3863 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3864 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3865 TREE_OPERAND (arg, 1));
3867 case TRUTH_ANDIF_EXPR:
3868 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3869 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3870 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3871 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3872 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3874 case TRUTH_ORIF_EXPR:
3875 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3876 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3877 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3878 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3879 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3881 case TRUTH_NOT_EXPR:
3882 return TREE_OPERAND (arg, 0);
3884 case COND_EXPR:
3886 tree arg1 = TREE_OPERAND (arg, 1);
3887 tree arg2 = TREE_OPERAND (arg, 2);
3889 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3890 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3892 /* A COND_EXPR may have a throw as one operand, which
3893 then has void type. Just leave void operands
3894 as they are. */
3895 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3896 VOID_TYPE_P (TREE_TYPE (arg1))
3897 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3898 VOID_TYPE_P (TREE_TYPE (arg2))
3899 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3902 case COMPOUND_EXPR:
3903 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3904 return build2_loc (loc, COMPOUND_EXPR, type,
3905 TREE_OPERAND (arg, 0),
3906 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3908 case NON_LVALUE_EXPR:
3909 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3910 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3912 CASE_CONVERT:
3913 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3914 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3916 /* fall through */
3918 case FLOAT_EXPR:
3919 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3920 return build1_loc (loc, TREE_CODE (arg), type,
3921 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3923 case BIT_AND_EXPR:
3924 if (!integer_onep (TREE_OPERAND (arg, 1)))
3925 return NULL_TREE;
3926 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3928 case SAVE_EXPR:
3929 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3931 case CLEANUP_POINT_EXPR:
3932 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3933 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3934 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3936 default:
3937 return NULL_TREE;
3941 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3942 assume that ARG is an operation that returns a truth value (0 or 1
3943 for scalars, 0 or -1 for vectors). Return the folded expression if
3944 folding is successful. Otherwise, return NULL_TREE. */
3946 static tree
3947 fold_invert_truthvalue (location_t loc, tree arg)
3949 tree type = TREE_TYPE (arg);
3950 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3951 ? BIT_NOT_EXPR
3952 : TRUTH_NOT_EXPR,
3953 type, arg);
3956 /* Return a simplified tree node for the truth-negation of ARG. This
3957 never alters ARG itself. We assume that ARG is an operation that
3958 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3960 tree
3961 invert_truthvalue_loc (location_t loc, tree arg)
3963 if (TREE_CODE (arg) == ERROR_MARK)
3964 return arg;
3966 tree type = TREE_TYPE (arg);
3967 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3968 ? BIT_NOT_EXPR
3969 : TRUTH_NOT_EXPR,
3970 type, arg);
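/* Editor's addition -- an illustrative sketch, not part of GCC. It shows
   why the inversion above is restricted for floating point: !(a < b) and
   a >= b agree only when neither operand is a NaN, and with -ftrapping-math
   the rewritten comparison could also raise different exceptions. The
   function name is hypothetical.  */

static int
inversion_agrees (double a, double b)
{
  /* Yields 1 when the inverted form matches; for NaN operands both
     a < b and a >= b are false, so this yields 0.  */
  return !(a < b) == (a >= b);
}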
3973 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3974 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3975 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
3976 is the original memory reference used to preserve the alias set of
3977 the access. */
3979 static tree
3980 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3981 HOST_WIDE_INT bitsize, poly_int64 bitpos,
3982 int unsignedp, int reversep)
3984 tree result, bftype;
3986 /* Attempt not to lose the access path if possible. */
3987 if (TREE_CODE (orig_inner) == COMPONENT_REF)
3989 tree ninner = TREE_OPERAND (orig_inner, 0);
3990 machine_mode nmode;
3991 poly_int64 nbitsize, nbitpos;
3992 tree noffset;
3993 int nunsignedp, nreversep, nvolatilep = 0;
3994 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
3995 &noffset, &nmode, &nunsignedp,
3996 &nreversep, &nvolatilep);
3997 if (base == inner
3998 && noffset == NULL_TREE
3999 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4000 && !reversep
4001 && !nreversep
4002 && !nvolatilep)
4004 inner = ninner;
4005 bitpos -= nbitpos;
4009 alias_set_type iset = get_alias_set (orig_inner);
4010 if (iset == 0 && get_alias_set (inner) != iset)
4011 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4012 build_fold_addr_expr (inner),
4013 build_int_cst (ptr_type_node, 0));
4015 if (known_eq (bitpos, 0) && !reversep)
4017 tree size = TYPE_SIZE (TREE_TYPE (inner));
4018 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4019 || POINTER_TYPE_P (TREE_TYPE (inner)))
4020 && tree_fits_shwi_p (size)
4021 && tree_to_shwi (size) == bitsize)
4022 return fold_convert_loc (loc, type, inner);
4025 bftype = type;
4026 if (TYPE_PRECISION (bftype) != bitsize
4027 || TYPE_UNSIGNED (bftype) == !unsignedp)
4028 bftype = build_nonstandard_integer_type (bitsize, 0);
4030 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4031 bitsize_int (bitsize), bitsize_int (bitpos));
4032 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4034 if (bftype != type)
4035 result = fold_convert_loc (loc, type, result);
4037 return result;
4040 /* Optimize a bit-field compare.
4042 There are two cases: the first is a compare against a constant and the
4043 second is a comparison of two items where the fields are at the same
4044 bit position relative to the start of a chunk (byte, halfword, word)
4045 large enough to contain it. In these cases we can avoid the shift
4046 implicit in bitfield extractions.
4048 For constants, we emit a compare of the shifted constant with the
4049 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4050 compared. For two fields at the same position, we do the ANDs with the
4051 similar mask and compare the result of the ANDs.
4053 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4054 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4055 are the left and right operands of the comparison, respectively.
4057 If the optimization described above can be done, we return the resulting
4058 tree. Otherwise we return zero. */
4060 static tree
4061 optimize_bit_field_compare (location_t loc, enum tree_code code,
4062 tree compare_type, tree lhs, tree rhs)
4064 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4065 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4066 tree type = TREE_TYPE (lhs);
4067 tree unsigned_type;
4068 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4069 machine_mode lmode, rmode;
4070 scalar_int_mode nmode;
4071 int lunsignedp, runsignedp;
4072 int lreversep, rreversep;
4073 int lvolatilep = 0, rvolatilep = 0;
4074 tree linner, rinner = NULL_TREE;
4075 tree mask;
4076 tree offset;
4078 /* Get all the information about the extractions being done. If the bit size
4079 is the same as the size of the underlying object, we aren't doing an
4080 extraction at all and so can do nothing. We also don't want to
4081 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4082 then will no longer be able to replace it. */
4083 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4084 &lunsignedp, &lreversep, &lvolatilep);
4085 if (linner == lhs
4086 || !known_size_p (plbitsize)
4087 || !plbitsize.is_constant (&lbitsize)
4088 || !plbitpos.is_constant (&lbitpos)
4089 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4090 || offset != 0
4091 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4092 || lvolatilep)
4093 return 0;
4095 if (const_p)
4096 rreversep = lreversep;
4097 else
4099 /* If this is not a constant, we can only do something if bit positions,
4100 sizes, signedness and storage order are the same. */
4101 rinner
4102 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4103 &runsignedp, &rreversep, &rvolatilep);
4105 if (rinner == rhs
4106 || maybe_ne (lbitpos, rbitpos)
4107 || maybe_ne (lbitsize, rbitsize)
4108 || lunsignedp != runsignedp
4109 || lreversep != rreversep
4110 || offset != 0
4111 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4112 || rvolatilep)
4113 return 0;
4116 /* Honor the C++ memory model and mimic what RTL expansion does. */
4117 poly_uint64 bitstart = 0;
4118 poly_uint64 bitend = 0;
4119 if (TREE_CODE (lhs) == COMPONENT_REF)
4121 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4122 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4123 return 0;
4126 /* See if we can find a mode to refer to this field. We should be able to,
4127 but fail if we can't. */
4128 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4129 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4130 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4131 TYPE_ALIGN (TREE_TYPE (rinner))),
4132 BITS_PER_WORD, false, &nmode))
4133 return 0;
4135 /* Set an unsigned type of the precision of this mode for the
4136 shifts below. */
4137 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4139 /* Compute the bit position and size for the new reference and our offset
4140 within it. If the new reference is the same size as the original, we
4141 won't optimize anything, so return zero. */
4142 nbitsize = GET_MODE_BITSIZE (nmode);
4143 nbitpos = lbitpos & ~ (nbitsize - 1);
4144 lbitpos -= nbitpos;
4145 if (nbitsize == lbitsize)
4146 return 0;
4148 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4149 lbitpos = nbitsize - lbitsize - lbitpos;
4151 /* Make the mask to be used against the extracted field. */
4152 mask = build_int_cst_type (unsigned_type, -1);
4153 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4154 mask = const_binop (RSHIFT_EXPR, mask,
4155 size_int (nbitsize - lbitsize - lbitpos));
4157 if (! const_p)
4159 if (nbitpos < 0)
4160 return 0;
4162 /* If not comparing with a constant, just rework the comparison
4163 and return. */
4164 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4165 nbitsize, nbitpos, 1, lreversep);
4166 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4167 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4168 nbitsize, nbitpos, 1, rreversep);
4169 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4170 return fold_build2_loc (loc, code, compare_type, t1, t2);
4173 /* Otherwise, we are handling the constant case. See if the constant is too
4174 big for the field. Warn and fold the comparison to a constant if so. We do
4175 this not only for its own sake, but to avoid having to test for this
4176 error case below. If we didn't, we might generate wrong code.
4178 For unsigned fields, the constant shifted right by the field length should
4179 be all zero. For signed fields, the high-order bits should agree with
4180 the sign bit. */
4182 if (lunsignedp)
4184 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4186 warning (0, "comparison is always %d due to width of bit-field",
4187 code == NE_EXPR);
4188 return constant_boolean_node (code == NE_EXPR, compare_type);
4191 else
4193 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4194 if (tem != 0 && tem != -1)
4196 warning (0, "comparison is always %d due to width of bit-field",
4197 code == NE_EXPR);
4198 return constant_boolean_node (code == NE_EXPR, compare_type);
4202 if (nbitpos < 0)
4203 return 0;
4205 /* Single-bit compares should always be against zero. */
4206 if (lbitsize == 1 && ! integer_zerop (rhs))
4208 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4209 rhs = build_int_cst (type, 0);
4212 /* Make a new bitfield reference, shift the constant over the
4213 appropriate number of bits, and mask it with the computed mask
4214 (in case this was a signed field). */
4215 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4216 nbitsize, nbitpos, 1, lreversep);
4218 rhs = const_binop (BIT_AND_EXPR,
4219 const_binop (LSHIFT_EXPR,
4220 fold_convert_loc (loc, unsigned_type, rhs),
4221 size_int (lbitpos)),
4222 mask);
4224 lhs = build2_loc (loc, code, compare_type,
4225 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4226 return lhs;
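/* Editor's addition -- an illustrative sketch, not part of GCC. It shows
   the effect of optimize_bit_field_compare at the source level, assuming a
   target that allocates the hypothetical bit-fields below upward from bit 0
   of a single byte.  */

struct bf_example { unsigned a : 3; unsigned b : 5; };

static int
bitfield_compare_before (struct bf_example x)
{
  return x.b == 7;		/* extract the field, then compare */
}

static int
bitfield_compare_after (unsigned char byte)
{
  /* B occupies bits 3..7 of the containing byte, so AND with a mask and
     compare against the shifted constant; no extraction shift remains.  */
  return (byte & 0xf8) == (7 << 3);
}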
4229 /* Subroutine for fold_truth_andor_1: decode a field reference.
4231 If EXP is a component reference, we return the innermost reference.
4233 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4234 set to the starting bit number.
4236 If the innermost field can be completely contained in a mode-sized
4237 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4239 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4240 otherwise it is not changed.
4242 *PUNSIGNEDP is set to the signedness of the field.
4244 *PREVERSEP is set to the storage order of the field.
4246 *PMASK is set to the mask used. This is either contained in a
4247 BIT_AND_EXPR or derived from the width of the field.
4249 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4251 Return 0 if this is not a component reference or is one that we can't
4252 do anything with. */
4254 static tree
4255 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4256 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4257 int *punsignedp, int *preversep, int *pvolatilep,
4258 tree *pmask, tree *pand_mask)
4260 tree exp = *exp_;
4261 tree outer_type = 0;
4262 tree and_mask = 0;
4263 tree mask, inner, offset;
4264 tree unsigned_type;
4265 unsigned int precision;
4267 /* All the optimizations using this function assume integer fields.
4268 There are problems with FP fields since the type_for_size call
4269 below can fail for, e.g., XFmode. */
4270 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4271 return 0;
4273 /* We are interested in the bare arrangement of bits, so strip everything
4274 that doesn't affect the machine mode. However, record the type of the
4275 outermost expression if it may matter below. */
4276 if (CONVERT_EXPR_P (exp)
4277 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4278 outer_type = TREE_TYPE (exp);
4279 STRIP_NOPS (exp);
4281 if (TREE_CODE (exp) == BIT_AND_EXPR)
4283 and_mask = TREE_OPERAND (exp, 1);
4284 exp = TREE_OPERAND (exp, 0);
4285 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4286 if (TREE_CODE (and_mask) != INTEGER_CST)
4287 return 0;
4290 poly_int64 poly_bitsize, poly_bitpos;
4291 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4292 pmode, punsignedp, preversep, pvolatilep);
4293 if ((inner == exp && and_mask == 0)
4294 || !poly_bitsize.is_constant (pbitsize)
4295 || !poly_bitpos.is_constant (pbitpos)
4296 || *pbitsize < 0
4297 || offset != 0
4298 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4299 /* Reject out-of-bound accesses (PR79731). */
4300 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4301 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4302 *pbitpos + *pbitsize) < 0))
4303 return 0;
4305 *exp_ = exp;
4307 /* If the number of bits in the reference is the same as the bitsize of
4308 the outer type, then the outer type gives the signedness. Otherwise
4309 (in case of a small bitfield) the signedness is unchanged. */
4310 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4311 *punsignedp = TYPE_UNSIGNED (outer_type);
4313 /* Compute the mask to access the bitfield. */
4314 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4315 precision = TYPE_PRECISION (unsigned_type);
4317 mask = build_int_cst_type (unsigned_type, -1);
4319 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4320 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4322 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4323 if (and_mask != 0)
4324 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4325 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4327 *pmask = mask;
4328 *pand_mask = and_mask;
4329 return inner;
4332 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4333 bit positions and the type of MASK is signed. */
4335 static int
4336 all_ones_mask_p (const_tree mask, unsigned int size)
4338 tree type = TREE_TYPE (mask);
4339 unsigned int precision = TYPE_PRECISION (type);
4341 /* If this function returns true when the type of the mask is
4342 UNSIGNED, then there will be errors. In particular see
4343 gcc.c-torture/execute/990326-1.c. There does not appear to be
4344 any documentation paper trail as to why this is so. But the
4345 pre-wide-int code worked with that restriction and it has been preserved
4346 here. */
4347 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4348 return false;
4350 return wi::mask (size, false, precision) == wi::to_wide (mask);
4353 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4354 represents the sign bit of EXP's type. If EXP represents a sign
4355 or zero extension, also test VAL against the unextended type.
4356 The return value is the (sub)expression whose sign bit is VAL,
4357 or NULL_TREE otherwise. */
4359 tree
4360 sign_bit_p (tree exp, const_tree val)
4362 int width;
4363 tree t;
4365 /* Tree EXP must have an integral type. */
4366 t = TREE_TYPE (exp);
4367 if (! INTEGRAL_TYPE_P (t))
4368 return NULL_TREE;
4370 /* Tree VAL must be an integer constant. */
4371 if (TREE_CODE (val) != INTEGER_CST
4372 || TREE_OVERFLOW (val))
4373 return NULL_TREE;
4375 width = TYPE_PRECISION (t);
4376 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4377 return exp;
4379 /* Handle extension from a narrower type. */
4380 if (TREE_CODE (exp) == NOP_EXPR
4381 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4382 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4384 return NULL_TREE;
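/* Editor's addition -- an illustrative sketch, not part of GCC. Callers
   use sign_bit_p to turn a test of the sign bit into a sign comparison;
   for a 32-bit int the only constant it accepts is 1 << 31. The function
   names are hypothetical, and the equivalence assumes the usual
   two's-complement representation.  */

static int
sign_bit_test_before (int x)
{
  return (x & 0x80000000u) != 0;
}

static int
sign_bit_test_after (int x)
{
  return x < 0;
}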
4387 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4388 to be evaluated unconditionally. */
4390 static int
4391 simple_operand_p (const_tree exp)
4393 /* Strip any conversions that don't change the machine mode. */
4394 STRIP_NOPS (exp);
4396 return (CONSTANT_CLASS_P (exp)
4397 || TREE_CODE (exp) == SSA_NAME
4398 || (DECL_P (exp)
4399 && ! TREE_ADDRESSABLE (exp)
4400 && ! TREE_THIS_VOLATILE (exp)
4401 && ! DECL_NONLOCAL (exp)
4402 /* Don't regard global variables as simple. They may be
4403 allocated in ways unknown to the compiler (shared memory,
4404 #pragma weak, etc). */
4405 && ! TREE_PUBLIC (exp)
4406 && ! DECL_EXTERNAL (exp)
4407 /* Weakrefs are not safe to be read, since they can be NULL.
4408 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4409 have DECL_WEAK flag set. */
4410 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4411 /* Loading a static variable is unduly expensive, but global
4412 registers aren't expensive. */
4413 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4416 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4417 to be evaluated unconditionally.
4418 In addition to simple_operand_p, we assume that comparisons, conversions,
4419 and logic-not operations are simple, if their operands are simple, too. */
4421 static bool
4422 simple_operand_p_2 (tree exp)
4424 enum tree_code code;
4426 if (TREE_SIDE_EFFECTS (exp)
4427 || tree_could_trap_p (exp))
4428 return false;
4430 while (CONVERT_EXPR_P (exp))
4431 exp = TREE_OPERAND (exp, 0);
4433 code = TREE_CODE (exp);
4435 if (TREE_CODE_CLASS (code) == tcc_comparison)
4436 return (simple_operand_p (TREE_OPERAND (exp, 0))
4437 && simple_operand_p (TREE_OPERAND (exp, 1)));
4439 if (code == TRUTH_NOT_EXPR)
4440 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4442 return simple_operand_p (exp);
4446 /* The following functions are subroutines to fold_range_test and allow it to
4447 try to change a logical combination of comparisons into a range test.
4449 For example, both
4450 X == 2 || X == 3 || X == 4 || X == 5
4451 and
4452 X >= 2 && X <= 5
4453 are converted to
4454 (unsigned) (X - 2) <= 3
4456 We describe each set of comparisons as being either inside or outside
4457 a range, using a variable named like IN_P, and then describe the
4458 range with a lower and upper bound. If one of the bounds is omitted,
4459 it represents either the highest or lowest value of the type.
4461 In the comments below, we represent a range by two numbers in brackets
4462 preceded by a "+" to designate being inside that range, or a "-" to
4463 designate being outside that range, so the condition can be inverted by
4464 flipping the prefix. An omitted bound is represented by a "-". For
4465 example, "- [-, 10]" means being outside the range starting at the lowest
4466 possible value and ending at 10, in other words, being greater than 10.
4467 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4468 always false.
4470 We set up things so that the missing bounds are handled in a consistent
4471 manner so neither a missing bound nor "true" and "false" need to be
4472 handled using a special case. */
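/* Editor's addition -- an illustrative sketch, not part of GCC. The
   canonical rewrite described above, in plain C; the subtraction is done
   in the unsigned type so that wrap-around is well defined. Assumes a
   32-bit int; the function names are hypothetical.  */

static int
range_test_before (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
range_test_after (int x)
{
  return (unsigned) x - 2 <= 3;	/* i.e. (unsigned) (X - 2) <= 3 */
}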
4474 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4475 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4476 and UPPER1_P are nonzero if the respective argument is an upper bound
4477 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4478 must be specified for a comparison. ARG1 will be converted to ARG0's
4479 type if both are specified. */
4481 static tree
4482 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4483 tree arg1, int upper1_p)
4485 tree tem;
4486 int result;
4487 int sgn0, sgn1;
4489 /* If neither arg represents infinity, do the normal operation.
4490 Else, if not a comparison, return infinity. Else handle the special
4491 comparison rules. Note that most of the cases below won't occur, but
4492 are handled for consistency. */
4494 if (arg0 != 0 && arg1 != 0)
4496 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4497 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4498 STRIP_NOPS (tem);
4499 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4502 if (TREE_CODE_CLASS (code) != tcc_comparison)
4503 return 0;
4505 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4506 for neither. In real maths, we cannot assume open ended ranges are
4507 the same. But, this is computer arithmetic, where numbers are finite.
4508 We can therefore substitute, for any missing bound, a value Z that is
4509 greater than any representable number. This permits
4510 us to treat unbounded ranges as equal. */
4511 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4512 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4513 switch (code)
4515 case EQ_EXPR:
4516 result = sgn0 == sgn1;
4517 break;
4518 case NE_EXPR:
4519 result = sgn0 != sgn1;
4520 break;
4521 case LT_EXPR:
4522 result = sgn0 < sgn1;
4523 break;
4524 case LE_EXPR:
4525 result = sgn0 <= sgn1;
4526 break;
4527 case GT_EXPR:
4528 result = sgn0 > sgn1;
4529 break;
4530 case GE_EXPR:
4531 result = sgn0 >= sgn1;
4532 break;
4533 default:
4534 gcc_unreachable ();
4537 return constant_boolean_node (result, type);
4540 /* Helper routine for make_range. Perform one step for it, return
4541 new expression if the loop should continue or NULL_TREE if it should
4542 stop. */
4544 tree
4545 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4546 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4547 bool *strict_overflow_p)
4549 tree arg0_type = TREE_TYPE (arg0);
4550 tree n_low, n_high, low = *p_low, high = *p_high;
4551 int in_p = *p_in_p, n_in_p;
4553 switch (code)
4555 case TRUTH_NOT_EXPR:
4556 /* We can only do something if the range is testing for zero. */
4557 if (low == NULL_TREE || high == NULL_TREE
4558 || ! integer_zerop (low) || ! integer_zerop (high))
4559 return NULL_TREE;
4560 *p_in_p = ! in_p;
4561 return arg0;
4563 case EQ_EXPR: case NE_EXPR:
4564 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4565 /* We can only do something if the range is testing for zero
4566 and if the second operand is an integer constant. Note that
4567 saying something is "in" the range we make is done by
4568 complementing IN_P, since IN_P is set for the initial case of
4569 being not equal to zero; "out" is leaving it alone. */
4570 if (low == NULL_TREE || high == NULL_TREE
4571 || ! integer_zerop (low) || ! integer_zerop (high)
4572 || TREE_CODE (arg1) != INTEGER_CST)
4573 return NULL_TREE;
4575 switch (code)
4577 case NE_EXPR: /* - [c, c] */
4578 low = high = arg1;
4579 break;
4580 case EQ_EXPR: /* + [c, c] */
4581 in_p = ! in_p, low = high = arg1;
4582 break;
4583 case GT_EXPR: /* - [-, c] */
4584 low = 0, high = arg1;
4585 break;
4586 case GE_EXPR: /* + [c, -] */
4587 in_p = ! in_p, low = arg1, high = 0;
4588 break;
4589 case LT_EXPR: /* - [c, -] */
4590 low = arg1, high = 0;
4591 break;
4592 case LE_EXPR: /* + [-, c] */
4593 in_p = ! in_p, low = 0, high = arg1;
4594 break;
4595 default:
4596 gcc_unreachable ();
4599 /* If this is an unsigned comparison, we also know that EXP is
4600 greater than or equal to zero. We base the range tests we make
4601 on that fact, so we record it here so we can parse existing
4602 range tests. We test arg0_type since often the return type
4603 of, e.g. EQ_EXPR, is boolean. */
4604 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4606 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4607 in_p, low, high, 1,
4608 build_int_cst (arg0_type, 0),
4609 NULL_TREE))
4610 return NULL_TREE;
4612 in_p = n_in_p, low = n_low, high = n_high;
4614 /* If the high bound is missing, but we have a nonzero low
4615 bound, reverse the range so it goes from zero to the low bound
4616 minus 1. */
4617 if (high == 0 && low && ! integer_zerop (low))
4619 in_p = ! in_p;
4620 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4621 build_int_cst (TREE_TYPE (low), 1), 0);
4622 low = build_int_cst (arg0_type, 0);
4626 *p_low = low;
4627 *p_high = high;
4628 *p_in_p = in_p;
4629 return arg0;
4631 case NEGATE_EXPR:
4632 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4633 low and high are non-NULL, then normalize will DTRT. */
4634 if (!TYPE_UNSIGNED (arg0_type)
4635 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4637 if (low == NULL_TREE)
4638 low = TYPE_MIN_VALUE (arg0_type);
4639 if (high == NULL_TREE)
4640 high = TYPE_MAX_VALUE (arg0_type);
4643 /* (-x) IN [a,b] -> x in [-b, -a] */
4644 n_low = range_binop (MINUS_EXPR, exp_type,
4645 build_int_cst (exp_type, 0),
4646 0, high, 1);
4647 n_high = range_binop (MINUS_EXPR, exp_type,
4648 build_int_cst (exp_type, 0),
4649 0, low, 0);
4650 if (n_high != 0 && TREE_OVERFLOW (n_high))
4651 return NULL_TREE;
4652 goto normalize;
4654 case BIT_NOT_EXPR:
4655 /* ~ X -> -X - 1 */
4656 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4657 build_int_cst (exp_type, 1));
4659 case PLUS_EXPR:
4660 case MINUS_EXPR:
4661 if (TREE_CODE (arg1) != INTEGER_CST)
4662 return NULL_TREE;
4664 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4665 move a constant to the other side. */
4666 if (!TYPE_UNSIGNED (arg0_type)
4667 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4668 return NULL_TREE;
4670 /* If EXP is signed, any overflow in the computation is undefined,
4671 so we don't worry about it so long as our computations on
4672 the bounds don't overflow. For unsigned, overflow is defined
4673 and this is exactly the right thing. */
4674 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4675 arg0_type, low, 0, arg1, 0);
4676 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4677 arg0_type, high, 1, arg1, 0);
4678 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4679 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4680 return NULL_TREE;
4682 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4683 *strict_overflow_p = true;
4685 normalize:
4686 /* Check for an unsigned range which has wrapped around the maximum
4687 value thus making n_high < n_low, and normalize it. */
4688 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4690 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4691 build_int_cst (TREE_TYPE (n_high), 1), 0);
4692 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4693 build_int_cst (TREE_TYPE (n_low), 1), 0);
4695 /* If the range is of the form +/- [ x+1, x ], we won't
4696 be able to normalize it. But then, it represents the
4697 whole range or the empty set, so make it
4698 +/- [ -, - ]. */
4699 if (tree_int_cst_equal (n_low, low)
4700 && tree_int_cst_equal (n_high, high))
4701 low = high = 0;
4702 else
4703 in_p = ! in_p;
4705 else
4706 low = n_low, high = n_high;
4708 *p_low = low;
4709 *p_high = high;
4710 *p_in_p = in_p;
4711 return arg0;
4713 CASE_CONVERT:
4714 case NON_LVALUE_EXPR:
4715 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4716 return NULL_TREE;
4718 if (! INTEGRAL_TYPE_P (arg0_type)
4719 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4720 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4721 return NULL_TREE;
4723 n_low = low, n_high = high;
4725 if (n_low != 0)
4726 n_low = fold_convert_loc (loc, arg0_type, n_low);
4728 if (n_high != 0)
4729 n_high = fold_convert_loc (loc, arg0_type, n_high);
4731 /* If we're converting arg0 from an unsigned type to exp's
4732 signed type, we will be doing the comparison as unsigned.
4733 The tests above have already verified that LOW and HIGH
4734 are both positive.
4736 So we have to ensure that we will handle large unsigned
4737 values the same way that the current signed bounds treat
4738 negative values. */
4740 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4742 tree high_positive;
4743 tree equiv_type;
4744 /* For fixed-point modes, we need to pass the saturating flag
4745 as the 2nd parameter. */
4746 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4747 equiv_type
4748 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4749 TYPE_SATURATING (arg0_type));
4750 else
4751 equiv_type
4752 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4754 /* A range without an upper bound is, naturally, unbounded.
4755 Since convert would have cropped a very large value, use
4756 the max value for the destination type. */
4757 high_positive
4758 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4759 : TYPE_MAX_VALUE (arg0_type);
4761 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4762 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4763 fold_convert_loc (loc, arg0_type,
4764 high_positive),
4765 build_int_cst (arg0_type, 1));
4767 /* If the low bound is specified, "and" the range with the
4768 range for which the original unsigned value will be
4769 positive. */
4770 if (low != 0)
4772 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4773 1, fold_convert_loc (loc, arg0_type,
4774 integer_zero_node),
4775 high_positive))
4776 return NULL_TREE;
4778 in_p = (n_in_p == in_p);
4780 else
4782 /* Otherwise, "or" the range with the range of the input
4783 that will be interpreted as negative. */
4784 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4785 1, fold_convert_loc (loc, arg0_type,
4786 integer_zero_node),
4787 high_positive))
4788 return NULL_TREE;
4790 in_p = (in_p != n_in_p);
4794 *p_low = n_low;
4795 *p_high = n_high;
4796 *p_in_p = in_p;
4797 return arg0;
4799 default:
4800 return NULL_TREE;
4804 /* Given EXP, a logical expression, set the range it is testing into
4805 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4806 actually being tested. *PLOW and *PHIGH will be made of the same
4807 type as the returned expression. If EXP is not a comparison, we
4808 will most likely not be returning a useful value and range. Set
4809 *STRICT_OVERFLOW_P to true if the return value is only valid
4810 because signed overflow is undefined; otherwise, do not change
4811 *STRICT_OVERFLOW_P. */
4813 tree
4814 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4815 bool *strict_overflow_p)
4817 enum tree_code code;
4818 tree arg0, arg1 = NULL_TREE;
4819 tree exp_type, nexp;
4820 int in_p;
4821 tree low, high;
4822 location_t loc = EXPR_LOCATION (exp);
4824 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4825 and see if we can refine the range. Some of the cases below may not
4826 happen, but it doesn't seem worth worrying about this. We "continue"
4827 the outer loop when we've changed something; otherwise we "break"
4828 the switch, which will "break" the while. */
4830 in_p = 0;
4831 low = high = build_int_cst (TREE_TYPE (exp), 0);
4833 while (1)
4835 code = TREE_CODE (exp);
4836 exp_type = TREE_TYPE (exp);
4837 arg0 = NULL_TREE;
4839 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4841 if (TREE_OPERAND_LENGTH (exp) > 0)
4842 arg0 = TREE_OPERAND (exp, 0);
4843 if (TREE_CODE_CLASS (code) == tcc_binary
4844 || TREE_CODE_CLASS (code) == tcc_comparison
4845 || (TREE_CODE_CLASS (code) == tcc_expression
4846 && TREE_OPERAND_LENGTH (exp) > 1))
4847 arg1 = TREE_OPERAND (exp, 1);
4849 if (arg0 == NULL_TREE)
4850 break;
4852 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4853 &high, &in_p, strict_overflow_p);
4854 if (nexp == NULL_TREE)
4855 break;
4856 exp = nexp;
4859 /* If EXP is a constant, we can evaluate whether this is true or false. */
4860 if (TREE_CODE (exp) == INTEGER_CST)
4862 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4863 exp, 0, low, 0))
4864 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4865 exp, 1, high, 1)));
4866 low = high = 0;
4867 exp = 0;
4870 *pin_p = in_p, *plow = low, *phigh = high;
4871 return exp;
4874 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
4875 a bitwise check i.e. when
4876 LOW == 0xXX...X00...0
4877 HIGH == 0xXX...X11...1
4878 Return corresponding mask in MASK and stem in VALUE. */
4880 static bool
4881 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
4882 tree *value)
4884 if (TREE_CODE (low) != INTEGER_CST
4885 || TREE_CODE (high) != INTEGER_CST)
4886 return false;
4888 unsigned prec = TYPE_PRECISION (type);
4889 wide_int lo = wi::to_wide (low, prec);
4890 wide_int hi = wi::to_wide (high, prec);
4892 wide_int end_mask = lo ^ hi;
4893 if ((end_mask & (end_mask + 1)) != 0
4894 || (lo & end_mask) != 0)
4895 return false;
4897 wide_int stem_mask = ~end_mask;
4898 wide_int stem = lo & stem_mask;
4899 if (stem != (hi & stem_mask))
4900 return false;
4902 *mask = wide_int_to_tree (type, stem_mask);
4903 *value = wide_int_to_tree (type, stem);
4905 return true;
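/* Editor's addition -- an illustrative sketch, not part of GCC. For the
   range [0x20, 0x3f] the bounds differ only in their trailing bits, so
   maskable_range_p produces stem mask ~0x1f and stem 0x20, and the range
   check collapses to one masked compare. The names are hypothetical.  */

static int
maskable_before (unsigned x)
{
  return x >= 0x20 && x <= 0x3f;
}

static int
maskable_after (unsigned x)
{
  return (x & ~0x1fu) == 0x20;
}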
4908 /* Helper routine for build_range_check and match.pd. Return the type to
4909 perform the check or NULL if it shouldn't be optimized. */
4911 tree
4912 range_check_type (tree etype)
4914 /* First make sure that arithmetic in this type is valid, then make sure
4915 that it wraps around. */
4916 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4917 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4918 TYPE_UNSIGNED (etype));
4920 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4922 tree utype, minv, maxv;
4924 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4925 for the type in question, as we rely on this here. */
4926 utype = unsigned_type_for (etype);
4927 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4928 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4929 build_int_cst (TREE_TYPE (maxv), 1), 1);
4930 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4932 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4933 minv, 1, maxv, 1)))
4934 etype = utype;
4935 else
4936 return NULL_TREE;
4938 return etype;
4941 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4942 type, TYPE, return an expression to test if EXP is in (or out of, depending
4943 on IN_P) the range. Return 0 if the test couldn't be created. */
4945 tree
4946 build_range_check (location_t loc, tree type, tree exp, int in_p,
4947 tree low, tree high)
4949 tree etype = TREE_TYPE (exp), mask, value;
4951 /* Disable this optimization for function pointer expressions
4952 on targets that require function pointer canonicalization. */
4953 if (targetm.have_canonicalize_funcptr_for_compare ()
4954 && TREE_CODE (etype) == POINTER_TYPE
4955 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4956 return NULL_TREE;
4958 if (! in_p)
4960 value = build_range_check (loc, type, exp, 1, low, high);
4961 if (value != 0)
4962 return invert_truthvalue_loc (loc, value);
4964 return 0;
4967 if (low == 0 && high == 0)
4968 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4970 if (low == 0)
4971 return fold_build2_loc (loc, LE_EXPR, type, exp,
4972 fold_convert_loc (loc, etype, high));
4974 if (high == 0)
4975 return fold_build2_loc (loc, GE_EXPR, type, exp,
4976 fold_convert_loc (loc, etype, low));
4978 if (operand_equal_p (low, high, 0))
4979 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4980 fold_convert_loc (loc, etype, low));
4982 if (TREE_CODE (exp) == BIT_AND_EXPR
4983 && maskable_range_p (low, high, etype, &mask, &value))
4984 return fold_build2_loc (loc, EQ_EXPR, type,
4985 fold_build2_loc (loc, BIT_AND_EXPR, etype,
4986 exp, mask),
4987 value);
4989 if (integer_zerop (low))
4991 if (! TYPE_UNSIGNED (etype))
4993 etype = unsigned_type_for (etype);
4994 high = fold_convert_loc (loc, etype, high);
4995 exp = fold_convert_loc (loc, etype, exp);
4997 return build_range_check (loc, type, exp, 1, 0, high);
5000 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5001 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5003 int prec = TYPE_PRECISION (etype);
5005 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5007 if (TYPE_UNSIGNED (etype))
5009 tree signed_etype = signed_type_for (etype);
5010 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5011 etype
5012 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5013 else
5014 etype = signed_etype;
5015 exp = fold_convert_loc (loc, etype, exp);
5017 return fold_build2_loc (loc, GT_EXPR, type, exp,
5018 build_int_cst (etype, 0));
5022 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5023 This requires wrap-around arithmetic for the type of the expression. */
5024 etype = range_check_type (etype);
5025 if (etype == NULL_TREE)
5026 return NULL_TREE;
5028 if (POINTER_TYPE_P (etype))
5029 etype = unsigned_type_for (etype);
5031 high = fold_convert_loc (loc, etype, high);
5032 low = fold_convert_loc (loc, etype, low);
5033 exp = fold_convert_loc (loc, etype, exp);
5035 value = const_binop (MINUS_EXPR, high, low);
5037 if (value != 0 && !TREE_OVERFLOW (value))
5038 return build_range_check (loc, type,
5039 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5040 1, build_int_cst (etype, 0), value);
5042 return 0;
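/* Editor's addition -- an illustrative sketch, not part of GCC. Two of
   the rewrites above in plain C: the signed-type trick noted at the
   "(signed char)c > 0" comment (assuming the usual two's-complement
   conversion), and the general form, which subtracts the low bound in an
   unsigned type so that wrap-around is well defined. Names are
   hypothetical.  */

static int
byte_range_check (unsigned char c)
{
  return (signed char) c > 0;		/* c >= 1 && c <= 127 */
}

static int
general_range_check (int c)
{
  return (unsigned) c - 10 <= 20 - 10;	/* c >= 10 && c <= 20 */
}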
5045 /* Return the predecessor of VAL in its type, handling the infinite case. */
5047 static tree
5048 range_predecessor (tree val)
5050 tree type = TREE_TYPE (val);
5052 if (INTEGRAL_TYPE_P (type)
5053 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5054 return 0;
5055 else
5056 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5057 build_int_cst (TREE_TYPE (val), 1), 0);
5060 /* Return the successor of VAL in its type, handling the infinite case. */
5062 static tree
5063 range_successor (tree val)
5065 tree type = TREE_TYPE (val);
5067 if (INTEGRAL_TYPE_P (type)
5068 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5069 return 0;
5070 else
5071 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5072 build_int_cst (TREE_TYPE (val), 1), 0);
5075 /* Given two ranges, see if we can merge them into one. Return 1 if we
5076 can, 0 if we can't. Set the output range into the specified parameters. */
5078 bool
5079 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5080 tree high0, int in1_p, tree low1, tree high1)
5082 int no_overlap;
5083 int subset;
5084 int temp;
5085 tree tem;
5086 int in_p;
5087 tree low, high;
5088 int lowequal = ((low0 == 0 && low1 == 0)
5089 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5090 low0, 0, low1, 0)));
5091 int highequal = ((high0 == 0 && high1 == 0)
5092 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5093 high0, 1, high1, 1)));
5095 /* Make range 0 be the range that starts first, or ends last if they
5096 start at the same value. Swap them if that is not the case. */
5097 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5098 low0, 0, low1, 0))
5099 || (lowequal
5100 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5101 high1, 1, high0, 1))))
5103 temp = in0_p, in0_p = in1_p, in1_p = temp;
5104 tem = low0, low0 = low1, low1 = tem;
5105 tem = high0, high0 = high1, high1 = tem;
5108 /* If the second range is != high1, where high1 is the maximum value
5109 of the type, try first merging with the < high1 range. */
5110 if (low1
5111 && high1
5112 && TREE_CODE (low1) == INTEGER_CST
5113 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5114 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5115 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5116 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5117 && operand_equal_p (low1, high1, 0))
5119 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5120 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5121 !in1_p, NULL_TREE, range_predecessor (low1)))
5122 return true;
5123 /* Similarly for the second range != low1, where low1 is the minimum
5124 value of the type, try first merging with the > low1 range. */
5125 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5126 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5127 !in1_p, range_successor (low1), NULL_TREE))
5128 return true;
5131 /* Now flag two cases, whether the ranges are disjoint or whether the
5132 second range is totally subsumed in the first. Note that the tests
5133 below are simplified by the ones above. */
5134 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5135 high0, 1, low1, 0));
5136 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5137 high1, 1, high0, 1));
5139 /* We now have four cases, depending on whether we are including or
5140 excluding the two ranges. */
5141 if (in0_p && in1_p)
5143 /* If they don't overlap, the result is false. If the second range
5144 is a subset it is the result. Otherwise, the range is from the start
5145 of the second to the end of the first. */
5146 if (no_overlap)
5147 in_p = 0, low = high = 0;
5148 else if (subset)
5149 in_p = 1, low = low1, high = high1;
5150 else
5151 in_p = 1, low = low1, high = high0;
5154 else if (in0_p && ! in1_p)
5156 /* If they don't overlap, the result is the first range. If they are
5157 equal, the result is false. If the second range is a subset of the
5158 first, and the ranges begin at the same place, we go from just after
5159 the end of the second range to the end of the first. If the second
5160 range is not a subset of the first, or if it is a subset and both
5161 ranges end at the same place, the range starts at the start of the
5162 first range and ends just before the second range.
5163 Otherwise, we can't describe this as a single range. */
5164 if (no_overlap)
5165 in_p = 1, low = low0, high = high0;
5166 else if (lowequal && highequal)
5167 in_p = 0, low = high = 0;
5168 else if (subset && lowequal)
5170 low = range_successor (high1);
5171 high = high0;
5172 in_p = 1;
5173 if (low == 0)
5175 /* We are in the weird situation where high0 > high1 but
5176 high1 has no successor. Punt. */
5177 return 0;
5180 else if (! subset || highequal)
5182 low = low0;
5183 high = range_predecessor (low1);
5184 in_p = 1;
5185 if (high == 0)
5187 /* low0 < low1 but low1 has no predecessor. Punt. */
5188 return 0;
5191 else
5192 return 0;
5195 else if (! in0_p && in1_p)
5197 /* If they don't overlap, the result is the second range. If the second
5198 is a subset of the first, the result is false. Otherwise,
5199 the range starts just after the first range and ends at the
5200 end of the second. */
5201 if (no_overlap)
5202 in_p = 1, low = low1, high = high1;
5203 else if (subset || highequal)
5204 in_p = 0, low = high = 0;
5205 else
5207 low = range_successor (high0);
5208 high = high1;
5209 in_p = 1;
5210 if (low == 0)
5212 /* high1 > high0 but high0 has no successor. Punt. */
5213 return 0;
5218 else
5220 /* The case where we are excluding both ranges. Here the complex case
5221 is if they don't overlap. In that case, the only time we have a
5222 range is if they are adjacent. If the second is a subset of the
5223 first, the result is the first. Otherwise, the range to exclude
5224 starts at the beginning of the first range and ends at the end of the
5225 second. */
5226 if (no_overlap)
5228 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5229 range_successor (high0),
5230 1, low1, 0)))
5231 in_p = 0, low = low0, high = high1;
5232 else
5234 /* Canonicalize - [min, x] into - [-, x]. */
5235 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5236 switch (TREE_CODE (TREE_TYPE (low0)))
5238 case ENUMERAL_TYPE:
5239 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5240 GET_MODE_BITSIZE
5241 (TYPE_MODE (TREE_TYPE (low0)))))
5242 break;
5243 /* FALLTHROUGH */
5244 case INTEGER_TYPE:
5245 if (tree_int_cst_equal (low0,
5246 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5247 low0 = 0;
5248 break;
5249 case POINTER_TYPE:
5250 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5251 && integer_zerop (low0))
5252 low0 = 0;
5253 break;
5254 default:
5255 break;
5258 /* Canonicalize - [x, max] into - [x, -]. */
5259 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5260 switch (TREE_CODE (TREE_TYPE (high1)))
5262 case ENUMERAL_TYPE:
5263 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5264 GET_MODE_BITSIZE
5265 (TYPE_MODE (TREE_TYPE (high1)))))
5266 break;
5267 /* FALLTHROUGH */
5268 case INTEGER_TYPE:
5269 if (tree_int_cst_equal (high1,
5270 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5271 high1 = 0;
5272 break;
5273 case POINTER_TYPE:
5274 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5275 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5276 high1, 1,
5277 build_int_cst (TREE_TYPE (high1), 1),
5278 1)))
5279 high1 = 0;
5280 break;
5281 default:
5282 break;
5285 /* The ranges might also be adjacent between the maximum and
5286 minimum values of the given type. For
5287 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5288 return + [x + 1, y - 1]. */
5289 if (low0 == 0 && high1 == 0)
5291 low = range_successor (high0);
5292 high = range_predecessor (low1);
5293 if (low == 0 || high == 0)
5294 return 0;
5296 in_p = 1;
5298 else
5299 return 0;
5302 else if (subset)
5303 in_p = 0, low = low0, high = high0;
5304 else
5305 in_p = 0, low = low0, high = high1;
5308 *pin_p = in_p, *plow = low, *phigh = high;
5309 return 1;
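/* Editor's addition -- an illustrative sketch, not part of GCC. Merging
   the ranges +[2, 10] and +[5, 20] under AND gives +[5, 10], so the two
   hypothetical functions below are equivalent.  */

static int
merged_before (int x)
{
  return (x >= 2 && x <= 10) && (x >= 5 && x <= 20);
}

static int
merged_after (int x)
{
  return x >= 5 && x <= 10;
}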
5313 /* Subroutine of fold, looking inside expressions of the form
5314 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5315 of the COND_EXPR. This function is being used also to optimize
5316 A op B ? C : A, by reversing the comparison first.
5318 Return a folded expression whose code is not a COND_EXPR
5319 anymore, or NULL_TREE if no folding opportunity is found. */
5321 static tree
5322 fold_cond_expr_with_comparison (location_t loc, tree type,
5323 tree arg0, tree arg1, tree arg2)
5325 enum tree_code comp_code = TREE_CODE (arg0);
5326 tree arg00 = TREE_OPERAND (arg0, 0);
5327 tree arg01 = TREE_OPERAND (arg0, 1);
5328 tree arg1_type = TREE_TYPE (arg1);
5329 tree tem;
5331 STRIP_NOPS (arg1);
5332 STRIP_NOPS (arg2);
5334 /* If we have A op 0 ? A : -A, consider applying the following
5335 transformations:
5337 A == 0? A : -A same as -A
5338 A != 0? A : -A same as A
5339 A >= 0? A : -A same as abs (A)
5340 A > 0? A : -A same as abs (A)
5341 A <= 0? A : -A same as -abs (A)
5342 A < 0? A : -A same as -abs (A)
5344 None of these transformations work for modes with signed
5345 zeros. If A is +/-0, the first two transformations will
5346 change the sign of the result (from +0 to -0, or vice
5347 versa). The last four will fix the sign of the result,
5348 even though the original expressions could be positive or
5349 negative, depending on the sign of A.
5351 Note that all these transformations are correct if A is
5352 NaN, since the two alternatives (A and -A) are also NaNs. */
5353 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5354 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5355 ? real_zerop (arg01)
5356 : integer_zerop (arg01))
5357 && ((TREE_CODE (arg2) == NEGATE_EXPR
5358 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5359 /* In the case that A is of the form X-Y, '-A' (arg2) may
5360 have already been folded to Y-X, check for that. */
5361 || (TREE_CODE (arg1) == MINUS_EXPR
5362 && TREE_CODE (arg2) == MINUS_EXPR
5363 && operand_equal_p (TREE_OPERAND (arg1, 0),
5364 TREE_OPERAND (arg2, 1), 0)
5365 && operand_equal_p (TREE_OPERAND (arg1, 1),
5366 TREE_OPERAND (arg2, 0), 0))))
5367 switch (comp_code)
5369 case EQ_EXPR:
5370 case UNEQ_EXPR:
5371 tem = fold_convert_loc (loc, arg1_type, arg1);
5372 return fold_convert_loc (loc, type, negate_expr (tem));
5373 case NE_EXPR:
5374 case LTGT_EXPR:
5375 return fold_convert_loc (loc, type, arg1);
5376 case UNGE_EXPR:
5377 case UNGT_EXPR:
5378 if (flag_trapping_math)
5379 break;
5380 /* Fall through. */
5381 case GE_EXPR:
5382 case GT_EXPR:
5383 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5384 break;
5385 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5386 return fold_convert_loc (loc, type, tem);
5387 case UNLE_EXPR:
5388 case UNLT_EXPR:
5389 if (flag_trapping_math)
5390 break;
5391 /* FALLTHRU */
5392 case LE_EXPR:
5393 case LT_EXPR:
5394 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5395 break;
5396 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5397 return negate_expr (fold_convert_loc (loc, type, tem));
5398 default:
5399 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5400 break;
5403 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5404 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5405 both transformations are correct when A is NaN: A != 0
5406 is then true, and A == 0 is false. */
5408 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5409 && integer_zerop (arg01) && integer_zerop (arg2))
5411 if (comp_code == NE_EXPR)
5412 return fold_convert_loc (loc, type, arg1);
5413 else if (comp_code == EQ_EXPR)
5414 return build_zero_cst (type);
5417 /* Try some transformations of A op B ? A : B.
5419 A == B? A : B same as B
5420 A != B? A : B same as A
5421 A >= B? A : B same as max (A, B)
5422 A > B? A : B same as max (B, A)
5423 A <= B? A : B same as min (A, B)
5424 A < B? A : B same as min (B, A)
5426 As above, these transformations don't work in the presence
5427 of signed zeros. For example, if A and B are zeros of
5428 opposite sign, the first two transformations will change
5429 the sign of the result. In the last four, the original
5430 expressions give different results for (A=+0, B=-0) and
5431 (A=-0, B=+0), but the transformed expressions do not.
5433 The first two transformations are correct if either A or B
5434 is a NaN. In the first transformation, the condition will
5435 be false, and B will indeed be chosen. In the case of the
5436 second transformation, the condition A != B will be true,
5437 and A will be chosen.
5439 The conversions to max() and min() are not correct if B is
5440 a number and A is not. The conditions in the original
5441 expressions will be false, so all four give B. The min()
5442 and max() versions would give a NaN instead. */
5443 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5444 && operand_equal_for_comparison_p (arg01, arg2)
5445 /* Avoid these transformations if the COND_EXPR may be used
5446 as an lvalue in the C++ front-end. PR c++/19199. */
5447 && (in_gimple_form
5448 || VECTOR_TYPE_P (type)
5449 || (! lang_GNU_CXX ()
5450 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5451 || ! maybe_lvalue_p (arg1)
5452 || ! maybe_lvalue_p (arg2)))
5454 tree comp_op0 = arg00;
5455 tree comp_op1 = arg01;
5456 tree comp_type = TREE_TYPE (comp_op0);
5458 switch (comp_code)
5460 case EQ_EXPR:
5461 return fold_convert_loc (loc, type, arg2);
5462 case NE_EXPR:
5463 return fold_convert_loc (loc, type, arg1);
5464 case LE_EXPR:
5465 case LT_EXPR:
5466 case UNLE_EXPR:
5467 case UNLT_EXPR:
5468 /* In C++ a ?: expression can be an lvalue, so put the
5469 operand which will be used if they are equal first
5470 so that we can convert this back to the
5471 corresponding COND_EXPR. */
5472 if (!HONOR_NANS (arg1))
5474 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5475 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5476 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5477 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5478 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5479 comp_op1, comp_op0);
5480 return fold_convert_loc (loc, type, tem);
5482 break;
5483 case GE_EXPR:
5484 case GT_EXPR:
5485 case UNGE_EXPR:
5486 case UNGT_EXPR:
5487 if (!HONOR_NANS (arg1))
5489 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5490 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5491 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5492 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5493 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5494 comp_op1, comp_op0);
5495 return fold_convert_loc (loc, type, tem);
5497 break;
5498 case UNEQ_EXPR:
5499 if (!HONOR_NANS (arg1))
5500 return fold_convert_loc (loc, type, arg2);
5501 break;
5502 case LTGT_EXPR:
5503 if (!HONOR_NANS (arg1))
5504 return fold_convert_loc (loc, type, arg1);
5505 break;
5506 default:
5507 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5508 break;
5512 return NULL_TREE;
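/* Editor's addition -- an illustrative sketch, not part of GCC. Two of
   the COND_EXPR folds above in plain C, for integers, where signed zeros
   and NaNs are not a concern. The names are hypothetical.  */

static int
cond_abs (int a)
{
  return a >= 0 ? a : -a;	/* folded to ABS_EXPR <a> */
}

static int
cond_max (int a, int b)
{
  return a >= b ? a : b;	/* folded to MAX_EXPR <a, b> */
}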
5517 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5518 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5519 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5520 false) >= 2)
5521 #endif
5523 /* EXP is some logical combination of boolean tests. See if we can
5524 merge it into some range test. Return the new tree if so. */
5526 static tree
5527 fold_range_test (location_t loc, enum tree_code code, tree type,
5528 tree op0, tree op1)
5530 int or_op = (code == TRUTH_ORIF_EXPR
5531 || code == TRUTH_OR_EXPR);
5532 int in0_p, in1_p, in_p;
5533 tree low0, low1, low, high0, high1, high;
5534 bool strict_overflow_p = false;
5535 tree tem, lhs, rhs;
5536 const char * const warnmsg = G_("assuming signed overflow does not occur "
5537 "when simplifying range test");
5539 if (!INTEGRAL_TYPE_P (type))
5540 return 0;
5542 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5543 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5545 /* If this is an OR operation, invert both sides; we will invert
5546 again at the end. */
5547 if (or_op)
5548 in0_p = ! in0_p, in1_p = ! in1_p;
5550 /* If both expressions are the same, if we can merge the ranges, and we
5551 can build the range test, return it or it inverted. If one of the
5552 ranges is always true or always false, consider it to be the same
5553 expression as the other. */
5554 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5555 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5556 in1_p, low1, high1)
5557 && (tem = (build_range_check (loc, type,
5558 lhs != 0 ? lhs
5559 : rhs != 0 ? rhs : integer_zero_node,
5560 in_p, low, high))) != 0)
5562 if (strict_overflow_p)
5563 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5564 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5567 /* On machines where the branch cost is expensive, if this is a
5568 short-circuited branch and the underlying object on both sides
5569 is the same, make a non-short-circuit operation. */
5570 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5571 && !flag_sanitize_coverage
5572 && lhs != 0 && rhs != 0
5573 && (code == TRUTH_ANDIF_EXPR
5574 || code == TRUTH_ORIF_EXPR)
5575 && operand_equal_p (lhs, rhs, 0))
5577 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5578 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5579 which cases we can't do this. */
5580 if (simple_operand_p (lhs))
5581 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5582 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5583 type, op0, op1);
5585 else if (!lang_hooks.decls.global_bindings_p ()
5586 && !CONTAINS_PLACEHOLDER_P (lhs))
5588 tree common = save_expr (lhs);
5590 if ((lhs = build_range_check (loc, type, common,
5591 or_op ? ! in0_p : in0_p,
5592 low0, high0)) != 0
5593 && (rhs = build_range_check (loc, type, common,
5594 or_op ? ! in1_p : in1_p,
5595 low1, high1)) != 0)
5597 if (strict_overflow_p)
5598 fold_overflow_warning (warnmsg,
5599 WARN_STRICT_OVERFLOW_COMPARISON);
5600 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5601 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5602 type, lhs, rhs);
5607 return 0;
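/* Illustrative example of the merge above: for the classic digit test

     ch >= '0' && ch <= '9'

   make_range reduces both operands to ranges on CH that merge into the
   single range [48, 57] (assuming ASCII), and build_range_check then
   emits the usual one-comparison form, roughly

     (unsigned char) (ch - 48) <= 9

   so the two branches collapse into one.  */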
5610 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5611 bit value. Arrange things so the extra bits will be set to zero if and
5612 only if C is sign-extended to its full width. If MASK is nonzero,
5613 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5615 static tree
5616 unextend (tree c, int p, int unsignedp, tree mask)
5618 tree type = TREE_TYPE (c);
5619 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
5620 tree temp;
5622 if (p == modesize || unsignedp)
5623 return c;
5625 /* We work by getting just the sign bit into the low-order bit, then
5626 into the high-order bit, then sign-extend. We then XOR that value
5627 with C. */
5628 temp = build_int_cst (TREE_TYPE (c),
5629 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
5631 /* We must use a signed type in order to get an arithmetic right shift.
5632 However, we must also avoid introducing accidental overflows, so that
5633 a subsequent call to integer_zerop will work. Hence we must
5634 do the type conversion here. At this point, the constant is either
5635 zero or one, and the conversion to a signed type can never overflow.
5636 We could get an overflow if this conversion is done anywhere else. */
5637 if (TYPE_UNSIGNED (type))
5638 temp = fold_convert (signed_type_for (type), temp);
5640 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5641 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5642 if (mask != 0)
5643 temp = const_binop (BIT_AND_EXPR, temp,
5644 fold_convert (TREE_TYPE (c), mask));
5645 /* If necessary, convert the type back to match the type of C. */
5646 if (TYPE_UNSIGNED (type))
5647 temp = fold_convert (type, temp);
5649 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
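/* Worked example for unextend, assuming a 32-bit mode and P == 4:
   the sign bit of C = 0x0000000a (binary 1010) is 1; shifted to bit 31
   and arithmetic-right-shifted by 32 - 4 - 1 = 27 it becomes
   0xfffffff0, and C ^ 0xfffffff0 == 0xfffffffa, i.e. the extra bits
   are set because C was not sign-extended.  Starting instead from the
   sign-extended C = 0xfffffffa, the XOR clears the extra bits and
   yields 0x0000000a.  */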
5652 /* For an expression that has the form
5653 (A && B) || ~B
5654 or
5655 (A || B) && ~B,
5656 we can drop one of the inner expressions and simplify to
5657 A || ~B
5658 or
5659 A && ~B
5660 LOC is the location of the resulting expression. OP is the inner
5661 logical operation; the left-hand side in the examples above, while CMPOP
5662 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5663 removing a condition that guards another, as in
5664 (A != NULL && A->...) || A == NULL
5665 which we must not transform. If RHS_ONLY is true, only eliminate the
5666 right-most operand of the inner logical operation. */
5668 static tree
5669 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5670 bool rhs_only)
5672 tree type = TREE_TYPE (cmpop);
5673 enum tree_code code = TREE_CODE (cmpop);
5674 enum tree_code truthop_code = TREE_CODE (op);
5675 tree lhs = TREE_OPERAND (op, 0);
5676 tree rhs = TREE_OPERAND (op, 1);
5677 tree orig_lhs = lhs, orig_rhs = rhs;
5678 enum tree_code rhs_code = TREE_CODE (rhs);
5679 enum tree_code lhs_code = TREE_CODE (lhs);
5680 enum tree_code inv_code;
5682 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5683 return NULL_TREE;
5685 if (TREE_CODE_CLASS (code) != tcc_comparison)
5686 return NULL_TREE;
5688 if (rhs_code == truthop_code)
5690 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5691 if (newrhs != NULL_TREE)
5693 rhs = newrhs;
5694 rhs_code = TREE_CODE (rhs);
5697 if (lhs_code == truthop_code && !rhs_only)
5699 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5700 if (newlhs != NULL_TREE)
5702 lhs = newlhs;
5703 lhs_code = TREE_CODE (lhs);
5707 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5708 if (inv_code == rhs_code
5709 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5710 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5711 return lhs;
5712 if (!rhs_only && inv_code == lhs_code
5713 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5714 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5715 return rhs;
5716 if (rhs != orig_rhs || lhs != orig_lhs)
5717 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5718 lhs, rhs);
5719 return NULL_TREE;
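/* For illustration, with integer operands (so the inversion is exact),

     (a < b && c != 0) || a >= b

   reaches this routine with OP = (a < b && c != 0) and CMPOP = a >= b;
   INV_CODE of GE_EXPR is LT_EXPR, which matches the lhs of OP, so that
   arm is dropped and the caller can build (c != 0) || a >= b.  */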
5722 /* Find ways of folding logical expressions of LHS and RHS:
5723 Try to merge two comparisons to the same innermost item.
5724 Look for range tests like "ch >= '0' && ch <= '9'".
5725 Look for combinations of simple terms on machines with expensive branches
5726 and evaluate the RHS unconditionally.
5728 For example, if we have p->a == 2 && p->b == 4 and we can make an
5729 object large enough to span both A and B, we can do this with a comparison
5730 against the object ANDed with a mask.
5732 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5733 operations to do this with one comparison.
5735 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5736 function and the one above.
5738 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5739 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5741 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5742 two operands.
5744 We return the simplified tree or 0 if no optimization is possible. */
5746 static tree
5747 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5748 tree lhs, tree rhs)
5750 /* If this is the "or" of two comparisons, we can do something if
5751 the comparisons are NE_EXPR. If this is the "and", we can do something
5752 if the comparisons are EQ_EXPR. I.e.,
5753 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5755 WANTED_CODE is this operation code. For single bit fields, we can
5756 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5757 comparison for one-bit fields. */
5759 enum tree_code wanted_code;
5760 enum tree_code lcode, rcode;
5761 tree ll_arg, lr_arg, rl_arg, rr_arg;
5762 tree ll_inner, lr_inner, rl_inner, rr_inner;
5763 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5764 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5765 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5766 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5767 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5768 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5769 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5770 scalar_int_mode lnmode, rnmode;
5771 tree ll_mask, lr_mask, rl_mask, rr_mask;
5772 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5773 tree l_const, r_const;
5774 tree lntype, rntype, result;
5775 HOST_WIDE_INT first_bit, end_bit;
5776 int volatilep;
5778 /* Start by getting the comparison codes. Fail if anything is volatile.
5779 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5780 it were surrounded with a NE_EXPR. */
5782 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5783 return 0;
5785 lcode = TREE_CODE (lhs);
5786 rcode = TREE_CODE (rhs);
5788 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5790 lhs = build2 (NE_EXPR, truth_type, lhs,
5791 build_int_cst (TREE_TYPE (lhs), 0));
5792 lcode = NE_EXPR;
5795 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5797 rhs = build2 (NE_EXPR, truth_type, rhs,
5798 build_int_cst (TREE_TYPE (rhs), 0));
5799 rcode = NE_EXPR;
5802 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5803 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5804 return 0;
5806 ll_arg = TREE_OPERAND (lhs, 0);
5807 lr_arg = TREE_OPERAND (lhs, 1);
5808 rl_arg = TREE_OPERAND (rhs, 0);
5809 rr_arg = TREE_OPERAND (rhs, 1);
5811 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5812 if (simple_operand_p (ll_arg)
5813 && simple_operand_p (lr_arg))
5815 if (operand_equal_p (ll_arg, rl_arg, 0)
5816 && operand_equal_p (lr_arg, rr_arg, 0))
5818 result = combine_comparisons (loc, code, lcode, rcode,
5819 truth_type, ll_arg, lr_arg);
5820 if (result)
5821 return result;
5823 else if (operand_equal_p (ll_arg, rr_arg, 0)
5824 && operand_equal_p (lr_arg, rl_arg, 0))
5826 result = combine_comparisons (loc, code, lcode,
5827 swap_tree_comparison (rcode),
5828 truth_type, ll_arg, lr_arg);
5829 if (result)
5830 return result;
5834 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5835 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5837 /* If the RHS can be evaluated unconditionally and its operands are
5838 simple, it wins to evaluate the RHS unconditionally on machines
5839 with expensive branches. In this case, this isn't a comparison
5840 that can be merged. */
5842 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5843 false) >= 2
5844 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5845 && simple_operand_p (rl_arg)
5846 && simple_operand_p (rr_arg))
5848 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5849 if (code == TRUTH_OR_EXPR
5850 && lcode == NE_EXPR && integer_zerop (lr_arg)
5851 && rcode == NE_EXPR && integer_zerop (rr_arg)
5852 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5853 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5854 return build2_loc (loc, NE_EXPR, truth_type,
5855 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5856 ll_arg, rl_arg),
5857 build_int_cst (TREE_TYPE (ll_arg), 0));
5859 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5860 if (code == TRUTH_AND_EXPR
5861 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5862 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5863 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5864 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5865 return build2_loc (loc, EQ_EXPR, truth_type,
5866 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5867 ll_arg, rl_arg),
5868 build_int_cst (TREE_TYPE (ll_arg), 0));
5871 /* See if the comparisons can be merged. Then get all the parameters for
5872 each side. */
5874 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5875 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5876 return 0;
5878 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5879 volatilep = 0;
5880 ll_inner = decode_field_reference (loc, &ll_arg,
5881 &ll_bitsize, &ll_bitpos, &ll_mode,
5882 &ll_unsignedp, &ll_reversep, &volatilep,
5883 &ll_mask, &ll_and_mask);
5884 lr_inner = decode_field_reference (loc, &lr_arg,
5885 &lr_bitsize, &lr_bitpos, &lr_mode,
5886 &lr_unsignedp, &lr_reversep, &volatilep,
5887 &lr_mask, &lr_and_mask);
5888 rl_inner = decode_field_reference (loc, &rl_arg,
5889 &rl_bitsize, &rl_bitpos, &rl_mode,
5890 &rl_unsignedp, &rl_reversep, &volatilep,
5891 &rl_mask, &rl_and_mask);
5892 rr_inner = decode_field_reference (loc, &rr_arg,
5893 &rr_bitsize, &rr_bitpos, &rr_mode,
5894 &rr_unsignedp, &rr_reversep, &volatilep,
5895 &rr_mask, &rr_and_mask);
5897 /* The inner operation on the lhs of each comparison must be the
5898 same if we are to be able to do anything. Then see if we have
5899 constants. If not, the same must be true for
5900 the rhs's. */
5901 if (volatilep
5902 || ll_reversep != rl_reversep
5903 || ll_inner == 0 || rl_inner == 0
5904 || ! operand_equal_p (ll_inner, rl_inner, 0))
5905 return 0;
5907 if (TREE_CODE (lr_arg) == INTEGER_CST
5908 && TREE_CODE (rr_arg) == INTEGER_CST)
5910 l_const = lr_arg, r_const = rr_arg;
5911 lr_reversep = ll_reversep;
5913 else if (lr_reversep != rr_reversep
5914 || lr_inner == 0 || rr_inner == 0
5915 || ! operand_equal_p (lr_inner, rr_inner, 0))
5916 return 0;
5917 else
5918 l_const = r_const = 0;
5920 /* If either comparison code is not correct for our logical operation,
5921 fail. However, we can convert a one-bit comparison against zero into
5922 the opposite comparison against that bit being set in the field. */
5924 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5925 if (lcode != wanted_code)
5927 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5929 /* Make the left operand unsigned, since we are only interested
5930 in the value of one bit. Otherwise we are doing the wrong
5931 thing below. */
5932 ll_unsignedp = 1;
5933 l_const = ll_mask;
5935 else
5936 return 0;
5939 /* This is analogous to the code for l_const above. */
5940 if (rcode != wanted_code)
5942 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5944 rl_unsignedp = 1;
5945 r_const = rl_mask;
5947 else
5948 return 0;
5951 /* See if we can find a mode that contains both fields being compared on
5952 the left. If we can't, fail. Otherwise, update all constants and masks
5953 to be relative to a field of that size. */
5954 first_bit = MIN (ll_bitpos, rl_bitpos);
5955 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5956 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5957 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
5958 volatilep, &lnmode))
5959 return 0;
5961 lnbitsize = GET_MODE_BITSIZE (lnmode);
5962 lnbitpos = first_bit & ~ (lnbitsize - 1);
5963 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5964 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5966 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5968 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5969 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5972 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5973 size_int (xll_bitpos));
5974 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5975 size_int (xrl_bitpos));
5977 if (l_const)
5979 l_const = fold_convert_loc (loc, lntype, l_const);
5980 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5981 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5982 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5983 fold_build1_loc (loc, BIT_NOT_EXPR,
5984 lntype, ll_mask))))
5986 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5988 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5991 if (r_const)
5993 r_const = fold_convert_loc (loc, lntype, r_const);
5994 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5995 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5996 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5997 fold_build1_loc (loc, BIT_NOT_EXPR,
5998 lntype, rl_mask))))
6000 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6002 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6006 /* If the right sides are not constant, do the same for it. Also,
6007 disallow this optimization if a size or signedness mismatch occurs
6008 between the left and right sides. */
6009 if (l_const == 0)
6011 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6012 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6013 /* Make sure the two fields on the right
6014 correspond to the left without being swapped. */
6015 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6016 return 0;
6018 first_bit = MIN (lr_bitpos, rr_bitpos);
6019 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6020 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6021 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6022 volatilep, &rnmode))
6023 return 0;
6025 rnbitsize = GET_MODE_BITSIZE (rnmode);
6026 rnbitpos = first_bit & ~ (rnbitsize - 1);
6027 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6028 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6030 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6032 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6033 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6036 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6037 rntype, lr_mask),
6038 size_int (xlr_bitpos));
6039 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6040 rntype, rr_mask),
6041 size_int (xrr_bitpos));
6043 /* Make a mask that corresponds to both fields being compared.
6044 Do this for both items being compared. If the operands are the
6045 same size and the bits being compared are in the same position
6046 then we can do this by masking both and comparing the masked
6047 results. */
6048 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6049 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6050 if (lnbitsize == rnbitsize
6051 && xll_bitpos == xlr_bitpos
6052 && lnbitpos >= 0
6053 && rnbitpos >= 0)
6055 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6056 lntype, lnbitsize, lnbitpos,
6057 ll_unsignedp || rl_unsignedp, ll_reversep);
6058 if (! all_ones_mask_p (ll_mask, lnbitsize))
6059 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6061 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6062 rntype, rnbitsize, rnbitpos,
6063 lr_unsignedp || rr_unsignedp, lr_reversep);
6064 if (! all_ones_mask_p (lr_mask, rnbitsize))
6065 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6067 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6070 /* There is still another way we can do something: If both pairs of
6071 fields being compared are adjacent, we may be able to make a wider
6072 field containing them both.
6074 Note that we still must mask the lhs/rhs expressions. Furthermore,
6075 the mask must be shifted to account for the shift done by
6076 make_bit_field_ref. */
6077 if (((ll_bitsize + ll_bitpos == rl_bitpos
6078 && lr_bitsize + lr_bitpos == rr_bitpos)
6079 || (ll_bitpos == rl_bitpos + rl_bitsize
6080 && lr_bitpos == rr_bitpos + rr_bitsize))
6081 && ll_bitpos >= 0
6082 && rl_bitpos >= 0
6083 && lr_bitpos >= 0
6084 && rr_bitpos >= 0)
6086 tree type;
6088 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6089 ll_bitsize + rl_bitsize,
6090 MIN (ll_bitpos, rl_bitpos),
6091 ll_unsignedp, ll_reversep);
6092 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6093 lr_bitsize + rr_bitsize,
6094 MIN (lr_bitpos, rr_bitpos),
6095 lr_unsignedp, lr_reversep);
6097 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6098 size_int (MIN (xll_bitpos, xrl_bitpos)));
6099 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6100 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6102 /* Convert to the smaller type before masking out unwanted bits. */
6103 type = lntype;
6104 if (lntype != rntype)
6106 if (lnbitsize > rnbitsize)
6108 lhs = fold_convert_loc (loc, rntype, lhs);
6109 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6110 type = rntype;
6112 else if (lnbitsize < rnbitsize)
6114 rhs = fold_convert_loc (loc, lntype, rhs);
6115 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6116 type = lntype;
6120 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6121 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6123 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6124 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6126 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6129 return 0;
6132 /* Handle the case of comparisons with constants. If there is something in
6133 common between the masks, those bits of the constants must be the same.
6134 If not, the condition is always false. Test for this to avoid generating
6135 incorrect code below. */
6136 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6137 if (! integer_zerop (result)
6138 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6139 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6141 if (wanted_code == NE_EXPR)
6143 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6144 return constant_boolean_node (true, truth_type);
6146 else
6148 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6149 return constant_boolean_node (false, truth_type);
6153 if (lnbitpos < 0)
6154 return 0;
6156 /* Construct the expression we will return. First get the component
6157 reference we will make. Unless the mask is all ones the width of
6158 that field, perform the mask operation. Then compare with the
6159 merged constant. */
6160 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6161 lntype, lnbitsize, lnbitpos,
6162 ll_unsignedp || rl_unsignedp, ll_reversep);
6164 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6165 if (! all_ones_mask_p (ll_mask, lnbitsize))
6166 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6168 return build2_loc (loc, wanted_code, truth_type, result,
6169 const_binop (BIT_IOR_EXPR, l_const, r_const));
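/* Illustrative sketch of the merge above.  With

     struct s { unsigned char a, b; } *p;

   the test p->a == 2 && p->b == 4 decodes into two adjacent 8-bit
   fields covered by one 16-bit mode, and on a little-endian target the
   result is roughly a single 16-bit load compared against 0x0402
   (the masking is elided here because all_ones_mask_p holds).  */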
6172 /* T is an integer expression that is being multiplied, divided, or reduced
6173 modulo a constant C (CODE says which operation and what kind of divide
6174 or modulus). See if we can eliminate that operation by folding it with
6175 other operations already in T. WIDE_TYPE, if non-null, is a type that
6176 should be used for the computation if wider than our type.
6178 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6179 (X * 2) + (Y * 4). We must, however, be assured that either the original
6180 expression would not overflow or that overflow is undefined for the type
6181 in the language in question.
6183 If we return a non-null expression, it is an equivalent form of the
6184 original computation, but need not be in the original type.
6186 We set *STRICT_OVERFLOW_P to true if the return value depends on
6187 signed overflow being undefined. Otherwise we do not change
6188 *STRICT_OVERFLOW_P. */
6190 static tree
6191 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6192 bool *strict_overflow_p)
6194 /* To avoid exponential search depth, refuse to allow recursion past
6195 three levels. Beyond that (1) it's highly unlikely that we'll find
6196 something interesting and (2) we've probably processed it before
6197 when we built the inner expression. */
6199 static int depth;
6200 tree ret;
6202 if (depth > 3)
6203 return NULL;
6205 depth++;
6206 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6207 depth--;
6209 return ret;
6212 static tree
6213 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6214 bool *strict_overflow_p)
6216 tree type = TREE_TYPE (t);
6217 enum tree_code tcode = TREE_CODE (t);
6218 tree ctype = (wide_type != 0
6219 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6220 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6221 ? wide_type : type);
6222 tree t1, t2;
6223 int same_p = tcode == code;
6224 tree op0 = NULL_TREE, op1 = NULL_TREE;
6225 bool sub_strict_overflow_p;
6227 /* Don't deal with constants of zero here; they confuse the code below. */
6228 if (integer_zerop (c))
6229 return NULL_TREE;
6231 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6232 op0 = TREE_OPERAND (t, 0);
6234 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6235 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6237 /* Note that we need not handle conditional operations here since fold
6238 already handles those cases. So just do arithmetic here. */
6239 switch (tcode)
6241 case INTEGER_CST:
6242 /* For a constant, we can always simplify if we are a multiply
6243 or (for divide and modulus) if it is a multiple of our constant. */
6244 if (code == MULT_EXPR
6245 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6246 TYPE_SIGN (type)))
6248 tree tem = const_binop (code, fold_convert (ctype, t),
6249 fold_convert (ctype, c));
6250 /* If the multiplication overflowed, we lost information on it.
6251 See PR68142 and PR69845. */
6252 if (TREE_OVERFLOW (tem))
6253 return NULL_TREE;
6254 return tem;
6256 break;
6258 CASE_CONVERT: case NON_LVALUE_EXPR:
6259 /* If op0 is an expression ... */
6260 if ((COMPARISON_CLASS_P (op0)
6261 || UNARY_CLASS_P (op0)
6262 || BINARY_CLASS_P (op0)
6263 || VL_EXP_CLASS_P (op0)
6264 || EXPRESSION_CLASS_P (op0))
6265 /* ... and has wrapping overflow, and its type is smaller
6266 than ctype, then we cannot pass through as widening. */
6267 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6268 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6269 && (TYPE_PRECISION (ctype)
6270 > TYPE_PRECISION (TREE_TYPE (op0))))
6271 /* ... or this is a truncation (t is narrower than op0),
6272 then we cannot pass through this narrowing. */
6273 || (TYPE_PRECISION (type)
6274 < TYPE_PRECISION (TREE_TYPE (op0)))
6275 /* ... or signedness changes for division or modulus,
6276 then we cannot pass through this conversion. */
6277 || (code != MULT_EXPR
6278 && (TYPE_UNSIGNED (ctype)
6279 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6280 /* ... or has undefined overflow while the converted to
6281 type has not, we cannot do the operation in the inner type
6282 as that would introduce undefined overflow. */
6283 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6284 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6285 && !TYPE_OVERFLOW_UNDEFINED (type))))
6286 break;
6288 /* Pass the constant down and see if we can make a simplification. If
6289 we can, replace this expression with the inner simplification for
6290 possible later conversion to our or some other type. */
6291 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6292 && TREE_CODE (t2) == INTEGER_CST
6293 && !TREE_OVERFLOW (t2)
6294 && (t1 = extract_muldiv (op0, t2, code,
6295 code == MULT_EXPR ? ctype : NULL_TREE,
6296 strict_overflow_p)) != 0)
6297 return t1;
6298 break;
6300 case ABS_EXPR:
6301 /* If widening the type changes it from signed to unsigned, then we
6302 must avoid building ABS_EXPR itself as unsigned. */
6303 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6305 tree cstype = (*signed_type_for) (ctype);
6306 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6307 != 0)
6309 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6310 return fold_convert (ctype, t1);
6312 break;
6314 /* If the constant is negative, we cannot simplify this. */
6315 if (tree_int_cst_sgn (c) == -1)
6316 break;
6317 /* FALLTHROUGH */
6318 case NEGATE_EXPR:
6319 /* For division and modulus, type can't be unsigned, as e.g.
6320 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6321 For signed types, even with wrapping overflow, this is fine. */
6322 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6323 break;
6324 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6325 != 0)
6326 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6327 break;
6329 case MIN_EXPR: case MAX_EXPR:
6330 /* If widening the type changes the signedness, then we can't perform
6331 this optimization as that changes the result. */
6332 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6333 break;
6335 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6336 sub_strict_overflow_p = false;
6337 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6338 &sub_strict_overflow_p)) != 0
6339 && (t2 = extract_muldiv (op1, c, code, wide_type,
6340 &sub_strict_overflow_p)) != 0)
6342 if (tree_int_cst_sgn (c) < 0)
6343 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6344 if (sub_strict_overflow_p)
6345 *strict_overflow_p = true;
6346 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6347 fold_convert (ctype, t2));
6349 break;
6351 case LSHIFT_EXPR: case RSHIFT_EXPR:
6352 /* If the second operand is constant, this is a multiplication
6353 or floor division, by a power of two, so we can treat it that
6354 way unless the multiplier or divisor overflows. Signed
6355 left-shift overflow is implementation-defined rather than
6356 undefined in C90, so do not convert signed left shift into
6357 multiplication. */
6358 if (TREE_CODE (op1) == INTEGER_CST
6359 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6360 /* const_binop may not detect overflow correctly,
6361 so check for it explicitly here. */
6362 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6363 wi::to_wide (op1))
6364 && (t1 = fold_convert (ctype,
6365 const_binop (LSHIFT_EXPR, size_one_node,
6366 op1))) != 0
6367 && !TREE_OVERFLOW (t1))
6368 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6369 ? MULT_EXPR : FLOOR_DIV_EXPR,
6370 ctype,
6371 fold_convert (ctype, op0),
6372 t1),
6373 c, code, wide_type, strict_overflow_p);
6374 break;
6376 case PLUS_EXPR: case MINUS_EXPR:
6377 /* See if we can eliminate the operation on both sides. If we can, we
6378 can return a new PLUS or MINUS. If we can't, the only remaining
6379 cases where we can do anything are if the second operand is a
6380 constant. */
6381 sub_strict_overflow_p = false;
6382 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6383 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6384 if (t1 != 0 && t2 != 0
6385 && TYPE_OVERFLOW_WRAPS (ctype)
6386 && (code == MULT_EXPR
6387 /* If not multiplication, we can only do this if both operands
6388 are divisible by c. */
6389 || (multiple_of_p (ctype, op0, c)
6390 && multiple_of_p (ctype, op1, c))))
6392 if (sub_strict_overflow_p)
6393 *strict_overflow_p = true;
6394 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6395 fold_convert (ctype, t2));
6398 /* If this was a subtraction, negate OP1 and set it to be an addition.
6399 This simplifies the logic below. */
6400 if (tcode == MINUS_EXPR)
6402 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6403 /* If OP1 was not easily negatable, the constant may be OP0. */
6404 if (TREE_CODE (op0) == INTEGER_CST)
6406 std::swap (op0, op1);
6407 std::swap (t1, t2);
6411 if (TREE_CODE (op1) != INTEGER_CST)
6412 break;
6414 /* If either OP1 or C are negative, this optimization is not safe for
6415 some of the division and remainder types while for others we need
6416 to change the code. */
6417 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6419 if (code == CEIL_DIV_EXPR)
6420 code = FLOOR_DIV_EXPR;
6421 else if (code == FLOOR_DIV_EXPR)
6422 code = CEIL_DIV_EXPR;
6423 else if (code != MULT_EXPR
6424 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6425 break;
6428 /* If it's a multiply or a division/modulus operation of a multiple
6429 of our constant, do the operation and verify it doesn't overflow. */
6430 if (code == MULT_EXPR
6431 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6432 TYPE_SIGN (type)))
6434 op1 = const_binop (code, fold_convert (ctype, op1),
6435 fold_convert (ctype, c));
6436 /* We allow the constant to overflow with wrapping semantics. */
6437 if (op1 == 0
6438 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6439 break;
6441 else
6442 break;
6444 /* If we have an unsigned type, we cannot widen the operation since it
6445 will change the result if the original computation overflowed. */
6446 if (TYPE_UNSIGNED (ctype) && ctype != type)
6447 break;
6449 /* The last case is if we are a multiply. In that case, we can
6450 apply the distributive law to commute the multiply and addition
6451 if the multiplication of the constants doesn't overflow
6452 and overflow is defined. With undefined overflow
6453 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6454 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6455 return fold_build2 (tcode, ctype,
6456 fold_build2 (code, ctype,
6457 fold_convert (ctype, op0),
6458 fold_convert (ctype, c)),
6459 op1);
6461 break;
6463 case MULT_EXPR:
6464 /* We have a special case here if we are doing something like
6465 (C * 8) % 4 since we know that's zero. */
6466 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6467 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6468 /* If the multiplication can overflow we cannot optimize this. */
6469 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6470 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6471 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6472 TYPE_SIGN (type)))
6474 *strict_overflow_p = true;
6475 return omit_one_operand (type, integer_zero_node, op0);
6478 /* ... fall through ... */
6480 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6481 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6482 /* If we can extract our operation from the LHS, do so and return a
6483 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6484 do something only if the second operand is a constant. */
6485 if (same_p
6486 && TYPE_OVERFLOW_WRAPS (ctype)
6487 && (t1 = extract_muldiv (op0, c, code, wide_type,
6488 strict_overflow_p)) != 0)
6489 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6490 fold_convert (ctype, op1));
6491 else if (tcode == MULT_EXPR && code == MULT_EXPR
6492 && TYPE_OVERFLOW_WRAPS (ctype)
6493 && (t1 = extract_muldiv (op1, c, code, wide_type,
6494 strict_overflow_p)) != 0)
6495 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6496 fold_convert (ctype, t1));
6497 else if (TREE_CODE (op1) != INTEGER_CST)
6498 return 0;
6500 /* If these are the same operation types, we can associate them
6501 assuming no overflow. */
6502 if (tcode == code)
6504 bool overflow_p = false;
6505 wi::overflow_type overflow_mul;
6506 signop sign = TYPE_SIGN (ctype);
6507 unsigned prec = TYPE_PRECISION (ctype);
6508 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6509 wi::to_wide (c, prec),
6510 sign, &overflow_mul);
6511 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6512 if (overflow_mul
6513 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6514 overflow_p = true;
6515 if (!overflow_p)
6516 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6517 wide_int_to_tree (ctype, mul));
6520 /* If these operations "cancel" each other, we have the main
6521 optimizations of this pass, which occur when either constant is a
6522 multiple of the other, in which case we replace this with either an
6523 operation of CODE or TCODE.
6525 If we have an unsigned type, we cannot do this since it will change
6526 the result if the original computation overflowed. */
6527 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6528 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6529 || (tcode == MULT_EXPR
6530 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6531 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6532 && code != MULT_EXPR)))
6534 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6535 TYPE_SIGN (type)))
6537 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6538 *strict_overflow_p = true;
6539 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6540 fold_convert (ctype,
6541 const_binop (TRUNC_DIV_EXPR,
6542 op1, c)));
6544 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6545 TYPE_SIGN (type)))
6547 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6548 *strict_overflow_p = true;
6549 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6550 fold_convert (ctype,
6551 const_binop (TRUNC_DIV_EXPR,
6552 c, op1)));
6555 break;
6557 default:
6558 break;
6561 return 0;
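/* Worked example: with signed int X (undefined overflow),
   extract_muldiv on T = X * 8 with C = 4 and CODE = TRUNC_DIV_EXPR
   takes the "cancel" path above: 8 is a multiple of 4, so the result
   is X * 2 and *STRICT_OVERFLOW_P is set, since with wrapping
   arithmetic X * 8 could overflow and yield a different quotient.  */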
6564 /* Return a node which has the indicated constant VALUE (either 0 or
6565 1 for scalars, or {-1,-1,...} or {0,0,...} for vectors),
6566 and is of the indicated TYPE. */
6568 tree
6569 constant_boolean_node (bool value, tree type)
6571 if (type == integer_type_node)
6572 return value ? integer_one_node : integer_zero_node;
6573 else if (type == boolean_type_node)
6574 return value ? boolean_true_node : boolean_false_node;
6575 else if (TREE_CODE (type) == VECTOR_TYPE)
6576 return build_vector_from_val (type,
6577 build_int_cst (TREE_TYPE (type),
6578 value ? -1 : 0));
6579 else
6580 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6584 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6585 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6586 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6587 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6588 COND is the first argument to CODE; otherwise (as in the example
6589 given here), it is the second argument. TYPE is the type of the
6590 original expression. Return NULL_TREE if no simplification is
6591 possible. */
6593 static tree
6594 fold_binary_op_with_conditional_arg (location_t loc,
6595 enum tree_code code,
6596 tree type, tree op0, tree op1,
6597 tree cond, tree arg, int cond_first_p)
6599 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6600 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6601 tree test, true_value, false_value;
6602 tree lhs = NULL_TREE;
6603 tree rhs = NULL_TREE;
6604 enum tree_code cond_code = COND_EXPR;
6606 /* Do not move possibly trapping operations into the conditional as this
6607 pessimizes code and causes gimplification issues when applied late. */
6608 if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
6609 ANY_INTEGRAL_TYPE_P (type)
6610 && TYPE_OVERFLOW_TRAPS (type), op1))
6611 return NULL_TREE;
6613 if (TREE_CODE (cond) == COND_EXPR
6614 || TREE_CODE (cond) == VEC_COND_EXPR)
6616 test = TREE_OPERAND (cond, 0);
6617 true_value = TREE_OPERAND (cond, 1);
6618 false_value = TREE_OPERAND (cond, 2);
6619 /* If this operand is an expression that throws (and hence has void
6620 type), it does not make sense to try to perform a logical or
6621 arithmetic operation involving it. */
6622 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6623 lhs = true_value;
6624 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6625 rhs = false_value;
6627 else if (!(TREE_CODE (type) != VECTOR_TYPE
6628 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6630 tree testtype = TREE_TYPE (cond);
6631 test = cond;
6632 true_value = constant_boolean_node (true, testtype);
6633 false_value = constant_boolean_node (false, testtype);
6635 else
6636 /* Detect the case of mixing vector and scalar types - bail out. */
6637 return NULL_TREE;
6639 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6640 cond_code = VEC_COND_EXPR;
6642 /* This transformation is only worthwhile if we don't have to wrap ARG
6643 in a SAVE_EXPR and the operation can be simplified without recursing
6644 on at least one of the branches once its pushed inside the COND_EXPR. */
6645 if (!TREE_CONSTANT (arg)
6646 && (TREE_SIDE_EFFECTS (arg)
6647 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6648 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6649 return NULL_TREE;
6651 arg = fold_convert_loc (loc, arg_type, arg);
6652 if (lhs == 0)
6654 true_value = fold_convert_loc (loc, cond_type, true_value);
6655 if (cond_first_p)
6656 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6657 else
6658 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6660 if (rhs == 0)
6662 false_value = fold_convert_loc (loc, cond_type, false_value);
6663 if (cond_first_p)
6664 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6665 else
6666 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6669 /* Check that we have simplified at least one of the branches. */
6670 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6671 return NULL_TREE;
6673 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6677 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6679 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6680 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6681 ADDEND is the same as X.
6683 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6684 and finite. The problematic cases are when X is zero, and its mode
6685 has signed zeros. In the case of rounding towards -infinity,
6686 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6687 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6689 bool
6690 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6692 if (!real_zerop (addend))
6693 return false;
6695 /* Don't allow the fold with -fsignaling-nans. */
6696 if (HONOR_SNANS (element_mode (type)))
6697 return false;
6699 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6700 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6701 return true;
6703 /* In a vector or complex, we would need to check the sign of all zeros. */
6704 if (TREE_CODE (addend) != REAL_CST)
6705 return false;
6707 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6708 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6709 negate = !negate;
6711 /* The mode has signed zeros, and we have to honor their sign.
6712 In this situation, there is only one case we can return true for.
6713 X - 0 is the same as X unless rounding towards -infinity may
6714 be in effect. */
6715 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
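/* Concrete cases, assuming IEEE arithmetic with signed zeros honored:
   X - 0.0 equals X for every X (including X == -0.0) except that under
   rounding towards -infinity 0.0 - 0.0 is -0.0, hence the
   HONOR_SIGN_DEPENDENT_ROUNDING guard; X + 0.0 is never folded here,
   because -0.0 + 0.0 is +0.0, not -0.0.  */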
6718 /* Subroutine of match.pd that optimizes comparisons of a division by
6719 a nonzero integer constant against an integer constant, i.e.
6720 X/C1 op C2.
6722 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6723 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs.
6725 enum tree_code
6726 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
6727 tree *hi, bool *neg_overflow)
6729 tree prod, tmp, type = TREE_TYPE (c1);
6730 signop sign = TYPE_SIGN (type);
6731 wi::overflow_type overflow;
6733 /* We have to do this the hard way to detect unsigned overflow.
6734 prod = int_const_binop (MULT_EXPR, c1, c2); */
6735 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
6736 prod = force_fit_type (type, val, -1, overflow);
6737 *neg_overflow = false;
6739 if (sign == UNSIGNED)
6741 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6742 *lo = prod;
6744 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6745 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
6746 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
6748 else if (tree_int_cst_sgn (c1) >= 0)
6750 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6751 switch (tree_int_cst_sgn (c2))
6753 case -1:
6754 *neg_overflow = true;
6755 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
6756 *hi = prod;
6757 break;
6759 case 0:
6760 *lo = fold_negate_const (tmp, type);
6761 *hi = tmp;
6762 break;
6764 case 1:
6765 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
6766 *lo = prod;
6767 break;
6769 default:
6770 gcc_unreachable ();
6773 else
6775 /* A negative divisor reverses the relational operators. */
6776 code = swap_tree_comparison (code);
6778 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
6779 switch (tree_int_cst_sgn (c2))
6781 case -1:
6782 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
6783 *lo = prod;
6784 break;
6786 case 0:
6787 *hi = fold_negate_const (tmp, type);
6788 *lo = tmp;
6789 break;
6791 case 1:
6792 *neg_overflow = true;
6793 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
6794 *hi = prod;
6795 break;
6797 default:
6798 gcc_unreachable ();
6802 if (code != EQ_EXPR && code != NE_EXPR)
6803 return code;
6805 if (TREE_OVERFLOW (*lo)
6806 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
6807 *lo = NULL_TREE;
6808 if (TREE_OVERFLOW (*hi)
6809 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
6810 *hi = NULL_TREE;
6812 return code;
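/* Worked example for the unsigned case: for X / 3 == 2 we get
   PROD = 6, TMP = 2, *LO = 6 and *HI = 8, so the caller can test
   6 <= X && X <= 8 (typically as one unsigned compare of X - 6 against
   2) instead of performing the division.  A negative divisor, as in
   X / -3 < 2 for signed X, first swaps the operator to GT_EXPR.  */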
6816 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6817 equality/inequality test, then return a simplified form of the test
6818 using a sign test. Otherwise return NULL. TYPE is the desired
6819 result type. */
6821 static tree
6822 fold_single_bit_test_into_sign_test (location_t loc,
6823 enum tree_code code, tree arg0, tree arg1,
6824 tree result_type)
6826 /* If this is testing a single bit, we can optimize the test. */
6827 if ((code == NE_EXPR || code == EQ_EXPR)
6828 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6829 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6831 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6832 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6833 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6835 if (arg00 != NULL_TREE
6836 /* This is only a win if casting to a signed type is cheap,
6837 i.e. when arg00's type is not a partial mode. */
6838 && type_has_mode_precision_p (TREE_TYPE (arg00)))
6840 tree stype = signed_type_for (TREE_TYPE (arg00));
6841 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6842 result_type,
6843 fold_convert_loc (loc, stype, arg00),
6844 build_int_cst (stype, 0));
6848 return NULL_TREE;
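/* For example, with 32-bit int A, (A & 0x80000000) != 0 tests exactly
   the sign bit, so sign_bit_p succeeds and the result is the cheaper
   A < 0; likewise (A & 0x80000000) == 0 becomes A >= 0 (after
   converting A to the corresponding signed type if necessary).  */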
6851 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6852 equality/inequality test, then return a simplified form of
6853 the test using shifts and logical operations. Otherwise return
6854 NULL. TYPE is the desired result type. */
6856 tree
6857 fold_single_bit_test (location_t loc, enum tree_code code,
6858 tree arg0, tree arg1, tree result_type)
6860 /* If this is testing a single bit, we can optimize the test. */
6861 if ((code == NE_EXPR || code == EQ_EXPR)
6862 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6863 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6865 tree inner = TREE_OPERAND (arg0, 0);
6866 tree type = TREE_TYPE (arg0);
6867 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6868 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
6869 int ops_unsigned;
6870 tree signed_type, unsigned_type, intermediate_type;
6871 tree tem, one;
6873 /* First, see if we can fold the single bit test into a sign-bit
6874 test. */
6875 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6876 result_type);
6877 if (tem)
6878 return tem;
6880 /* Otherwise we have (A & C) != 0 where C is a single bit,
6881 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6882 Similarly for (A & C) == 0. */
6884 /* If INNER is a right shift of a constant and it plus BITNUM does
6885 not overflow, adjust BITNUM and INNER. */
6886 if (TREE_CODE (inner) == RSHIFT_EXPR
6887 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6888 && bitnum < TYPE_PRECISION (type)
6889 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
6890 TYPE_PRECISION (type) - bitnum))
6892 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6893 inner = TREE_OPERAND (inner, 0);
6896 /* If we are going to be able to omit the AND below, we must do our
6897 operations as unsigned. If we must use the AND, we have a choice.
6898 Normally unsigned is faster, but for some machines signed is. */
6899 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
6900 && !flag_syntax_only) ? 0 : 1;
6902 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6903 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6904 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6905 inner = fold_convert_loc (loc, intermediate_type, inner);
6907 if (bitnum != 0)
6908 inner = build2 (RSHIFT_EXPR, intermediate_type,
6909 inner, size_int (bitnum));
6911 one = build_int_cst (intermediate_type, 1);
6913 if (code == EQ_EXPR)
6914 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6916 /* Put the AND last so it can combine with more things. */
6917 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6919 /* Make sure to return the proper type. */
6920 inner = fold_convert_loc (loc, result_type, inner);
6922 return inner;
6924 return NULL_TREE;
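/* For example, (A & 8) != 0 is not a sign-bit test, so it becomes
   (A >> 3) & 1 in a suitable intermediate type, and (A & 8) == 0
   becomes ((A >> 3) ^ 1) & 1, putting the AND last so it can combine
   with surrounding code.  */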
6927 /* Test whether it is preferable to swap two operands, ARG0 and
6928 ARG1, for example because ARG0 is an integer constant and ARG1
6929 isn't. */
6931 bool
6932 tree_swap_operands_p (const_tree arg0, const_tree arg1)
6934 if (CONSTANT_CLASS_P (arg1))
6935 return 0;
6936 if (CONSTANT_CLASS_P (arg0))
6937 return 1;
6939 STRIP_NOPS (arg0);
6940 STRIP_NOPS (arg1);
6942 if (TREE_CONSTANT (arg1))
6943 return 0;
6944 if (TREE_CONSTANT (arg0))
6945 return 1;
6947 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6948 for commutative and comparison operators. Ensuring a canonical
6949 form allows the optimizers to find additional redundancies without
6950 having to explicitly check for both orderings. */
6951 if (TREE_CODE (arg0) == SSA_NAME
6952 && TREE_CODE (arg1) == SSA_NAME
6953 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6954 return 1;
6956 /* Put SSA_NAMEs last. */
6957 if (TREE_CODE (arg1) == SSA_NAME)
6958 return 0;
6959 if (TREE_CODE (arg0) == SSA_NAME)
6960 return 1;
6962 /* Put variables last. */
6963 if (DECL_P (arg1))
6964 return 0;
6965 if (DECL_P (arg0))
6966 return 1;
6968 return 0;
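/* For example, b_5 + a_3 is swapped to a_3 + b_5 (lower
   SSA_NAME_VERSION first), so both spellings of the sum reach later
   passes in one canonical form and value numbering can spot the
   redundancy.  */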
6972 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6973 means A >= Y && A != MAX, but in this case we know that
6974 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6976 static tree
6977 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6979 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6981 if (TREE_CODE (bound) == LT_EXPR)
6982 a = TREE_OPERAND (bound, 0);
6983 else if (TREE_CODE (bound) == GT_EXPR)
6984 a = TREE_OPERAND (bound, 1);
6985 else
6986 return NULL_TREE;
6988 typea = TREE_TYPE (a);
6989 if (!INTEGRAL_TYPE_P (typea)
6990 && !POINTER_TYPE_P (typea))
6991 return NULL_TREE;
6993 if (TREE_CODE (ineq) == LT_EXPR)
6995 a1 = TREE_OPERAND (ineq, 1);
6996 y = TREE_OPERAND (ineq, 0);
6998 else if (TREE_CODE (ineq) == GT_EXPR)
7000 a1 = TREE_OPERAND (ineq, 0);
7001 y = TREE_OPERAND (ineq, 1);
7003 else
7004 return NULL_TREE;
7006 if (TREE_TYPE (a1) != typea)
7007 return NULL_TREE;
7009 if (POINTER_TYPE_P (typea))
7011 /* Convert the pointer types into integer before taking the difference. */
7012 tree ta = fold_convert_loc (loc, ssizetype, a);
7013 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7014 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7016 else
7017 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7019 if (!diff || !integer_onep (diff))
7020 return NULL_TREE;
7022 return fold_build2_loc (loc, GE_EXPR, type, a, y);
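/* For instance, i < n && i + 1 > j folds to i < n && i >= j here: the
   BOUND i < n guarantees I is not the maximum value of its type, so
   I + 1 cannot wrap and I + 1 > J is exactly I >= J.  */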
7025 /* Fold a sum or difference of at least one multiplication.
7026 Returns the folded tree or NULL if no simplification could be made. */
7028 static tree
7029 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7030 tree arg0, tree arg1)
7032 tree arg00, arg01, arg10, arg11;
7033 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7035 /* (A * C) +- (B * C) -> (A+-B) * C.
7036 (A * C) +- A -> A * (C+-1).
7037 We are most concerned about the case where C is a constant,
7038 but other combinations show up during loop reduction. Since
7039 it is not difficult, try all four possibilities. */
7041 if (TREE_CODE (arg0) == MULT_EXPR)
7043 arg00 = TREE_OPERAND (arg0, 0);
7044 arg01 = TREE_OPERAND (arg0, 1);
7046 else if (TREE_CODE (arg0) == INTEGER_CST)
7048 arg00 = build_one_cst (type);
7049 arg01 = arg0;
7051 else
7053 /* We cannot generate constant 1 for fract. */
7054 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7055 return NULL_TREE;
7056 arg00 = arg0;
7057 arg01 = build_one_cst (type);
7059 if (TREE_CODE (arg1) == MULT_EXPR)
7061 arg10 = TREE_OPERAND (arg1, 0);
7062 arg11 = TREE_OPERAND (arg1, 1);
7064 else if (TREE_CODE (arg1) == INTEGER_CST)
7066 arg10 = build_one_cst (type);
7067 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7068 the purpose of this canonicalization. */
7069 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7070 && negate_expr_p (arg1)
7071 && code == PLUS_EXPR)
7073 arg11 = negate_expr (arg1);
7074 code = MINUS_EXPR;
7076 else
7077 arg11 = arg1;
7079 else
7081 /* We cannot generate constant 1 for fract. */
7082 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7083 return NULL_TREE;
7084 arg10 = arg1;
7085 arg11 = build_one_cst (type);
7087 same = NULL_TREE;
7089 /* Prefer factoring a common non-constant. */
7090 if (operand_equal_p (arg00, arg10, 0))
7091 same = arg00, alt0 = arg01, alt1 = arg11;
7092 else if (operand_equal_p (arg01, arg11, 0))
7093 same = arg01, alt0 = arg00, alt1 = arg10;
7094 else if (operand_equal_p (arg00, arg11, 0))
7095 same = arg00, alt0 = arg01, alt1 = arg10;
7096 else if (operand_equal_p (arg01, arg10, 0))
7097 same = arg01, alt0 = arg00, alt1 = arg11;
7099 /* No identical multiplicands; see if we can find a common
7100 power-of-two factor in non-power-of-two multiplies. This
7101 can help in multi-dimensional array access. */
7102 else if (tree_fits_shwi_p (arg01)
7103 && tree_fits_shwi_p (arg11))
7105 HOST_WIDE_INT int01, int11, tmp;
7106 bool swap = false;
7107 tree maybe_same;
7108 int01 = tree_to_shwi (arg01);
7109 int11 = tree_to_shwi (arg11);
7111 /* Move min of absolute values to int11. */
7112 if (absu_hwi (int01) < absu_hwi (int11))
7114 tmp = int01, int01 = int11, int11 = tmp;
7115 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7116 maybe_same = arg01;
7117 swap = true;
7119 else
7120 maybe_same = arg11;
7122 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7123 /* The remainder should not be a constant, otherwise we
7124 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7125 increased the number of multiplications necessary. */
7126 && TREE_CODE (arg10) != INTEGER_CST)
7128 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7129 build_int_cst (TREE_TYPE (arg00),
7130 int01 / int11));
7131 alt1 = arg10;
7132 same = maybe_same;
7133 if (swap)
7134 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7138 if (!same)
7139 return NULL_TREE;
7141 if (! INTEGRAL_TYPE_P (type)
7142 || TYPE_OVERFLOW_WRAPS (type)
7143 /* We are neither factoring zero nor minus one. */
7144 || TREE_CODE (same) == INTEGER_CST)
7145 return fold_build2_loc (loc, MULT_EXPR, type,
7146 fold_build2_loc (loc, code, type,
7147 fold_convert_loc (loc, type, alt0),
7148 fold_convert_loc (loc, type, alt1)),
7149 fold_convert_loc (loc, type, same));
7151 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7152 same may be minus one and thus the multiplication may overflow. Perform
7153 the sum operation in an unsigned type. */
7154 tree utype = unsigned_type_for (type);
7155 tree tem = fold_build2_loc (loc, code, utype,
7156 fold_convert_loc (loc, utype, alt0),
7157 fold_convert_loc (loc, utype, alt1));
7158 /* If the sum evaluated to a constant that is not -INF, the multiplication
7159 cannot overflow. */
7160 if (TREE_CODE (tem) == INTEGER_CST
7161 && (wi::to_wide (tem)
7162 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7163 return fold_build2_loc (loc, MULT_EXPR, type,
7164 fold_convert (type, tem), same);
7166 /* Do not resort to unsigned multiplication because
7167 we lose the no-overflow property of the expression. */
7168 return NULL_TREE;
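/* Examples of the factorings above: i * 4 + i * 2 shares the
   multiplicand I and becomes (4 + 2) * i, i.e. i * 6, while
   i * 12 + j * 4 shares only the power-of-two factor 4 and becomes
   (i * 3 + j) * 4.  For signed types with a non-constant SAME the
   addition is carried out in the unsigned type, as above, so that no
   new overflow is introduced.  */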
7171 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7172 specified by EXPR into the buffer PTR of length LEN bytes.
7173 Return the number of bytes placed in the buffer, or zero
7174 upon failure. */
7176 static int
7177 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7179 tree type = TREE_TYPE (expr);
7180 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7181 int byte, offset, word, words;
7182 unsigned char value;
7184 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7185 return 0;
7186 if (off == -1)
7187 off = 0;
7189 if (ptr == NULL)
7190 /* Dry run. */
7191 return MIN (len, total_bytes - off);
7193 words = total_bytes / UNITS_PER_WORD;
7195 for (byte = 0; byte < total_bytes; byte++)
7197 int bitpos = byte * BITS_PER_UNIT;
7198 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7199 number of bytes. */
7200 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7202 if (total_bytes > UNITS_PER_WORD)
7204 word = byte / UNITS_PER_WORD;
7205 if (WORDS_BIG_ENDIAN)
7206 word = (words - 1) - word;
7207 offset = word * UNITS_PER_WORD;
7208 if (BYTES_BIG_ENDIAN)
7209 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7210 else
7211 offset += byte % UNITS_PER_WORD;
7213 else
7214 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7215 if (offset >= off && offset - off < len)
7216 ptr[offset - off] = value;
7218 return MIN (len, total_bytes - off);
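/* Layout example, assuming a 4-byte INTEGER_CST 0x11223344 and
   UNITS_PER_WORD >= 4: byte 0 of the value is 0x44.  For a
   little-endian target it lands at ptr[0], giving the buffer
   44 33 22 11; for a big-endian target the offset reversal puts it at
   ptr[3], giving 11 22 33 44.  */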
7222 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7223 specified by EXPR into the buffer PTR of length LEN bytes.
7224 Return the number of bytes placed in the buffer, or zero
7225 upon failure. */
7227 static int
7228 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7230 tree type = TREE_TYPE (expr);
7231 scalar_mode mode = SCALAR_TYPE_MODE (type);
7232 int total_bytes = GET_MODE_SIZE (mode);
7233 FIXED_VALUE_TYPE value;
7234 tree i_value, i_type;
7236 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7237 return 0;
7239 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7241 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7242 return 0;
7244 value = TREE_FIXED_CST (expr);
7245 i_value = double_int_to_tree (i_type, value.data);
7247 return native_encode_int (i_value, ptr, len, off);
7251 /* Subroutine of native_encode_expr. Encode the REAL_CST
7252 specified by EXPR into the buffer PTR of length LEN bytes.
7253 Return the number of bytes placed in the buffer, or zero
7254 upon failure. */
7256 static int
7257 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7259 tree type = TREE_TYPE (expr);
7260 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7261 int byte, offset, word, words, bitpos;
7262 unsigned char value;
7264 /* There are always 32 bits in each long, no matter the size of
7265 the host's long. We handle floating point representations with
7266 up to 192 bits. */
7267 long tmp[6];
7269 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7270 return 0;
7271 if (off == -1)
7272 off = 0;
7274 if (ptr == NULL)
7275 /* Dry run. */
7276 return MIN (len, total_bytes - off);
7278 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7280 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7282 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7283 bitpos += BITS_PER_UNIT)
7285 byte = (bitpos / BITS_PER_UNIT) & 3;
7286 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7288 if (UNITS_PER_WORD < 4)
7290 word = byte / UNITS_PER_WORD;
7291 if (WORDS_BIG_ENDIAN)
7292 word = (words - 1) - word;
7293 offset = word * UNITS_PER_WORD;
7294 if (BYTES_BIG_ENDIAN)
7295 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7296 else
7297 offset += byte % UNITS_PER_WORD;
7299 else
7301 offset = byte;
7302 if (BYTES_BIG_ENDIAN)
7304 /* Reverse bytes within each long, or within the entire float
7305 if it's smaller than a long (for HFmode). */
7306 offset = MIN (3, total_bytes - 1) - offset;
7307 gcc_assert (offset >= 0);
7310 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7311 if (offset >= off
7312 && offset - off < len)
7313 ptr[offset - off] = value;
7315 return MIN (len, total_bytes - off);
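/* For example, encoding the REAL_CST 1.0f (IEEE single precision,
   bit pattern 0x3f800000) on a little-endian target stores

     ptr[] = { 0x00, 0x00, 0x80, 0x3f }

   with the bits delivered by real_to_target in 32-bit chunks and
   scattered into PTR according to the target byte order.  */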
7318 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7319 specified by EXPR into the buffer PTR of length LEN bytes.
7320 Return the number of bytes placed in the buffer, or zero
7321 upon failure. */
7323 static int
7324 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7326 int rsize, isize;
7327 tree part;
7329 part = TREE_REALPART (expr);
7330 rsize = native_encode_expr (part, ptr, len, off);
7331 if (off == -1 && rsize == 0)
7332 return 0;
7333 part = TREE_IMAGPART (expr);
7334 if (off != -1)
7335 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7336 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7337 len - rsize, off);
7338 if (off == -1 && isize != rsize)
7339 return 0;
7340 return rsize + isize;
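/* For example, a _Complex float constant 1.0f + 2.0fi is encoded as
   the 4 bytes of the real part followed by the 4 bytes of the
   imaginary part; on a little-endian target that is

     { 0x00, 0x00, 0x80, 0x3f,  0x00, 0x00, 0x00, 0x40 }

   since 2.0f has the bit pattern 0x40000000.  */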
7344 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7345 specified by EXPR into the buffer PTR of length LEN bytes.
7346 Return the number of bytes placed in the buffer, or zero
7347 upon failure. */
7349 static int
7350 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7352 unsigned HOST_WIDE_INT i, count;
7353 int size, offset;
7354 tree itype, elem;
7356 offset = 0;
7357 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7358 return 0;
7359 itype = TREE_TYPE (TREE_TYPE (expr));
7360 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7361 for (i = 0; i < count; i++)
7363 if (off >= size)
7365 off -= size;
7366 continue;
7368 elem = VECTOR_CST_ELT (expr, i);
7369 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7370 len - offset, off);
7371 if ((off == -1 && res != size) || res == 0)
7372 return 0;
7373 offset += res;
7374 if (offset >= len)
7375 return (off == -1 && i < count - 1) ? 0 : offset;
7376 if (off != -1)
7377 off = 0;
7379 return offset;
7383 /* Subroutine of native_encode_expr. Encode the STRING_CST
7384 specified by EXPR into the buffer PTR of length LEN bytes.
7385 Return the number of bytes placed in the buffer, or zero
7386 upon failure. */
7388 static int
7389 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7391 tree type = TREE_TYPE (expr);
7393 /* Wide-char strings are encoded in target byte-order so native
7394 encoding them is trivial. */
7395 if (BITS_PER_UNIT != CHAR_BIT
7396 || TREE_CODE (type) != ARRAY_TYPE
7397 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7398 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7399 return 0;
7401 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7402 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7403 return 0;
7404 if (off == -1)
7405 off = 0;
7406 if (ptr == NULL)
7407 /* Dry run. */;
7408 else if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7410 int written = 0;
7411 if (off < TREE_STRING_LENGTH (expr))
7413 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7414 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7416 memset (ptr + written, 0,
7417 MIN (total_bytes - written, len - written));
7419 else
7420 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7421 return MIN (total_bytes - off, len);
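/* For example, a STRING_CST whose TREE_STRING_LENGTH is 3 but whose
   array type has TYPE_SIZE_UNIT 8, as for the initializer of

     char buf[8] = "hi";

   is encoded as 'h', 'i', '\0' and then zero-padded to 8 bytes by
   the memset path above.  */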
7425 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7426 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7427 buffer PTR of length LEN bytes. If PTR is NULL, don't actually store
7428 anything, just do a dry run. If OFF is not -1 then start
7429 the encoding at byte offset OFF and encode at most LEN bytes.
7430 Return the number of bytes placed in the buffer, or zero upon failure. */
7433 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7435 /* We don't support starting at a negative offset, and -1 is special. */
7436 if (off < -1)
7437 return 0;
7439 switch (TREE_CODE (expr))
7441 case INTEGER_CST:
7442 return native_encode_int (expr, ptr, len, off);
7444 case REAL_CST:
7445 return native_encode_real (expr, ptr, len, off);
7447 case FIXED_CST:
7448 return native_encode_fixed (expr, ptr, len, off);
7450 case COMPLEX_CST:
7451 return native_encode_complex (expr, ptr, len, off);
7453 case VECTOR_CST:
7454 return native_encode_vector (expr, ptr, len, off);
7456 case STRING_CST:
7457 return native_encode_string (expr, ptr, len, off);
7459 default:
7460 return 0;
7465 /* Subroutine of native_interpret_expr. Interpret the contents of
7466 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7467 If the buffer cannot be interpreted, return NULL_TREE. */
7469 static tree
7470 native_interpret_int (tree type, const unsigned char *ptr, int len)
7472 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7474 if (total_bytes > len
7475 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7476 return NULL_TREE;
7478 wide_int result = wi::from_buffer (ptr, total_bytes);
7480 return wide_int_to_tree (type, result);
7484 /* Subroutine of native_interpret_expr. Interpret the contents of
7485 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7486 If the buffer cannot be interpreted, return NULL_TREE. */
7488 static tree
7489 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7491 scalar_mode mode = SCALAR_TYPE_MODE (type);
7492 int total_bytes = GET_MODE_SIZE (mode);
7493 double_int result;
7494 FIXED_VALUE_TYPE fixed_value;
7496 if (total_bytes > len
7497 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7498 return NULL_TREE;
7500 result = double_int::from_buffer (ptr, total_bytes);
7501 fixed_value = fixed_from_double_int (result, mode);
7503 return build_fixed (type, fixed_value);
7507 /* Subroutine of native_interpret_expr. Interpret the contents of
7508 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7509 If the buffer cannot be interpreted, return NULL_TREE. */
7511 static tree
7512 native_interpret_real (tree type, const unsigned char *ptr, int len)
7514 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
7515 int total_bytes = GET_MODE_SIZE (mode);
7516 unsigned char value;
7517 /* There are always 32 bits in each long, no matter the size of
7518 the host's long. We handle floating point representations with
7519 up to 192 bits. */
7520 REAL_VALUE_TYPE r;
7521 long tmp[6];
7523 if (total_bytes > len || total_bytes > 24)
7524 return NULL_TREE;
7525 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7527 memset (tmp, 0, sizeof (tmp));
7528 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7529 bitpos += BITS_PER_UNIT)
7531 /* Both OFFSET and BYTE index within a long;
7532 bitpos indexes the whole float. */
7533 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7534 if (UNITS_PER_WORD < 4)
7536 int word = byte / UNITS_PER_WORD;
7537 if (WORDS_BIG_ENDIAN)
7538 word = (words - 1) - word;
7539 offset = word * UNITS_PER_WORD;
7540 if (BYTES_BIG_ENDIAN)
7541 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7542 else
7543 offset += byte % UNITS_PER_WORD;
7545 else
7547 offset = byte;
7548 if (BYTES_BIG_ENDIAN)
7550 /* Reverse bytes within each long, or within the entire float
7551 if it's smaller than a long (for HFmode). */
7552 offset = MIN (3, total_bytes - 1) - offset;
7553 gcc_assert (offset >= 0);
7556 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7558 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7561 real_from_target (&r, tmp, mode);
7562 return build_real (type, r);
7566 /* Subroutine of native_interpret_expr. Interpret the contents of
7567 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7568 If the buffer cannot be interpreted, return NULL_TREE. */
7570 static tree
7571 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7573 tree etype, rpart, ipart;
7574 int size;
7576 etype = TREE_TYPE (type);
7577 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7578 if (size * 2 > len)
7579 return NULL_TREE;
7580 rpart = native_interpret_expr (etype, ptr, size);
7581 if (!rpart)
7582 return NULL_TREE;
7583 ipart = native_interpret_expr (etype, ptr+size, size);
7584 if (!ipart)
7585 return NULL_TREE;
7586 return build_complex (type, rpart, ipart);
7590 /* Subroutine of native_interpret_expr. Interpret the contents of
7591 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7592 If the buffer cannot be interpreted, return NULL_TREE. */
7594 static tree
7595 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
7597 tree etype, elem;
7598 unsigned int i, size;
7599 unsigned HOST_WIDE_INT count;
7601 etype = TREE_TYPE (type);
7602 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7603 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
7604 || size * count > len)
7605 return NULL_TREE;
7607 tree_vector_builder elements (type, count, 1);
7608 for (i = 0; i < count; ++i)
7610 elem = native_interpret_expr (etype, ptr+(i*size), size);
7611 if (!elem)
7612 return NULL_TREE;
7613 elements.quick_push (elem);
7615 return elements.build ();
7619 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7620 the buffer PTR of length LEN as a constant of type TYPE. For
7621 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7622 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7623 return NULL_TREE. */
7625 tree
7626 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7628 switch (TREE_CODE (type))
7630 case INTEGER_TYPE:
7631 case ENUMERAL_TYPE:
7632 case BOOLEAN_TYPE:
7633 case POINTER_TYPE:
7634 case REFERENCE_TYPE:
7635 return native_interpret_int (type, ptr, len);
7637 case REAL_TYPE:
7638 return native_interpret_real (type, ptr, len);
7640 case FIXED_POINT_TYPE:
7641 return native_interpret_fixed (type, ptr, len);
7643 case COMPLEX_TYPE:
7644 return native_interpret_complex (type, ptr, len);
7646 case VECTOR_TYPE:
7647 return native_interpret_vector (type, ptr, len);
7649 default:
7650 return NULL_TREE;
7654 /* Returns true if we can interpret the contents of a native encoding
7655 as TYPE. */
7657 static bool
7658 can_native_interpret_type_p (tree type)
7660 switch (TREE_CODE (type))
7662 case INTEGER_TYPE:
7663 case ENUMERAL_TYPE:
7664 case BOOLEAN_TYPE:
7665 case POINTER_TYPE:
7666 case REFERENCE_TYPE:
7667 case FIXED_POINT_TYPE:
7668 case REAL_TYPE:
7669 case COMPLEX_TYPE:
7670 case VECTOR_TYPE:
7671 return true;
7672 default:
7673 return false;
7678 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7679 TYPE at compile-time. If we're unable to perform the conversion
7680 return NULL_TREE. */
7682 static tree
7683 fold_view_convert_expr (tree type, tree expr)
7685 /* We support up to 512-bit values (for V8DFmode). */
7686 unsigned char buffer[64];
7687 int len;
7689 /* Check that the host and target are sane. */
7690 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7691 return NULL_TREE;
7693 len = native_encode_expr (expr, buffer, sizeof (buffer));
7694 if (len == 0)
7695 return NULL_TREE;
7697 return native_interpret_expr (type, buffer, len);
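/* For example, on a target where both int and float are 32 bits
   wide, the constant  VIEW_CONVERT_EXPR<int>(1.0f)  is folded by
   encoding the REAL_CST into the buffer and reinterpreting it,
   yielding the INTEGER_CST 0x3f800000; the result is the same for
   either endianness because encoding and interpretation use the
   same target byte order.  */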
7700 /* Build an expression for the address of T. Folds away INDIRECT_REF
7701 to avoid confusing the gimplify process. */
7703 tree
7704 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7706 /* The size of the object is not relevant when talking about its address. */
7707 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7708 t = TREE_OPERAND (t, 0);
7710 if (TREE_CODE (t) == INDIRECT_REF)
7712 t = TREE_OPERAND (t, 0);
7714 if (TREE_TYPE (t) != ptrtype)
7715 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7717 else if (TREE_CODE (t) == MEM_REF
7718 && integer_zerop (TREE_OPERAND (t, 1)))
7719 return TREE_OPERAND (t, 0);
7720 else if (TREE_CODE (t) == MEM_REF
7721 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7722 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7723 TREE_OPERAND (t, 0),
7724 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7725 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7727 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7729 if (TREE_TYPE (t) != ptrtype)
7730 t = fold_convert_loc (loc, ptrtype, t);
7732 else
7733 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7735 return t;
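/* For example, for  int *p,  taking the address  &*p  folds straight
   back to  p  instead of building ADDR_EXPR <INDIRECT_REF <p>>, and
   the address of  MEM_REF [p, 0]  likewise folds to  p.  */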
7738 /* Build an expression for the address of T. */
7740 tree
7741 build_fold_addr_expr_loc (location_t loc, tree t)
7743 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7745 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7748 /* Fold a unary expression of code CODE and type TYPE with operand
7749 OP0. Return the folded expression if folding is successful.
7750 Otherwise, return NULL_TREE. */
7752 tree
7753 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7755 tree tem;
7756 tree arg0;
7757 enum tree_code_class kind = TREE_CODE_CLASS (code);
7759 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7760 && TREE_CODE_LENGTH (code) == 1);
7762 arg0 = op0;
7763 if (arg0)
7765 if (CONVERT_EXPR_CODE_P (code)
7766 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7768 /* Don't use STRIP_NOPS, because signedness of argument type
7769 matters. */
7770 STRIP_SIGN_NOPS (arg0);
7772 else
7774 /* Strip any conversions that don't change the mode. This
7775 is safe for every expression, except for a comparison
7776 expression because its signedness is derived from its
7777 operands.
7779 Note that this is done as an internal manipulation within
7780 the constant folder, in order to find the simplest
7781 representation of the arguments so that their form can be
7782 studied. In any case, the appropriate type conversions
7783 should be put back in the tree that will get out of the
7784 constant folder. */
7785 STRIP_NOPS (arg0);
7788 if (CONSTANT_CLASS_P (arg0))
7790 tree tem = const_unop (code, type, arg0);
7791 if (tem)
7793 if (TREE_TYPE (tem) != type)
7794 tem = fold_convert_loc (loc, type, tem);
7795 return tem;
7800 tem = generic_simplify (loc, code, type, op0);
7801 if (tem)
7802 return tem;
7804 if (TREE_CODE_CLASS (code) == tcc_unary)
7806 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7807 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7808 fold_build1_loc (loc, code, type,
7809 fold_convert_loc (loc, TREE_TYPE (op0),
7810 TREE_OPERAND (arg0, 1))));
7811 else if (TREE_CODE (arg0) == COND_EXPR)
7813 tree arg01 = TREE_OPERAND (arg0, 1);
7814 tree arg02 = TREE_OPERAND (arg0, 2);
7815 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7816 arg01 = fold_build1_loc (loc, code, type,
7817 fold_convert_loc (loc,
7818 TREE_TYPE (op0), arg01));
7819 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7820 arg02 = fold_build1_loc (loc, code, type,
7821 fold_convert_loc (loc,
7822 TREE_TYPE (op0), arg02));
7823 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7824 arg01, arg02);
7826 /* If this was a conversion, and all we did was to move into
7827 inside the COND_EXPR, bring it back out. But leave it if
7828 it is a conversion from integer to integer and the
7829 result precision is no wider than a word since such a
7830 conversion is cheap and may be optimized away by combine,
7831 while it couldn't if it were outside the COND_EXPR. Then return
7832 so we don't get into an infinite recursion loop taking the
7833 conversion out and then back in. */
7835 if ((CONVERT_EXPR_CODE_P (code)
7836 || code == NON_LVALUE_EXPR)
7837 && TREE_CODE (tem) == COND_EXPR
7838 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7839 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7840 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7841 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7842 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7843 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7844 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7845 && (INTEGRAL_TYPE_P
7846 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7847 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7848 || flag_syntax_only))
7849 tem = build1_loc (loc, code, type,
7850 build3 (COND_EXPR,
7851 TREE_TYPE (TREE_OPERAND
7852 (TREE_OPERAND (tem, 1), 0)),
7853 TREE_OPERAND (tem, 0),
7854 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7855 TREE_OPERAND (TREE_OPERAND (tem, 2),
7856 0)));
7857 return tem;
7861 switch (code)
7863 case NON_LVALUE_EXPR:
7864 if (!maybe_lvalue_p (op0))
7865 return fold_convert_loc (loc, type, op0);
7866 return NULL_TREE;
7868 CASE_CONVERT:
7869 case FLOAT_EXPR:
7870 case FIX_TRUNC_EXPR:
7871 if (COMPARISON_CLASS_P (op0))
7873 /* If we have (type) (a CMP b) and type is an integral type, return
7874 new expression involving the new type. Canonicalize
7875 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7876 non-integral type.
7877 Do not fold the result as that would not simplify further; also,
7878 folding again results in infinite recursion. */
7879 if (TREE_CODE (type) == BOOLEAN_TYPE)
7880 return build2_loc (loc, TREE_CODE (op0), type,
7881 TREE_OPERAND (op0, 0),
7882 TREE_OPERAND (op0, 1));
7883 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7884 && TREE_CODE (type) != VECTOR_TYPE)
7885 return build3_loc (loc, COND_EXPR, type, op0,
7886 constant_boolean_node (true, type),
7887 constant_boolean_node (false, type));
7890 /* Handle (T *)&A.B.C for A being of type T and B and C
7891 living at offset zero. This occurs frequently in
7892 C++ upcasting and then accessing the base. */
7893 if (TREE_CODE (op0) == ADDR_EXPR
7894 && POINTER_TYPE_P (type)
7895 && handled_component_p (TREE_OPERAND (op0, 0)))
7897 poly_int64 bitsize, bitpos;
7898 tree offset;
7899 machine_mode mode;
7900 int unsignedp, reversep, volatilep;
7901 tree base
7902 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7903 &offset, &mode, &unsignedp, &reversep,
7904 &volatilep);
7905 /* If the reference was to a (constant) zero offset, we can use
7906 the address of the base if it has the same base type
7907 as the result type and the pointer type is unqualified. */
7908 if (!offset
7909 && known_eq (bitpos, 0)
7910 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7911 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7912 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7913 return fold_convert_loc (loc, type,
7914 build_fold_addr_expr_loc (loc, base));
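/* For example, with

     struct S { struct T t; } s;	(t lives at offset zero)

   the cast  (struct S *) &s.t  folds to  &s,  exactly the shape a
   C++ derived-to-base conversion at offset zero produces.  */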
7917 if (TREE_CODE (op0) == MODIFY_EXPR
7918 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7919 /* Detect assigning a bitfield. */
7920 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7921 && DECL_BIT_FIELD
7922 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7924 /* Don't leave an assignment inside a conversion
7925 unless assigning a bitfield. */
7926 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7927 /* First do the assignment, then return converted constant. */
7928 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7929 TREE_NO_WARNING (tem) = 1;
7930 TREE_USED (tem) = 1;
7931 return tem;
7934 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7935 constant (if x has signed type, the sign bit cannot be set
7936 in c). This folds extension into the BIT_AND_EXPR.
7937 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7938 very likely don't have maximal range for their precision and this
7939 transformation effectively doesn't preserve non-maximal ranges. */
7940 if (TREE_CODE (type) == INTEGER_TYPE
7941 && TREE_CODE (op0) == BIT_AND_EXPR
7942 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7944 tree and_expr = op0;
7945 tree and0 = TREE_OPERAND (and_expr, 0);
7946 tree and1 = TREE_OPERAND (and_expr, 1);
7947 int change = 0;
7949 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7950 || (TYPE_PRECISION (type)
7951 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7952 change = 1;
7953 else if (TYPE_PRECISION (TREE_TYPE (and1))
7954 <= HOST_BITS_PER_WIDE_INT
7955 && tree_fits_uhwi_p (and1))
7957 unsigned HOST_WIDE_INT cst;
7959 cst = tree_to_uhwi (and1);
7960 cst &= HOST_WIDE_INT_M1U
7961 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7962 change = (cst == 0);
7963 if (change
7964 && !flag_syntax_only
7965 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
7966 == ZERO_EXTEND))
7968 tree uns = unsigned_type_for (TREE_TYPE (and0));
7969 and0 = fold_convert_loc (loc, uns, and0);
7970 and1 = fold_convert_loc (loc, uns, and1);
7973 if (change)
7975 tem = force_fit_type (type, wi::to_widest (and1), 0,
7976 TREE_OVERFLOW (and1));
7977 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7978 fold_convert_loc (loc, type, and0), tem);
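/* For example, if x has type unsigned char and the BIT_AND_EXPR is
   carried out in that type,

     (int) (x & 0x7f)   becomes   (int) x & 0x7f

   folding the widening conversion into the BIT_AND_EXPR; the operand
   is unsigned here, so the sign-bit restriction is trivially met.  */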
7982 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7983 cast (T1)X will fold away. We assume that this happens when X itself
7984 is a cast. */
7985 if (POINTER_TYPE_P (type)
7986 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7987 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7989 tree arg00 = TREE_OPERAND (arg0, 0);
7990 tree arg01 = TREE_OPERAND (arg0, 1);
7992 return fold_build_pointer_plus_loc
7993 (loc, fold_convert_loc (loc, type, arg00), arg01);
7996 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7997 of the same precision, and X is an integer type not narrower than
7998 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7999 if (INTEGRAL_TYPE_P (type)
8000 && TREE_CODE (op0) == BIT_NOT_EXPR
8001 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8002 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8003 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8005 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8006 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8007 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8008 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8009 fold_convert_loc (loc, type, tem));
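/* For example, with  int x,

     (int) ~(unsigned int) x   becomes   ~x

   since int and unsigned int have the same precision and the inner
   cast is not an extension.  */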
8012 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8013 type of X and Y (integer types only). */
8014 if (INTEGRAL_TYPE_P (type)
8015 && TREE_CODE (op0) == MULT_EXPR
8016 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8017 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8019 /* Be careful not to introduce new overflows. */
8020 tree mult_type;
8021 if (TYPE_OVERFLOW_WRAPS (type))
8022 mult_type = type;
8023 else
8024 mult_type = unsigned_type_for (type);
8026 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8028 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8029 fold_convert_loc (loc, mult_type,
8030 TREE_OPERAND (op0, 0)),
8031 fold_convert_loc (loc, mult_type,
8032 TREE_OPERAND (op0, 1)));
8033 return fold_convert_loc (loc, type, tem);
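/* For example, with  int x, y,

     (short) (x * y)
       becomes   (short) ((unsigned short) x * (unsigned short) y)

   only the low 16 bits of the product matter, and performing the
   multiplication in the unsigned type avoids introducing a signed
   overflow that the original expression did not have.  */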
8037 return NULL_TREE;
8039 case VIEW_CONVERT_EXPR:
8040 if (TREE_CODE (op0) == MEM_REF)
8042 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
8043 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
8044 tem = fold_build2_loc (loc, MEM_REF, type,
8045 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8046 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
8047 return tem;
8050 return NULL_TREE;
8052 case NEGATE_EXPR:
8053 tem = fold_negate_expr (loc, arg0);
8054 if (tem)
8055 return fold_convert_loc (loc, type, tem);
8056 return NULL_TREE;
8058 case ABS_EXPR:
8059 /* Convert fabs((double)float) into (double)fabsf(float). */
8060 if (TREE_CODE (arg0) == NOP_EXPR
8061 && TREE_CODE (type) == REAL_TYPE)
8063 tree targ0 = strip_float_extensions (arg0);
8064 if (targ0 != arg0)
8065 return fold_convert_loc (loc, type,
8066 fold_build1_loc (loc, ABS_EXPR,
8067 TREE_TYPE (targ0),
8068 targ0));
8070 return NULL_TREE;
8072 case BIT_NOT_EXPR:
8073 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8074 if (TREE_CODE (arg0) == BIT_XOR_EXPR
8075 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8076 fold_convert_loc (loc, type,
8077 TREE_OPERAND (arg0, 0)))))
8078 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8079 fold_convert_loc (loc, type,
8080 TREE_OPERAND (arg0, 1)));
8081 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8082 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8083 fold_convert_loc (loc, type,
8084 TREE_OPERAND (arg0, 1)))))
8085 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8086 fold_convert_loc (loc, type,
8087 TREE_OPERAND (arg0, 0)), tem);
8089 return NULL_TREE;
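/* For example, with int x,  ~(x ^ 5)  becomes  x ^ -6:  ~Y
   simplifies for the constant operand (~5 == -6), so the
   BIT_NOT_EXPR is pushed onto that side of the BIT_XOR_EXPR.  */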
8091 case TRUTH_NOT_EXPR:
8092 /* Note that the operand of this must be an int
8093 and its values must be 0 or 1.
8094 ("true" is a fixed value perhaps depending on the language,
8095 but we don't handle values other than 1 correctly yet.) */
8096 tem = fold_truth_not_expr (loc, arg0);
8097 if (!tem)
8098 return NULL_TREE;
8099 return fold_convert_loc (loc, type, tem);
8101 case INDIRECT_REF:
8102 /* Fold *&X to X if X is an lvalue. */
8103 if (TREE_CODE (op0) == ADDR_EXPR)
8105 tree op00 = TREE_OPERAND (op0, 0);
8106 if ((VAR_P (op00)
8107 || TREE_CODE (op00) == PARM_DECL
8108 || TREE_CODE (op00) == RESULT_DECL)
8109 && !TREE_READONLY (op00))
8110 return op00;
8112 return NULL_TREE;
8114 default:
8115 return NULL_TREE;
8116 } /* switch (code) */
8120 /* If the operation was a conversion do _not_ mark a resulting constant
8121 with TREE_OVERFLOW if the original constant was not. These conversions
8122 have implementation defined behavior and retaining the TREE_OVERFLOW
8123 flag here would confuse later passes such as VRP. */
8124 tree
8125 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8126 tree type, tree op0)
8128 tree res = fold_unary_loc (loc, code, type, op0);
8129 if (res
8130 && TREE_CODE (res) == INTEGER_CST
8131 && TREE_CODE (op0) == INTEGER_CST
8132 && CONVERT_EXPR_CODE_P (code))
8133 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8135 return res;
8138 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8139 operands OP0 and OP1. LOC is the location of the resulting expression.
8140 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8141 Return the folded expression if folding is successful. Otherwise,
8142 return NULL_TREE. */
8143 static tree
8144 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8145 tree arg0, tree arg1, tree op0, tree op1)
8147 tree tem;
8149 /* We only do these simplifications if we are optimizing. */
8150 if (!optimize)
8151 return NULL_TREE;
8153 /* Check for things like (A || B) && (A || C). We can convert this
8154 to A || (B && C). Note that either operator can be any of the four
8155 truth and/or operations and the transformation will still be
8156 valid. Also note that we only care about order for the
8157 ANDIF and ORIF operators. If B contains side effects, this
8158 might change the truth-value of A. */
8159 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8160 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8161 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8162 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8163 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8164 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8166 tree a00 = TREE_OPERAND (arg0, 0);
8167 tree a01 = TREE_OPERAND (arg0, 1);
8168 tree a10 = TREE_OPERAND (arg1, 0);
8169 tree a11 = TREE_OPERAND (arg1, 1);
8170 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8171 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8172 && (code == TRUTH_AND_EXPR
8173 || code == TRUTH_OR_EXPR));
8175 if (operand_equal_p (a00, a10, 0))
8176 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8177 fold_build2_loc (loc, code, type, a01, a11));
8178 else if (commutative && operand_equal_p (a00, a11, 0))
8179 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8180 fold_build2_loc (loc, code, type, a01, a10));
8181 else if (commutative && operand_equal_p (a01, a10, 0))
8182 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8183 fold_build2_loc (loc, code, type, a00, a11));
8185 /* This case is tricky because we must either have commutative
8186 operators or else A10 must not have side-effects. */
8188 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8189 && operand_equal_p (a01, a11, 0))
8190 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8191 fold_build2_loc (loc, code, type, a00, a10),
8192 a01);
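/* For example,

     (a || b) && (a || c)   becomes   a || (b && c)

   provided b has no side effects, matching the
   operand_equal_p (a00, a10, 0) arm above with a00 == a10 == a.  */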
8195 /* See if we can build a range comparison. */
8196 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
8197 return tem;
8199 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8200 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8202 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8203 if (tem)
8204 return fold_build2_loc (loc, code, type, tem, arg1);
8207 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8208 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8210 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8211 if (tem)
8212 return fold_build2_loc (loc, code, type, arg0, tem);
8215 /* Check for the possibility of merging component references. If our
8216 lhs is another similar operation, try to merge its rhs with our
8217 rhs. Then try to merge our lhs and rhs. */
8218 if (TREE_CODE (arg0) == code
8219 && (tem = fold_truth_andor_1 (loc, code, type,
8220 TREE_OPERAND (arg0, 1), arg1)) != 0)
8221 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8223 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8224 return tem;
8226 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8227 && !flag_sanitize_coverage
8228 && (code == TRUTH_AND_EXPR
8229 || code == TRUTH_ANDIF_EXPR
8230 || code == TRUTH_OR_EXPR
8231 || code == TRUTH_ORIF_EXPR))
8233 enum tree_code ncode, icode;
8235 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8236 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8237 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8239 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8240 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8241 We don't want to pack more than two leafs to a non-IF AND/OR
8242 expression.
8243 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8244 is not equal to IF-CODE, then we don't want to add the right-hand operand.
8245 If the inner right-hand side of the left-hand operand has
8246 side-effects, or isn't simple, then we can't add to it,
8247 as otherwise we might destroy the if-sequence. */
8248 if (TREE_CODE (arg0) == icode
8249 && simple_operand_p_2 (arg1)
8250 /* Needed for sequence points to handle trappings, and
8251 side-effects. */
8252 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8254 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8255 arg1);
8256 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8257 tem);
8259 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8260 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8261 else if (TREE_CODE (arg1) == icode
8262 && simple_operand_p_2 (arg0)
8263 /* Needed for sequence points to handle trappings, and
8264 side-effects. */
8265 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8267 tem = fold_build2_loc (loc, ncode, type,
8268 arg0, TREE_OPERAND (arg1, 0));
8269 return fold_build2_loc (loc, icode, type, tem,
8270 TREE_OPERAND (arg1, 1));
8272 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8273 into (A OR B).
8274 For sequence point consistency, we need to check for trapping
8275 and side-effects. */
8276 else if (code == icode && simple_operand_p_2 (arg0)
8277 && simple_operand_p_2 (arg1))
8278 return fold_build2_loc (loc, ncode, type, arg0, arg1);
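/* For example, when LOGICAL_OP_NON_SHORT_CIRCUIT holds and a, b and
   c are simple operands (no side effects, no traps),

     (a && b) && c   becomes   a && (b AND c)

   where the inner AND is a non-short-circuit TRUTH_AND_EXPR, so only
   one conditional branch remains for the whole expression.  */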
8281 return NULL_TREE;
8284 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8285 by changing CODE to reduce the magnitude of constants involved in
8286 ARG0 of the comparison.
8287 Returns a canonicalized comparison tree if a simplification was
8288 possible, otherwise returns NULL_TREE.
8289 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8290 valid if signed overflow is undefined. */
8292 static tree
8293 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8294 tree arg0, tree arg1,
8295 bool *strict_overflow_p)
8297 enum tree_code code0 = TREE_CODE (arg0);
8298 tree t, cst0 = NULL_TREE;
8299 int sgn0;
8301 /* Match A +- CST code arg1. We can change this only if overflow
8302 is undefined. */
8303 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8304 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8305 /* In principle pointers also have undefined overflow behavior,
8306 but that causes problems elsewhere. */
8307 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8308 && (code0 == MINUS_EXPR
8309 || code0 == PLUS_EXPR)
8310 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8311 return NULL_TREE;
8313 /* Identify the constant in arg0 and its sign. */
8314 cst0 = TREE_OPERAND (arg0, 1);
8315 sgn0 = tree_int_cst_sgn (cst0);
8317 /* Overflowed constants and zero will cause problems. */
8318 if (integer_zerop (cst0)
8319 || TREE_OVERFLOW (cst0))
8320 return NULL_TREE;
8322 /* See if we can reduce the magnitude of the constant in
8323 arg0 by changing the comparison code. */
8324 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8325 if (code == LT_EXPR
8326 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8327 code = LE_EXPR;
8328 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8329 else if (code == GT_EXPR
8330 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8331 code = GE_EXPR;
8332 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8333 else if (code == LE_EXPR
8334 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8335 code = LT_EXPR;
8336 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8337 else if (code == GE_EXPR
8338 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8339 code = GT_EXPR;
8340 else
8341 return NULL_TREE;
8342 *strict_overflow_p = true;
8344 /* Now build the constant reduced in magnitude. But not if that
8345 would produce one outside of its type's range. */
8346 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8347 && ((sgn0 == 1
8348 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8349 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8350 || (sgn0 == -1
8351 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8352 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8353 return NULL_TREE;
8355 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8356 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8357 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8358 t = fold_convert (TREE_TYPE (arg1), t);
8360 return fold_build2_loc (loc, code, type, t, arg1);
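/* For example, for signed int x with undefined overflow,

     x + 2 > y   becomes   x + 1 >= y

   (GT_EXPR turns into GE_EXPR and the constant shrinks by one);
   *STRICT_OVERFLOW_P is set because the rewrite relies on
   x + 2 not wrapping.  */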
8363 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8364 overflow further. Try to decrease the magnitude of constants involved
8365 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8366 and putting sole constants at the second argument position.
8367 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8369 static tree
8370 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8371 tree arg0, tree arg1)
8373 tree t;
8374 bool strict_overflow_p;
8375 const char * const warnmsg = G_("assuming signed overflow does not occur "
8376 "when reducing constant in comparison");
8378 /* Try canonicalization by simplifying arg0. */
8379 strict_overflow_p = false;
8380 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8381 &strict_overflow_p);
8382 if (t)
8384 if (strict_overflow_p)
8385 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8386 return t;
8389 /* Try canonicalization by simplifying arg1 using the swapped
8390 comparison. */
8391 code = swap_tree_comparison (code);
8392 strict_overflow_p = false;
8393 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8394 &strict_overflow_p);
8395 if (t && strict_overflow_p)
8396 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8397 return t;
8400 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8401 space. This is used to avoid issuing overflow warnings for
8402 expressions like &p->x which cannot wrap. */
8404 static bool
8405 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
8407 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8408 return true;
8410 if (maybe_lt (bitpos, 0))
8411 return true;
8413 poly_wide_int wi_offset;
8414 int precision = TYPE_PRECISION (TREE_TYPE (base));
8415 if (offset == NULL_TREE)
8416 wi_offset = wi::zero (precision);
8417 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
8418 return true;
8419 else
8420 wi_offset = wi::to_poly_wide (offset);
8422 wi::overflow_type overflow;
8423 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
8424 precision);
8425 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8426 if (overflow)
8427 return true;
8429 poly_uint64 total_hwi, size;
8430 if (!total.to_uhwi (&total_hwi)
8431 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
8432 &size)
8433 || known_eq (size, 0U))
8434 return true;
8436 if (known_le (total_hwi, size))
8437 return false;
8439 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8440 array. */
8441 if (TREE_CODE (base) == ADDR_EXPR
8442 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
8443 &size)
8444 && maybe_ne (size, 0U)
8445 && known_le (total_hwi, size))
8446 return false;
8448 return true;
8451 /* Return a positive integer when the symbol DECL is known to have
8452 a nonzero address, zero when it's known not to (e.g., it's a weak
8453 symbol), and a negative integer when the symbol is not yet in the
8454 symbol table and so whether or not its address is zero is unknown.
8455 For function-local objects, always return a positive integer. */
8456 static int
8457 maybe_nonzero_address (tree decl)
8459 if (DECL_P (decl) && decl_in_symtab_p (decl))
8460 if (struct symtab_node *symbol = symtab_node::get_create (decl))
8461 return symbol->nonzero_address ();
8463 /* Function local objects are never NULL. */
8464 if (DECL_P (decl)
8465 && (DECL_CONTEXT (decl)
8466 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8467 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8468 return 1;
8470 return -1;
8473 /* Subroutine of fold_binary. This routine performs all of the
8474 transformations that are common to the equality/inequality
8475 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8476 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8477 fold_binary itself should go through fold_binary. Fold a comparison with
8478 tree code CODE and type TYPE with operands OP0 and OP1. Return
8479 the folded comparison or NULL_TREE. */
8481 static tree
8482 fold_comparison (location_t loc, enum tree_code code, tree type,
8483 tree op0, tree op1)
8485 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8486 tree arg0, arg1, tem;
8488 arg0 = op0;
8489 arg1 = op1;
8491 STRIP_SIGN_NOPS (arg0);
8492 STRIP_SIGN_NOPS (arg1);
8494 /* For comparisons of pointers we can decompose it to a compile time
8495 comparison of the base objects and the offsets into the object.
8496 This requires at least one operand being an ADDR_EXPR or a
8497 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8498 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8499 && (TREE_CODE (arg0) == ADDR_EXPR
8500 || TREE_CODE (arg1) == ADDR_EXPR
8501 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8502 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8504 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8505 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
8506 machine_mode mode;
8507 int volatilep, reversep, unsignedp;
8508 bool indirect_base0 = false, indirect_base1 = false;
8510 /* Get base and offset for the access. Strip ADDR_EXPR for
8511 get_inner_reference, but put it back by stripping INDIRECT_REF
8512 off the base object if possible. indirect_baseN will be true
8513 if baseN is not an address but refers to the object itself. */
8514 base0 = arg0;
8515 if (TREE_CODE (arg0) == ADDR_EXPR)
8517 base0
8518 = get_inner_reference (TREE_OPERAND (arg0, 0),
8519 &bitsize, &bitpos0, &offset0, &mode,
8520 &unsignedp, &reversep, &volatilep);
8521 if (TREE_CODE (base0) == INDIRECT_REF)
8522 base0 = TREE_OPERAND (base0, 0);
8523 else
8524 indirect_base0 = true;
8526 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8528 base0 = TREE_OPERAND (arg0, 0);
8529 STRIP_SIGN_NOPS (base0);
8530 if (TREE_CODE (base0) == ADDR_EXPR)
8532 base0
8533 = get_inner_reference (TREE_OPERAND (base0, 0),
8534 &bitsize, &bitpos0, &offset0, &mode,
8535 &unsignedp, &reversep, &volatilep);
8536 if (TREE_CODE (base0) == INDIRECT_REF)
8537 base0 = TREE_OPERAND (base0, 0);
8538 else
8539 indirect_base0 = true;
8541 if (offset0 == NULL_TREE || integer_zerop (offset0))
8542 offset0 = TREE_OPERAND (arg0, 1);
8543 else
8544 offset0 = size_binop (PLUS_EXPR, offset0,
8545 TREE_OPERAND (arg0, 1));
8546 if (poly_int_tree_p (offset0))
8548 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
8549 TYPE_PRECISION (sizetype));
8550 tem <<= LOG2_BITS_PER_UNIT;
8551 tem += bitpos0;
8552 if (tem.to_shwi (&bitpos0))
8553 offset0 = NULL_TREE;
8557 base1 = arg1;
8558 if (TREE_CODE (arg1) == ADDR_EXPR)
8560 base1
8561 = get_inner_reference (TREE_OPERAND (arg1, 0),
8562 &bitsize, &bitpos1, &offset1, &mode,
8563 &unsignedp, &reversep, &volatilep);
8564 if (TREE_CODE (base1) == INDIRECT_REF)
8565 base1 = TREE_OPERAND (base1, 0);
8566 else
8567 indirect_base1 = true;
8569 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8571 base1 = TREE_OPERAND (arg1, 0);
8572 STRIP_SIGN_NOPS (base1);
8573 if (TREE_CODE (base1) == ADDR_EXPR)
8575 base1
8576 = get_inner_reference (TREE_OPERAND (base1, 0),
8577 &bitsize, &bitpos1, &offset1, &mode,
8578 &unsignedp, &reversep, &volatilep);
8579 if (TREE_CODE (base1) == INDIRECT_REF)
8580 base1 = TREE_OPERAND (base1, 0);
8581 else
8582 indirect_base1 = true;
8584 if (offset1 == NULL_TREE || integer_zerop (offset1))
8585 offset1 = TREE_OPERAND (arg1, 1);
8586 else
8587 offset1 = size_binop (PLUS_EXPR, offset1,
8588 TREE_OPERAND (arg1, 1));
8589 if (poly_int_tree_p (offset1))
8591 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
8592 TYPE_PRECISION (sizetype));
8593 tem <<= LOG2_BITS_PER_UNIT;
8594 tem += bitpos1;
8595 if (tem.to_shwi (&bitpos1))
8596 offset1 = NULL_TREE;
8600 /* If we have equivalent bases we might be able to simplify. */
8601 if (indirect_base0 == indirect_base1
8602 && operand_equal_p (base0, base1,
8603 indirect_base0 ? OEP_ADDRESS_OF : 0))
8605 /* We can fold this expression to a constant if the non-constant
8606 offset parts are equal. */
8607 if ((offset0 == offset1
8608 || (offset0 && offset1
8609 && operand_equal_p (offset0, offset1, 0)))
8610 && (equality_code
8611 || (indirect_base0
8612 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8613 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8615 if (!equality_code
8616 && maybe_ne (bitpos0, bitpos1)
8617 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8618 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8619 fold_overflow_warning (("assuming pointer wraparound does not "
8620 "occur when comparing P +- C1 with "
8621 "P +- C2"),
8622 WARN_STRICT_OVERFLOW_CONDITIONAL);
8624 switch (code)
8626 case EQ_EXPR:
8627 if (known_eq (bitpos0, bitpos1))
8628 return constant_boolean_node (true, type);
8629 if (known_ne (bitpos0, bitpos1))
8630 return constant_boolean_node (false, type);
8631 break;
8632 case NE_EXPR:
8633 if (known_ne (bitpos0, bitpos1))
8634 return constant_boolean_node (true, type);
8635 if (known_eq (bitpos0, bitpos1))
8636 return constant_boolean_node (false, type);
8637 break;
8638 case LT_EXPR:
8639 if (known_lt (bitpos0, bitpos1))
8640 return constant_boolean_node (true, type);
8641 if (known_ge (bitpos0, bitpos1))
8642 return constant_boolean_node (false, type);
8643 break;
8644 case LE_EXPR:
8645 if (known_le (bitpos0, bitpos1))
8646 return constant_boolean_node (true, type);
8647 if (known_gt (bitpos0, bitpos1))
8648 return constant_boolean_node (false, type);
8649 break;
8650 case GE_EXPR:
8651 if (known_ge (bitpos0, bitpos1))
8652 return constant_boolean_node (true, type);
8653 if (known_lt (bitpos0, bitpos1))
8654 return constant_boolean_node (false, type);
8655 break;
8656 case GT_EXPR:
8657 if (known_gt (bitpos0, bitpos1))
8658 return constant_boolean_node (true, type);
8659 if (known_le (bitpos0, bitpos1))
8660 return constant_boolean_node (false, type);
8661 break;
8662 default:;
8665 /* We can simplify the comparison to a comparison of the variable
8666 offset parts if the constant offset parts are equal.
8667 Be careful to use signed sizetype here because otherwise we
8668 mess with array offsets in the wrong way. This is possible
8669 because pointer arithmetic is restricted to remain within an
8670 object and overflow on pointer differences is undefined as of
8671 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8672 else if (known_eq (bitpos0, bitpos1)
8673 && (equality_code
8674 || (indirect_base0
8675 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8676 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8678 /* By converting to signed sizetype we cover middle-end pointer
8679 arithmetic, which operates on unsigned pointer types of sizetype
8680 width, and ARRAY_REF offsets, which are properly sign- or
8681 zero-extended from their type in case it is narrower than
8682 sizetype. */
8683 if (offset0 == NULL_TREE)
8684 offset0 = build_int_cst (ssizetype, 0);
8685 else
8686 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8687 if (offset1 == NULL_TREE)
8688 offset1 = build_int_cst (ssizetype, 0);
8689 else
8690 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8692 if (!equality_code
8693 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8694 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8695 fold_overflow_warning (("assuming pointer wraparound does not "
8696 "occur when comparing P +- C1 with "
8697 "P +- C2"),
8698 WARN_STRICT_OVERFLOW_COMPARISON);
8700 return fold_build2_loc (loc, code, type, offset0, offset1);
8703 /* For equal offsets we can simplify to a comparison of the
8704 base addresses. */
8705 else if (known_eq (bitpos0, bitpos1)
8706 && (indirect_base0
8707 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8708 && (indirect_base1
8709 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8710 && ((offset0 == offset1)
8711 || (offset0 && offset1
8712 && operand_equal_p (offset0, offset1, 0))))
8714 if (indirect_base0)
8715 base0 = build_fold_addr_expr_loc (loc, base0);
8716 if (indirect_base1)
8717 base1 = build_fold_addr_expr_loc (loc, base1);
8718 return fold_build2_loc (loc, code, type, base0, base1);
8720 /* Comparison between an ordinary (non-weak) symbol and a null
8721 pointer can be eliminated since such symbols must have a
8722 non-null address. In C, relational expressions between pointers
8723 to objects and null pointers are undefined. The results
8724 below follow the C++ rules with the additional property that
8725 every object pointer compares greater than a null pointer.
8727 else if (((DECL_P (base0)
8728 && maybe_nonzero_address (base0) > 0
8729 /* Avoid folding references to struct members at offset 0 to
8730 prevent tests like '&ptr->firstmember == 0' from getting
8731 eliminated. When ptr is null, although the -> expression
8732 is strictly speaking invalid, GCC retains it as a matter
8733 of QoI. See PR c/44555. */
8734 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
8735 || CONSTANT_CLASS_P (base0))
8736 && indirect_base0
8737 /* The caller guarantees that when one of the arguments is
8738 constant (i.e., null in this case) it is second. */
8739 && integer_zerop (arg1))
8741 switch (code)
8743 case EQ_EXPR:
8744 case LE_EXPR:
8745 case LT_EXPR:
8746 return constant_boolean_node (false, type);
8747 case GE_EXPR:
8748 case GT_EXPR:
8749 case NE_EXPR:
8750 return constant_boolean_node (true, type);
8751 default:
8752 gcc_unreachable ();
8757 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8758 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8759 the resulting offset is smaller in absolute value than the
8760 original one and has the same sign. */
8761 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8762 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8763 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8764 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8765 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8766 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8767 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8768 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8770 tree const1 = TREE_OPERAND (arg0, 1);
8771 tree const2 = TREE_OPERAND (arg1, 1);
8772 tree variable1 = TREE_OPERAND (arg0, 0);
8773 tree variable2 = TREE_OPERAND (arg1, 0);
8774 tree cst;
8775 const char * const warnmsg = G_("assuming signed overflow does not "
8776 "occur when combining constants around "
8777 "a comparison");
8779 /* Put the constant on the side where it doesn't overflow and is
8780 of lower absolute value and of the same sign as before. */
8781 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8782 ? MINUS_EXPR : PLUS_EXPR,
8783 const2, const1);
8784 if (!TREE_OVERFLOW (cst)
8785 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8786 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8788 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8789 return fold_build2_loc (loc, code, type,
8790 variable1,
8791 fold_build2_loc (loc, TREE_CODE (arg1),
8792 TREE_TYPE (arg1),
8793 variable2, cst));
8796 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8797 ? MINUS_EXPR : PLUS_EXPR,
8798 const1, const2);
8799 if (!TREE_OVERFLOW (cst)
8800 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8801 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8803 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8804 return fold_build2_loc (loc, code, type,
8805 fold_build2_loc (loc, TREE_CODE (arg0),
8806 TREE_TYPE (arg0),
8807 variable1, cst),
8808 variable2);
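/* For example, for signed int x and y,

     x + 2 < y + 5   becomes   x < y + 3

   cst = 5 - 2 = 3 is smaller in absolute value than 5 and has the
   same sign, so the constants are merged onto the right-hand side
   after the overflow warning is queued.  */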
8812 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8813 if (tem)
8814 return tem;
8816 /* If we are comparing an expression that just has comparisons
8817 of two integer values, arithmetic expressions of those comparisons,
8818 and constants, we can simplify it. There are only three cases
8819 to check: the two values can either be equal, the first can be
8820 greater, or the second can be greater. Fold the expression for
8821 those three values. Since each value must be 0 or 1, we have
8822 eight possibilities, each of which corresponds to the constant 0
8823 or 1 or one of the six possible comparisons.
8825 This handles common cases like (a > b) == 0 but also handles
8826 expressions like ((x > y) - (y > x)) > 0, which supposedly
8827 occur in macroized code. */
8829 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8831 tree cval1 = 0, cval2 = 0;
8833 if (twoval_comparison_p (arg0, &cval1, &cval2)
8834 /* Don't handle degenerate cases here; they should already
8835 have been handled anyway. */
8836 && cval1 != 0 && cval2 != 0
8837 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8838 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8839 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8840 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8841 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8842 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8843 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8845 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8846 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8848 /* We can't just pass T to eval_subst in case cval1 or cval2
8849 was the same as ARG1. */
8851 tree high_result
8852 = fold_build2_loc (loc, code, type,
8853 eval_subst (loc, arg0, cval1, maxval,
8854 cval2, minval),
8855 arg1);
8856 tree equal_result
8857 = fold_build2_loc (loc, code, type,
8858 eval_subst (loc, arg0, cval1, maxval,
8859 cval2, maxval),
8860 arg1);
8861 tree low_result
8862 = fold_build2_loc (loc, code, type,
8863 eval_subst (loc, arg0, cval1, minval,
8864 cval2, maxval),
8865 arg1);
8867 /* All three of these results should be 0 or 1. Confirm they are.
8868 Then use those values to select the proper code to use. */
8870 if (TREE_CODE (high_result) == INTEGER_CST
8871 && TREE_CODE (equal_result) == INTEGER_CST
8872 && TREE_CODE (low_result) == INTEGER_CST)
8874 /* Make a 3-bit mask with the high-order bit being the
8875 value for `>', the next for '=', and the low for '<'. */
8876 switch ((integer_onep (high_result) * 4)
8877 + (integer_onep (equal_result) * 2)
8878 + integer_onep (low_result))
8880 case 0:
8881 /* Always false. */
8882 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8883 case 1:
8884 code = LT_EXPR;
8885 break;
8886 case 2:
8887 code = EQ_EXPR;
8888 break;
8889 case 3:
8890 code = LE_EXPR;
8891 break;
8892 case 4:
8893 code = GT_EXPR;
8894 break;
8895 case 5:
8896 code = NE_EXPR;
8897 break;
8898 case 6:
8899 code = GE_EXPR;
8900 break;
8901 case 7:
8902 /* Always true. */
8903 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8906 return fold_build2_loc (loc, code, type, cval1, cval2);
8911 return NULL_TREE;
8915 /* Subroutine of fold_binary. Optimize complex multiplications of the
8916 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8917 argument EXPR represents the expression "z" of type TYPE. */
8919 static tree
8920 fold_mult_zconjz (location_t loc, tree type, tree expr)
8922 tree itype = TREE_TYPE (type);
8923 tree rpart, ipart, tem;
8925 if (TREE_CODE (expr) == COMPLEX_EXPR)
8927 rpart = TREE_OPERAND (expr, 0);
8928 ipart = TREE_OPERAND (expr, 1);
8930 else if (TREE_CODE (expr) == COMPLEX_CST)
8932 rpart = TREE_REALPART (expr);
8933 ipart = TREE_IMAGPART (expr);
8935 else
8937 expr = save_expr (expr);
8938 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8939 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8942 rpart = save_expr (rpart);
8943 ipart = save_expr (ipart);
8944 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8945 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8946 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8947 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8948 build_zero_cst (itype));
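/* So for  _Complex double z,  the GNU C expression  z * ~z  (complex
   conjugation) folds to  COMPLEX_EXPR <r*r + i*i, 0.0>  where r and
   i are the save_expr'd real and imaginary parts of z.  */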
8952 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8953 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
8954 true if successful. */
8956 static bool
8957 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
8959 unsigned HOST_WIDE_INT i, nunits;
8961 if (TREE_CODE (arg) == VECTOR_CST
8962 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
8964 for (i = 0; i < nunits; ++i)
8965 elts[i] = VECTOR_CST_ELT (arg, i);
8967 else if (TREE_CODE (arg) == CONSTRUCTOR)
8969 constructor_elt *elt;
8971 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8972 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8973 return false;
8974 else
8975 elts[i] = elt->value;
8977 else
8978 return false;
8979 for (; i < nelts; i++)
8980 elts[i]
8981 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8982 return true;
8985 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8986 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8987 NULL_TREE otherwise. */
8989 static tree
8990 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
8992 unsigned int i;
8993 unsigned HOST_WIDE_INT nelts;
8994 bool need_ctor = false;
8996 if (!sel.length ().is_constant (&nelts))
8997 return NULL_TREE;
8998 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
8999 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
9000 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
9001 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9002 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9003 return NULL_TREE;
9005 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
9006 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
9007 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
9008 return NULL_TREE;
9010 tree_vector_builder out_elts (type, nelts, 1);
9011 for (i = 0; i < nelts; i++)
9013 HOST_WIDE_INT index;
9014 if (!sel[i].is_constant (&index))
9015 return NULL_TREE;
9016 if (!CONSTANT_CLASS_P (in_elts[index]))
9017 need_ctor = true;
9018 out_elts.quick_push (unshare_expr (in_elts[index]));
9021 if (need_ctor)
9023 vec<constructor_elt, va_gc> *v;
9024 vec_alloc (v, nelts);
9025 for (i = 0; i < nelts; i++)
9026 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
9027 return build_constructor (type, v);
9029 else
9030 return out_elts.build ();
9033 /* Try to fold a pointer difference of type TYPE between two address
9034 expressions of array references AREF0 and AREF1 using location LOC.
9035 Return a simplified expression for the difference or NULL_TREE. */
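/* Editorial example (not part of the source): for &a[i] - &a[j] the
   recursion ends with equal bases, so BASE_OFFSET == 0, OP0 == i,
   OP1 == j and ESZ == sizeof (a[0]), giving the byte difference
   0 + (i - j) * sizeof (a[0]). */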
9037 static tree
9038 fold_addr_of_array_ref_difference (location_t loc, tree type,
9039 tree aref0, tree aref1,
9040 bool use_pointer_diff)
9042 tree base0 = TREE_OPERAND (aref0, 0);
9043 tree base1 = TREE_OPERAND (aref1, 0);
9044 tree base_offset = build_int_cst (type, 0);
9046 /* If the bases are array references as well, recurse. If the bases
9047 are pointer indirections compute the difference of the pointers.
9048 If the bases are equal, we are set. */
9049 if ((TREE_CODE (base0) == ARRAY_REF
9050 && TREE_CODE (base1) == ARRAY_REF
9051 && (base_offset
9052 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
9053 use_pointer_diff)))
9054 || (INDIRECT_REF_P (base0)
9055 && INDIRECT_REF_P (base1)
9056 && (base_offset
9057 = use_pointer_diff
9058 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
9059 TREE_OPERAND (base0, 0),
9060 TREE_OPERAND (base1, 0))
9061 : fold_binary_loc (loc, MINUS_EXPR, type,
9062 fold_convert (type,
9063 TREE_OPERAND (base0, 0)),
9064 fold_convert (type,
9065 TREE_OPERAND (base1, 0)))))
9066 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
9068 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9069 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9070 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9071 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
9072 return fold_build2_loc (loc, PLUS_EXPR, type,
9073 base_offset,
9074 fold_build2_loc (loc, MULT_EXPR, type,
9075 diff, esz));
9077 return NULL_TREE;
9080 /* If the real or vector real constant CST of type TYPE has an exact
9081 inverse, return it, else return NULL. */
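/* Editorial example (not part of the source): 1/4.0 == 0.25 is exact
   in binary floating point, so 4.0 yields 0.25; 1/3.0 is not exactly
   representable, so 3.0 yields NULL_TREE. */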
9083 tree
9084 exact_inverse (tree type, tree cst)
9086 REAL_VALUE_TYPE r;
9087 tree unit_type;
9088 machine_mode mode;
9090 switch (TREE_CODE (cst))
9092 case REAL_CST:
9093 r = TREE_REAL_CST (cst);
9095 if (exact_real_inverse (TYPE_MODE (type), &r))
9096 return build_real (type, r);
9098 return NULL_TREE;
9100 case VECTOR_CST:
9102 unit_type = TREE_TYPE (type);
9103 mode = TYPE_MODE (unit_type);
9105 tree_vector_builder elts;
9106 if (!elts.new_unary_operation (type, cst, false))
9107 return NULL_TREE;
9108 unsigned int count = elts.encoded_nelts ();
9109 for (unsigned int i = 0; i < count; ++i)
9111 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9112 if (!exact_real_inverse (mode, &r))
9113 return NULL_TREE;
9114 elts.quick_push (build_real (unit_type, r));
9117 return elts.build ();
9120 default:
9121 return NULL_TREE;
9125 /* Mask out the tz least significant bits of X of type TYPE where
9126 tz is the number of trailing zeroes in Y. */
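/* Editorial example (not part of the source): Y == 0b01000 has
   tz == 3 trailing zeroes, so X == 0b10111 is masked to 0b10000;
   the three least significant bits of X are cleared. */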
9127 static wide_int
9128 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9130 int tz = wi::ctz (y);
9131 if (tz > 0)
9132 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9133 return x;
9136 /* Return true when T is an address and is known to be nonzero.
9137 For floating point we further ensure that T is not denormal.
9138 Similar logic is present in nonzero_address in rtlanal.h.
9140 If the return value is based on the assumption that signed overflow
9141 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9142 change *STRICT_OVERFLOW_P. */
9144 static bool
9145 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9147 tree type = TREE_TYPE (t);
9148 enum tree_code code;
9150 /* Doing something useful for floating point would need more work. */
9151 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9152 return false;
9154 code = TREE_CODE (t);
9155 switch (TREE_CODE_CLASS (code))
9157 case tcc_unary:
9158 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9159 strict_overflow_p);
9160 case tcc_binary:
9161 case tcc_comparison:
9162 return tree_binary_nonzero_warnv_p (code, type,
9163 TREE_OPERAND (t, 0),
9164 TREE_OPERAND (t, 1),
9165 strict_overflow_p);
9166 case tcc_constant:
9167 case tcc_declaration:
9168 case tcc_reference:
9169 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9171 default:
9172 break;
9175 switch (code)
9177 case TRUTH_NOT_EXPR:
9178 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9179 strict_overflow_p);
9181 case TRUTH_AND_EXPR:
9182 case TRUTH_OR_EXPR:
9183 case TRUTH_XOR_EXPR:
9184 return tree_binary_nonzero_warnv_p (code, type,
9185 TREE_OPERAND (t, 0),
9186 TREE_OPERAND (t, 1),
9187 strict_overflow_p);
9189 case COND_EXPR:
9190 case CONSTRUCTOR:
9191 case OBJ_TYPE_REF:
9192 case ASSERT_EXPR:
9193 case ADDR_EXPR:
9194 case WITH_SIZE_EXPR:
9195 case SSA_NAME:
9196 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9198 case COMPOUND_EXPR:
9199 case MODIFY_EXPR:
9200 case BIND_EXPR:
9201 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9202 strict_overflow_p);
9204 case SAVE_EXPR:
9205 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9206 strict_overflow_p);
9208 case CALL_EXPR:
9210 tree fndecl = get_callee_fndecl (t);
9211 if (!fndecl) return false;
9212 if (flag_delete_null_pointer_checks && !flag_check_new
9213 && DECL_IS_OPERATOR_NEW (fndecl)
9214 && !TREE_NOTHROW (fndecl))
9215 return true;
9216 if (flag_delete_null_pointer_checks
9217 && lookup_attribute ("returns_nonnull",
9218 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9219 return true;
9220 return alloca_call_p (t);
9223 default:
9224 break;
9226 return false;
9229 /* Return true when T is an address and is known to be nonzero.
9230 Handle warnings about undefined signed overflow. */
9232 bool
9233 tree_expr_nonzero_p (tree t)
9235 bool ret, strict_overflow_p;
9237 strict_overflow_p = false;
9238 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9239 if (strict_overflow_p)
9240 fold_overflow_warning (("assuming signed overflow does not occur when "
9241 "determining that expression is always "
9242 "non-zero"),
9243 WARN_STRICT_OVERFLOW_MISC);
9244 return ret;
9247 /* Return true if T is known not to be equal to an integer W. */
9249 bool
9250 expr_not_equal_to (tree t, const wide_int &w)
9252 wide_int min, max, nz;
9253 value_range_type rtype;
9254 switch (TREE_CODE (t))
9256 case INTEGER_CST:
9257 return wi::to_wide (t) != w;
9259 case SSA_NAME:
9260 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9261 return false;
9262 rtype = get_range_info (t, &min, &max);
9263 if (rtype == VR_RANGE)
9265 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9266 return true;
9267 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9268 return true;
9270 else if (rtype == VR_ANTI_RANGE
9271 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9272 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9273 return true;
9274 /* If T has some known zero bits and W has any of those bits set,
9275 then T is known not to be equal to W. */
9276 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9277 TYPE_PRECISION (TREE_TYPE (t))), 0))
9278 return true;
9279 return false;
9281 default:
9282 return false;
9286 /* Fold a binary expression of code CODE and type TYPE with operands
9287 OP0 and OP1. LOC is the location of the resulting expression.
9288 Return the folded expression if folding is successful. Otherwise,
9289 return NULL_TREE. */
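/* Editorial usage sketch (hypothetical call, not from the source):
   fold_binary_loc (loc, PLUS_EXPR, integer_type_node, x,
		    integer_zero_node)
   may hand back X itself, or NULL_TREE when no simplification
   applies; callers must be prepared for either result. */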
9291 tree
9292 fold_binary_loc (location_t loc, enum tree_code code, tree type,
9293 tree op0, tree op1)
9295 enum tree_code_class kind = TREE_CODE_CLASS (code);
9296 tree arg0, arg1, tem;
9297 tree t1 = NULL_TREE;
9298 bool strict_overflow_p;
9299 unsigned int prec;
9301 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9302 && TREE_CODE_LENGTH (code) == 2
9303 && op0 != NULL_TREE
9304 && op1 != NULL_TREE);
9306 arg0 = op0;
9307 arg1 = op1;
9309 /* Strip any conversions that don't change the mode. This is
9310 safe for every expression, except for a comparison expression
9311 because its signedness is derived from its operands. So, in
9312 the latter case, only strip conversions that don't change the
9313 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9314 preserved.
9316 Note that this is done as an internal manipulation within the
9317 constant folder, in order to find the simplest representation
9318 of the arguments so that their form can be studied. In any
9319 case, the appropriate type conversions should be put back in
9320 the tree that will get out of the constant folder. */
9322 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9324 STRIP_SIGN_NOPS (arg0);
9325 STRIP_SIGN_NOPS (arg1);
9327 else
9329 STRIP_NOPS (arg0);
9330 STRIP_NOPS (arg1);
9333 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9334 constant but we can't do arithmetic on them. */
9335 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9337 tem = const_binop (code, type, arg0, arg1);
9338 if (tem != NULL_TREE)
9340 if (TREE_TYPE (tem) != type)
9341 tem = fold_convert_loc (loc, type, tem);
9342 return tem;
9346 /* If this is a commutative operation, and ARG0 is a constant, move it
9347 to ARG1 to reduce the number of tests below. */
9348 if (commutative_tree_code (code)
9349 && tree_swap_operands_p (arg0, arg1))
9350 return fold_build2_loc (loc, code, type, op1, op0);
9352 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9353 to ARG1 to reduce the number of tests below. */
9354 if (kind == tcc_comparison
9355 && tree_swap_operands_p (arg0, arg1))
9356 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9358 tem = generic_simplify (loc, code, type, op0, op1);
9359 if (tem)
9360 return tem;
9362 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9364 First check for cases where an arithmetic operation is applied to a
9365 compound, conditional, or comparison operation. Push the arithmetic
9366 operation inside the compound or conditional to see if any folding
9367 can then be done. Convert comparison to conditional for this purpose.
9368 This also optimizes non-constant cases that used to be done in
9369 expand_expr.
9371 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9372 one of the operands is a truth value and the other is a truth value or
9373 a BIT_AND_EXPR with the constant 1. In that case, the
9374 code below would make the expression more complex. Change it to a
9375 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9376 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9378 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9379 || code == EQ_EXPR || code == NE_EXPR)
9380 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
9381 && ((truth_value_p (TREE_CODE (arg0))
9382 && (truth_value_p (TREE_CODE (arg1))
9383 || (TREE_CODE (arg1) == BIT_AND_EXPR
9384 && integer_onep (TREE_OPERAND (arg1, 1)))))
9385 || (truth_value_p (TREE_CODE (arg1))
9386 && (truth_value_p (TREE_CODE (arg0))
9387 || (TREE_CODE (arg0) == BIT_AND_EXPR
9388 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9390 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9391 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9392 : TRUTH_XOR_EXPR,
9393 boolean_type_node,
9394 fold_convert_loc (loc, boolean_type_node, arg0),
9395 fold_convert_loc (loc, boolean_type_node, arg1));
9397 if (code == EQ_EXPR)
9398 tem = invert_truthvalue_loc (loc, tem);
9400 return fold_convert_loc (loc, type, tem);
9403 if (TREE_CODE_CLASS (code) == tcc_binary
9404 || TREE_CODE_CLASS (code) == tcc_comparison)
9406 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9408 tem = fold_build2_loc (loc, code, type,
9409 fold_convert_loc (loc, TREE_TYPE (op0),
9410 TREE_OPERAND (arg0, 1)), op1);
9411 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9412 tem);
9414 if (TREE_CODE (arg1) == COMPOUND_EXPR)
9416 tem = fold_build2_loc (loc, code, type, op0,
9417 fold_convert_loc (loc, TREE_TYPE (op1),
9418 TREE_OPERAND (arg1, 1)));
9419 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9420 tem);
9423 if (TREE_CODE (arg0) == COND_EXPR
9424 || TREE_CODE (arg0) == VEC_COND_EXPR
9425 || COMPARISON_CLASS_P (arg0))
9427 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9428 arg0, arg1,
9429 /*cond_first_p=*/1);
9430 if (tem != NULL_TREE)
9431 return tem;
9434 if (TREE_CODE (arg1) == COND_EXPR
9435 || TREE_CODE (arg1) == VEC_COND_EXPR
9436 || COMPARISON_CLASS_P (arg1))
9438 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9439 arg1, arg0,
9440 /*cond_first_p=*/0);
9441 if (tem != NULL_TREE)
9442 return tem;
9446 switch (code)
9448 case MEM_REF:
9449 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9450 if (TREE_CODE (arg0) == ADDR_EXPR
9451 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9453 tree iref = TREE_OPERAND (arg0, 0);
9454 return fold_build2 (MEM_REF, type,
9455 TREE_OPERAND (iref, 0),
9456 int_const_binop (PLUS_EXPR, arg1,
9457 TREE_OPERAND (iref, 1)));
9460 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9461 if (TREE_CODE (arg0) == ADDR_EXPR
9462 && handled_component_p (TREE_OPERAND (arg0, 0)))
9464 tree base;
9465 poly_int64 coffset;
9466 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9467 &coffset);
9468 if (!base)
9469 return NULL_TREE;
9470 return fold_build2 (MEM_REF, type,
9471 build_fold_addr_expr (base),
9472 int_const_binop (PLUS_EXPR, arg1,
9473 size_int (coffset)));
9476 return NULL_TREE;
9478 case POINTER_PLUS_EXPR:
9479 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9480 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9481 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9482 return fold_convert_loc (loc, type,
9483 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9484 fold_convert_loc (loc, sizetype,
9485 arg1),
9486 fold_convert_loc (loc, sizetype,
9487 arg0)));
9489 return NULL_TREE;
9491 case PLUS_EXPR:
9492 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9494 /* X + (X / CST) * -CST is X % CST. */
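/* Editorial instance (not part of the source): with CST == 16,
   x + (x / 16) * -16 == x - (x / 16) * 16 == x % 16 by the
   truncating-division identity x == (x / c) * c + x % c. */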
9495 if (TREE_CODE (arg1) == MULT_EXPR
9496 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9497 && operand_equal_p (arg0,
9498 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9500 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9501 tree cst1 = TREE_OPERAND (arg1, 1);
9502 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9503 cst1, cst0);
9504 if (sum && integer_zerop (sum))
9505 return fold_convert_loc (loc, type,
9506 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9507 TREE_TYPE (arg0), arg0,
9508 cst0));
9512 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9513 one. Make sure the type is not saturating and has the signedness of
9514 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9515 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9516 if ((TREE_CODE (arg0) == MULT_EXPR
9517 || TREE_CODE (arg1) == MULT_EXPR)
9518 && !TYPE_SATURATING (type)
9519 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9520 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9521 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9523 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9524 if (tem)
9525 return tem;
9528 if (! FLOAT_TYPE_P (type))
9530 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9531 (plus (plus (mult) (mult)) (foo)) so that we can
9532 take advantage of the factoring cases below. */
9533 if (ANY_INTEGRAL_TYPE_P (type)
9534 && TYPE_OVERFLOW_WRAPS (type)
9535 && (((TREE_CODE (arg0) == PLUS_EXPR
9536 || TREE_CODE (arg0) == MINUS_EXPR)
9537 && TREE_CODE (arg1) == MULT_EXPR)
9538 || ((TREE_CODE (arg1) == PLUS_EXPR
9539 || TREE_CODE (arg1) == MINUS_EXPR)
9540 && TREE_CODE (arg0) == MULT_EXPR)))
9542 tree parg0, parg1, parg, marg;
9543 enum tree_code pcode;
9545 if (TREE_CODE (arg1) == MULT_EXPR)
9546 parg = arg0, marg = arg1;
9547 else
9548 parg = arg1, marg = arg0;
9549 pcode = TREE_CODE (parg);
9550 parg0 = TREE_OPERAND (parg, 0);
9551 parg1 = TREE_OPERAND (parg, 1);
9552 STRIP_NOPS (parg0);
9553 STRIP_NOPS (parg1);
9555 if (TREE_CODE (parg0) == MULT_EXPR
9556 && TREE_CODE (parg1) != MULT_EXPR)
9557 return fold_build2_loc (loc, pcode, type,
9558 fold_build2_loc (loc, PLUS_EXPR, type,
9559 fold_convert_loc (loc, type,
9560 parg0),
9561 fold_convert_loc (loc, type,
9562 marg)),
9563 fold_convert_loc (loc, type, parg1));
9564 if (TREE_CODE (parg0) != MULT_EXPR
9565 && TREE_CODE (parg1) == MULT_EXPR)
9566 return
9567 fold_build2_loc (loc, PLUS_EXPR, type,
9568 fold_convert_loc (loc, type, parg0),
9569 fold_build2_loc (loc, pcode, type,
9570 fold_convert_loc (loc, type, marg),
9571 fold_convert_loc (loc, type,
9572 parg1)));
9575 else
9577 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9578 to __complex__ ( x, y ). This is not the same for SNaNs or
9579 if signed zeros are involved. */
9580 if (!HONOR_SNANS (element_mode (arg0))
9581 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9582 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9584 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9585 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9586 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9587 bool arg0rz = false, arg0iz = false;
9588 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9589 || (arg0i && (arg0iz = real_zerop (arg0i))))
9591 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9592 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9593 if (arg0rz && arg1i && real_zerop (arg1i))
9595 tree rp = arg1r ? arg1r
9596 : build1 (REALPART_EXPR, rtype, arg1);
9597 tree ip = arg0i ? arg0i
9598 : build1 (IMAGPART_EXPR, rtype, arg0);
9599 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9601 else if (arg0iz && arg1r && real_zerop (arg1r))
9603 tree rp = arg0r ? arg0r
9604 : build1 (REALPART_EXPR, rtype, arg0);
9605 tree ip = arg1i ? arg1i
9606 : build1 (IMAGPART_EXPR, rtype, arg1);
9607 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9612 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9613 We associate floats only if the user has specified
9614 -fassociative-math. */
9615 if (flag_associative_math
9616 && TREE_CODE (arg1) == PLUS_EXPR
9617 && TREE_CODE (arg0) != MULT_EXPR)
9619 tree tree10 = TREE_OPERAND (arg1, 0);
9620 tree tree11 = TREE_OPERAND (arg1, 1);
9621 if (TREE_CODE (tree11) == MULT_EXPR
9622 && TREE_CODE (tree10) == MULT_EXPR)
9624 tree tree0;
9625 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9626 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9629 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9630 We associate floats only if the user has specified
9631 -fassociative-math. */
9632 if (flag_associative_math
9633 && TREE_CODE (arg0) == PLUS_EXPR
9634 && TREE_CODE (arg1) != MULT_EXPR)
9636 tree tree00 = TREE_OPERAND (arg0, 0);
9637 tree tree01 = TREE_OPERAND (arg0, 1);
9638 if (TREE_CODE (tree01) == MULT_EXPR
9639 && TREE_CODE (tree00) == MULT_EXPR)
9641 tree tree0;
9642 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9643 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9648 bit_rotate:
9649 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9650 is a rotate of A by C1 bits. */
9651 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9652 is a rotate of A by B bits.
9653 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
9654 though in this case CODE must be | and not + or ^, otherwise
9655 it doesn't return A when B is 0. */
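/* Editorial examples (not part of the source), for 32-bit unsigned A:
   (A << 3) | (A >> 29)	       -> A rotated left by 3;
   (A << B) | (A >> (32 - B))  -> A rotated left by B;
   (A << B) | (A >> (-B & 31)) -> A rotated left by B, and is correct
   even for B == 0, which is why only | is accepted for that form. */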
9657 enum tree_code code0, code1;
9658 tree rtype;
9659 code0 = TREE_CODE (arg0);
9660 code1 = TREE_CODE (arg1);
9661 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9662 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9663 && operand_equal_p (TREE_OPERAND (arg0, 0),
9664 TREE_OPERAND (arg1, 0), 0)
9665 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9666 TYPE_UNSIGNED (rtype))
9667 /* Only create rotates in complete modes. Other cases are not
9668 expanded properly. */
9669 && (element_precision (rtype)
9670 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9672 tree tree01, tree11;
9673 tree orig_tree01, orig_tree11;
9674 enum tree_code code01, code11;
9676 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
9677 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
9678 STRIP_NOPS (tree01);
9679 STRIP_NOPS (tree11);
9680 code01 = TREE_CODE (tree01);
9681 code11 = TREE_CODE (tree11);
9682 if (code11 != MINUS_EXPR
9683 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
9685 std::swap (code0, code1);
9686 std::swap (code01, code11);
9687 std::swap (tree01, tree11);
9688 std::swap (orig_tree01, orig_tree11);
9690 if (code01 == INTEGER_CST
9691 && code11 == INTEGER_CST
9692 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9693 == element_precision (rtype)))
9695 tem = build2_loc (loc, LROTATE_EXPR,
9696 rtype, TREE_OPERAND (arg0, 0),
9697 code0 == LSHIFT_EXPR
9698 ? orig_tree01 : orig_tree11);
9699 return fold_convert_loc (loc, type, tem);
9701 else if (code11 == MINUS_EXPR)
9703 tree tree110, tree111;
9704 tree110 = TREE_OPERAND (tree11, 0);
9705 tree111 = TREE_OPERAND (tree11, 1);
9706 STRIP_NOPS (tree110);
9707 STRIP_NOPS (tree111);
9708 if (TREE_CODE (tree110) == INTEGER_CST
9709 && compare_tree_int (tree110,
9710 element_precision (rtype)) == 0
9711 && operand_equal_p (tree01, tree111, 0))
9713 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9714 ? LROTATE_EXPR : RROTATE_EXPR),
9715 rtype, TREE_OPERAND (arg0, 0),
9716 orig_tree01);
9717 return fold_convert_loc (loc, type, tem);
9720 else if (code == BIT_IOR_EXPR
9721 && code11 == BIT_AND_EXPR
9722 && pow2p_hwi (element_precision (rtype)))
9724 tree tree110, tree111;
9725 tree110 = TREE_OPERAND (tree11, 0);
9726 tree111 = TREE_OPERAND (tree11, 1);
9727 STRIP_NOPS (tree110);
9728 STRIP_NOPS (tree111);
9729 if (TREE_CODE (tree110) == NEGATE_EXPR
9730 && TREE_CODE (tree111) == INTEGER_CST
9731 && compare_tree_int (tree111,
9732 element_precision (rtype) - 1) == 0
9733 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
9735 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9736 ? LROTATE_EXPR : RROTATE_EXPR),
9737 rtype, TREE_OPERAND (arg0, 0),
9738 orig_tree01);
9739 return fold_convert_loc (loc, type, tem);
9745 associate:
9746 /* In most languages, can't associate operations on floats through
9747 parentheses. Rather than remember where the parentheses were, we
9748 don't associate floats at all, unless the user has specified
9749 -fassociative-math.
9750 And, we need to make sure type is not saturating. */
9752 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9753 && !TYPE_SATURATING (type))
9755 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
9756 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
9757 tree atype = type;
9758 bool ok = true;
9760 /* Split both trees into variables, constants, and literals. Then
9761 associate each group together, the constants with literals,
9762 then the result with variables. This increases the chances of
9763 literals being recombined later and of generating relocatable
9764 expressions for the sum of a constant and literal. */
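/* Editorial example (not part of the source): for (X + 1) + (Y + 2)
   the splits give var0 == X, lit0 == 1, var1 == Y, lit1 == 2, and
   the re-association below produces (X + Y) + 3. */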
9765 var0 = split_tree (arg0, type, code,
9766 &minus_var0, &con0, &minus_con0,
9767 &lit0, &minus_lit0, 0);
9768 var1 = split_tree (arg1, type, code,
9769 &minus_var1, &con1, &minus_con1,
9770 &lit1, &minus_lit1, code == MINUS_EXPR);
9772 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9773 if (code == MINUS_EXPR)
9774 code = PLUS_EXPR;
9776 /* With undefined overflow prefer doing association in a type
9777 which wraps on overflow, if that is one of the operand types. */
9778 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
9779 && !TYPE_OVERFLOW_WRAPS (type))
9781 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9782 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9783 atype = TREE_TYPE (arg0);
9784 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9785 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9786 atype = TREE_TYPE (arg1);
9787 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9790 /* With undefined overflow we can only associate constants with one
9791 variable, and constants whose association doesn't overflow. */
9792 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
9793 && !TYPE_OVERFLOW_WRAPS (atype))
9795 if ((var0 && var1) || (minus_var0 && minus_var1))
9797 /* ??? If split_tree would handle NEGATE_EXPR we could
9798 simply reject these cases and the allowed cases would
9799 be the var0/minus_var1 ones. */
9800 tree tmp0 = var0 ? var0 : minus_var0;
9801 tree tmp1 = var1 ? var1 : minus_var1;
9802 bool one_neg = false;
9804 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9806 tmp0 = TREE_OPERAND (tmp0, 0);
9807 one_neg = !one_neg;
9809 if (CONVERT_EXPR_P (tmp0)
9810 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9811 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9812 <= TYPE_PRECISION (atype)))
9813 tmp0 = TREE_OPERAND (tmp0, 0);
9814 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9816 tmp1 = TREE_OPERAND (tmp1, 0);
9817 one_neg = !one_neg;
9819 if (CONVERT_EXPR_P (tmp1)
9820 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9821 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9822 <= TYPE_PRECISION (atype)))
9823 tmp1 = TREE_OPERAND (tmp1, 0);
9824 /* The only case we can still associate with two variables
9825 is if they cancel out. */
9826 if (!one_neg
9827 || !operand_equal_p (tmp0, tmp1, 0))
9828 ok = false;
9830 else if ((var0 && minus_var1
9831 && ! operand_equal_p (var0, minus_var1, 0))
9832 || (minus_var0 && var1
9833 && ! operand_equal_p (minus_var0, var1, 0)))
9834 ok = false;
9837 /* Only do something if we found more than two objects. Otherwise,
9838 nothing has changed and we risk infinite recursion. */
9839 if (ok
9840 && ((var0 != 0) + (var1 != 0)
9841 + (minus_var0 != 0) + (minus_var1 != 0)
9842 + (con0 != 0) + (con1 != 0)
9843 + (minus_con0 != 0) + (minus_con1 != 0)
9844 + (lit0 != 0) + (lit1 != 0)
9845 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
9847 var0 = associate_trees (loc, var0, var1, code, atype);
9848 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
9849 code, atype);
9850 con0 = associate_trees (loc, con0, con1, code, atype);
9851 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
9852 code, atype);
9853 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9854 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9855 code, atype);
9857 if (minus_var0 && var0)
9859 var0 = associate_trees (loc, var0, minus_var0,
9860 MINUS_EXPR, atype);
9861 minus_var0 = 0;
9863 if (minus_con0 && con0)
9865 con0 = associate_trees (loc, con0, minus_con0,
9866 MINUS_EXPR, atype);
9867 minus_con0 = 0;
9870 /* Preserve the MINUS_EXPR if the negative part of the literal is
9871 greater than the positive part. Otherwise, the multiplicative
9872 folding code (i.e. extract_muldiv) may be fooled in case
9873 unsigned constants are subtracted, like in the following
9874 example: ((X*2 + 4) - 8U)/2. */
9875 if (minus_lit0 && lit0)
9877 if (TREE_CODE (lit0) == INTEGER_CST
9878 && TREE_CODE (minus_lit0) == INTEGER_CST
9879 && tree_int_cst_lt (lit0, minus_lit0)
9880 /* But avoid ending up with only negated parts. */
9881 && (var0 || con0))
9883 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9884 MINUS_EXPR, atype);
9885 lit0 = 0;
9887 else
9889 lit0 = associate_trees (loc, lit0, minus_lit0,
9890 MINUS_EXPR, atype);
9891 minus_lit0 = 0;
9895 /* Don't introduce overflows through reassociation. */
9896 if ((lit0 && TREE_OVERFLOW_P (lit0))
9897 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
9898 return NULL_TREE;
9900 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
9901 con0 = associate_trees (loc, con0, lit0, code, atype);
9902 lit0 = 0;
9903 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
9904 code, atype);
9905 minus_lit0 = 0;
9907 /* Eliminate minus_con0. */
9908 if (minus_con0)
9910 if (con0)
9911 con0 = associate_trees (loc, con0, minus_con0,
9912 MINUS_EXPR, atype);
9913 else if (var0)
9914 var0 = associate_trees (loc, var0, minus_con0,
9915 MINUS_EXPR, atype);
9916 else
9917 gcc_unreachable ();
9918 minus_con0 = 0;
9921 /* Eliminate minus_var0. */
9922 if (minus_var0)
9924 if (con0)
9925 con0 = associate_trees (loc, con0, minus_var0,
9926 MINUS_EXPR, atype);
9927 else
9928 gcc_unreachable ();
9929 minus_var0 = 0;
9932 return
9933 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9934 code, atype));
9938 return NULL_TREE;
9940 case POINTER_DIFF_EXPR:
9941 case MINUS_EXPR:
9942 /* Fold &a[i] - &a[j] to i-j. */
9943 if (TREE_CODE (arg0) == ADDR_EXPR
9944 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9945 && TREE_CODE (arg1) == ADDR_EXPR
9946 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9948 tree tem = fold_addr_of_array_ref_difference (loc, type,
9949 TREE_OPERAND (arg0, 0),
9950 TREE_OPERAND (arg1, 0),
9951 code
9952 == POINTER_DIFF_EXPR);
9953 if (tem)
9954 return tem;
9957 /* Further transformations are not for pointers. */
9958 if (code == POINTER_DIFF_EXPR)
9959 return NULL_TREE;
9961 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9962 if (TREE_CODE (arg0) == NEGATE_EXPR
9963 && negate_expr_p (op1)
9964 /* If arg0 is e.g. unsigned int and type is int, then this could
9965 introduce UB, because if A is INT_MIN at runtime, the original
9966 expression can be well defined while the latter is not.
9967 See PR83269. */
9968 && !(ANY_INTEGRAL_TYPE_P (type)
9969 && TYPE_OVERFLOW_UNDEFINED (type)
9970 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9971 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9972 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
9973 fold_convert_loc (loc, type,
9974 TREE_OPERAND (arg0, 0)));
9976 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9977 __complex__ ( x, -y ). This is not the same for SNaNs or if
9978 signed zeros are involved. */
9979 if (!HONOR_SNANS (element_mode (arg0))
9980 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9981 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9983 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9984 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9985 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9986 bool arg0rz = false, arg0iz = false;
9987 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9988 || (arg0i && (arg0iz = real_zerop (arg0i))))
9990 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9991 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9992 if (arg0rz && arg1i && real_zerop (arg1i))
9994 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9995 arg1r ? arg1r
9996 : build1 (REALPART_EXPR, rtype, arg1));
9997 tree ip = arg0i ? arg0i
9998 : build1 (IMAGPART_EXPR, rtype, arg0);
9999 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10001 else if (arg0iz && arg1r && real_zerop (arg1r))
10003 tree rp = arg0r ? arg0r
10004 : build1 (REALPART_EXPR, rtype, arg0);
10005 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10006 arg1i ? arg1i
10007 : build1 (IMAGPART_EXPR, rtype, arg1));
10008 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10013 /* A - B -> A + (-B) if B is easily negatable. */
10014 if (negate_expr_p (op1)
10015 && ! TYPE_OVERFLOW_SANITIZED (type)
10016 && ((FLOAT_TYPE_P (type)
10017 /* Avoid this transformation if B is a positive REAL_CST. */
10018 && (TREE_CODE (op1) != REAL_CST
10019 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
10020 || INTEGRAL_TYPE_P (type)))
10021 return fold_build2_loc (loc, PLUS_EXPR, type,
10022 fold_convert_loc (loc, type, arg0),
10023 negate_expr (op1));
10025 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10026 one. Make sure the type is not saturating and has the signedness of
10027 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10028 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10029 if ((TREE_CODE (arg0) == MULT_EXPR
10030 || TREE_CODE (arg1) == MULT_EXPR)
10031 && !TYPE_SATURATING (type)
10032 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10033 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10034 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10036 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10037 if (tem)
10038 return tem;
10041 goto associate;
10043 case MULT_EXPR:
10044 if (! FLOAT_TYPE_P (type))
10046 /* Transform x * -C into -x * C if x is easily negatable. */
10047 if (TREE_CODE (op1) == INTEGER_CST
10048 && tree_int_cst_sgn (op1) == -1
10049 && negate_expr_p (op0)
10050 && negate_expr_p (op1)
10051 && (tem = negate_expr (op1)) != op1
10052 && ! TREE_OVERFLOW (tem))
10053 return fold_build2_loc (loc, MULT_EXPR, type,
10054 fold_convert_loc (loc, type,
10055 negate_expr (op0)), tem);
10057 strict_overflow_p = false;
10058 if (TREE_CODE (arg1) == INTEGER_CST
10059 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10060 &strict_overflow_p)) != 0)
10062 if (strict_overflow_p)
10063 fold_overflow_warning (("assuming signed overflow does not "
10064 "occur when simplifying "
10065 "multiplication"),
10066 WARN_STRICT_OVERFLOW_MISC);
10067 return fold_convert_loc (loc, type, tem);
10070 /* Optimize z * conj(z) for integer complex numbers. */
10071 if (TREE_CODE (arg0) == CONJ_EXPR
10072 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10073 return fold_mult_zconjz (loc, type, arg1);
10074 if (TREE_CODE (arg1) == CONJ_EXPR
10075 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10076 return fold_mult_zconjz (loc, type, arg0);
10078 else
10080 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10081 This is not the same for NaNs or if signed zeros are
10082 involved. */
10083 if (!HONOR_NANS (arg0)
10084 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10085 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10086 && TREE_CODE (arg1) == COMPLEX_CST
10087 && real_zerop (TREE_REALPART (arg1)))
10089 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10090 if (real_onep (TREE_IMAGPART (arg1)))
10091 return
10092 fold_build2_loc (loc, COMPLEX_EXPR, type,
10093 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10094 rtype, arg0)),
10095 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10096 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10097 return
10098 fold_build2_loc (loc, COMPLEX_EXPR, type,
10099 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10100 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10101 rtype, arg0)));
10104 /* Optimize z * conj(z) for floating point complex numbers.
10105 Guarded by flag_unsafe_math_optimizations as non-finite
10106 imaginary components don't produce scalar results. */
10107 if (flag_unsafe_math_optimizations
10108 && TREE_CODE (arg0) == CONJ_EXPR
10109 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10110 return fold_mult_zconjz (loc, type, arg1);
10111 if (flag_unsafe_math_optimizations
10112 && TREE_CODE (arg1) == CONJ_EXPR
10113 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10114 return fold_mult_zconjz (loc, type, arg0);
10116 goto associate;
10118 case BIT_IOR_EXPR:
10119 /* Canonicalize (X & C1) | C2. */
10120 if (TREE_CODE (arg0) == BIT_AND_EXPR
10121 && TREE_CODE (arg1) == INTEGER_CST
10122 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10124 int width = TYPE_PRECISION (type), w;
10125 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
10126 wide_int c2 = wi::to_wide (arg1);
10128 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10129 if ((c1 & c2) == c1)
10130 return omit_one_operand_loc (loc, type, arg1,
10131 TREE_OPERAND (arg0, 0));
10133 wide_int msk = wi::mask (width, false,
10134 TYPE_PRECISION (TREE_TYPE (arg1)));
10136 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10137 if (wi::bit_and_not (msk, c1 | c2) == 0)
10139 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10140 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10143 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10144 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10145 mode which allows further optimizations. */
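/* Editorial instance (not part of the source): for (X & 3) | 2,
   C1 & ~C2 == 1 and C1 | C2 == 3 covers no byte-or-wider mask, so
   the expression becomes (X & 1) | 2. */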
10146 c1 &= msk;
10147 c2 &= msk;
10148 wide_int c3 = wi::bit_and_not (c1, c2);
10149 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10151 wide_int mask = wi::mask (w, false,
10152 TYPE_PRECISION (type));
10153 if (((c1 | c2) & mask) == mask
10154 && wi::bit_and_not (c1, mask) == 0)
10156 c3 = mask;
10157 break;
10161 if (c3 != c1)
10163 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10164 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
10165 wide_int_to_tree (type, c3));
10166 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10170 /* See if this can be simplified into a rotate first. If that
10171 is unsuccessful, continue in the association code. */
10172 goto bit_rotate;
10174 case BIT_XOR_EXPR:
10175 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10176 if (TREE_CODE (arg0) == BIT_AND_EXPR
10177 && INTEGRAL_TYPE_P (type)
10178 && integer_onep (TREE_OPERAND (arg0, 1))
10179 && integer_onep (arg1))
10180 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10181 build_zero_cst (TREE_TYPE (arg0)));
10183 /* See if this can be simplified into a rotate first. If that
10184 is unsuccessful, continue in the association code. */
10185 goto bit_rotate;
10187 case BIT_AND_EXPR:
10188 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10189 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10190 && INTEGRAL_TYPE_P (type)
10191 && integer_onep (TREE_OPERAND (arg0, 1))
10192 && integer_onep (arg1))
10194 tree tem2;
10195 tem = TREE_OPERAND (arg0, 0);
10196 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10197 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10198 tem, tem2);
10199 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10200 build_zero_cst (TREE_TYPE (tem)));
10202 /* Fold ~X & 1 as (X & 1) == 0. */
10203 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10204 && INTEGRAL_TYPE_P (type)
10205 && integer_onep (arg1))
10207 tree tem2;
10208 tem = TREE_OPERAND (arg0, 0);
10209 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10210 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10211 tem, tem2);
10212 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10213 build_zero_cst (TREE_TYPE (tem)));
10215 /* Fold !X & 1 as X == 0. */
10216 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10217 && integer_onep (arg1))
10219 tem = TREE_OPERAND (arg0, 0);
10220 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10221 build_zero_cst (TREE_TYPE (tem)));
10224 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10225 multiple of 1 << CST. */
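/* Editorial instance (not part of the source): (X * 4) & -4 folds to
   X * 4, since X * 4 is always a multiple of 4 == 1 << 2 and the
   AND with ...11100 therefore changes nothing. */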
10226 if (TREE_CODE (arg1) == INTEGER_CST)
10228 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10229 wide_int ncst1 = -cst1;
10230 if ((cst1 & ncst1) == ncst1
10231 && multiple_of_p (type, arg0,
10232 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10233 return fold_convert_loc (loc, type, arg0);
10236 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10237 bits from CST2. */
10238 if (TREE_CODE (arg1) == INTEGER_CST
10239 && TREE_CODE (arg0) == MULT_EXPR
10240 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10242 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
10243 wide_int masked
10244 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
10246 if (masked == 0)
10247 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10248 arg0, arg1);
10249 else if (masked != warg1)
10251 /* Avoid the transform if arg1 is a mask of some
10252 mode which allows further optimizations. */
10253 int pop = wi::popcount (warg1);
10254 if (!(pop >= BITS_PER_UNIT
10255 && pow2p_hwi (pop)
10256 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10257 return fold_build2_loc (loc, code, type, op0,
10258 wide_int_to_tree (type, masked));
10262 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10263 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10264 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10266 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10268 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
10269 if (mask == -1)
10270 return
10271 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10274 goto associate;
10276 case RDIV_EXPR:
10277 /* Don't touch a floating-point divide by zero unless the mode
10278 of the constant can represent infinity. */
10279 if (TREE_CODE (arg1) == REAL_CST
10280 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10281 && real_zerop (arg1))
10282 return NULL_TREE;
10284 /* (-A) / (-B) -> A / B */
10285 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10286 return fold_build2_loc (loc, RDIV_EXPR, type,
10287 TREE_OPERAND (arg0, 0),
10288 negate_expr (arg1));
10289 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10290 return fold_build2_loc (loc, RDIV_EXPR, type,
10291 negate_expr (arg0),
10292 TREE_OPERAND (arg1, 0));
10293 return NULL_TREE;
10295 case TRUNC_DIV_EXPR:
10296 /* Fall through */
10298 case FLOOR_DIV_EXPR:
10299 /* Simplify A / (B << N) where A and B are positive and B is
10300 a power of 2, to A >> (N + log2(B)). */
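/* Editorial instance (not part of the source): for unsigned A,
   A / (2 << N) becomes A >> (N + 1), using log2 (2) == 1. */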
10301 strict_overflow_p = false;
10302 if (TREE_CODE (arg1) == LSHIFT_EXPR
10303 && (TYPE_UNSIGNED (type)
10304 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10306 tree sval = TREE_OPERAND (arg1, 0);
10307 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10309 tree sh_cnt = TREE_OPERAND (arg1, 1);
10310 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10311 wi::exact_log2 (wi::to_wide (sval)));
10313 if (strict_overflow_p)
10314 fold_overflow_warning (("assuming signed overflow does not "
10315 "occur when simplifying A / (B << N)"),
10316 WARN_STRICT_OVERFLOW_MISC);
10318 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10319 sh_cnt, pow2);
10320 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10321 fold_convert_loc (loc, type, arg0), sh_cnt);
10325 /* Fall through */
10327 case ROUND_DIV_EXPR:
10328 case CEIL_DIV_EXPR:
10329 case EXACT_DIV_EXPR:
10330 if (integer_zerop (arg1))
10331 return NULL_TREE;
10333 /* Convert -A / -B to A / B when the type is signed and overflow is
10334 undefined. */
10335 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10336 && TREE_CODE (op0) == NEGATE_EXPR
10337 && negate_expr_p (op1))
10339 if (INTEGRAL_TYPE_P (type))
10340 fold_overflow_warning (("assuming signed overflow does not occur "
10341 "when distributing negation across "
10342 "division"),
10343 WARN_STRICT_OVERFLOW_MISC);
10344 return fold_build2_loc (loc, code, type,
10345 fold_convert_loc (loc, type,
10346 TREE_OPERAND (arg0, 0)),
10347 negate_expr (op1));
10349 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10350 && TREE_CODE (arg1) == NEGATE_EXPR
10351 && negate_expr_p (op0))
10353 if (INTEGRAL_TYPE_P (type))
10354 fold_overflow_warning (("assuming signed overflow does not occur "
10355 "when distributing negation across "
10356 "division"),
10357 WARN_STRICT_OVERFLOW_MISC);
10358 return fold_build2_loc (loc, code, type,
10359 negate_expr (op0),
10360 fold_convert_loc (loc, type,
10361 TREE_OPERAND (arg1, 0)));
10364 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10365 operation, EXACT_DIV_EXPR.
10367 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10368 At one time others generated faster code; it's not clear if they do
10369 after the last round of changes to the DIV code in expmed.c. */
10370 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10371 && multiple_of_p (type, arg0, arg1))
10372 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10373 fold_convert (type, arg0),
10374 fold_convert (type, arg1));
10376 strict_overflow_p = false;
10377 if (TREE_CODE (arg1) == INTEGER_CST
10378 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10379 &strict_overflow_p)) != 0)
10381 if (strict_overflow_p)
10382 fold_overflow_warning (("assuming signed overflow does not occur "
10383 "when simplifying division"),
10384 WARN_STRICT_OVERFLOW_MISC);
10385 return fold_convert_loc (loc, type, tem);
10388 return NULL_TREE;
10390 case CEIL_MOD_EXPR:
10391 case FLOOR_MOD_EXPR:
10392 case ROUND_MOD_EXPR:
10393 case TRUNC_MOD_EXPR:
10394 strict_overflow_p = false;
10395 if (TREE_CODE (arg1) == INTEGER_CST
10396 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10397 &strict_overflow_p)) != 0)
10399 if (strict_overflow_p)
10400 fold_overflow_warning (("assuming signed overflow does not occur "
10401 "when simplifying modulus"),
10402 WARN_STRICT_OVERFLOW_MISC);
10403 return fold_convert_loc (loc, type, tem);
10406 return NULL_TREE;
10408 case LROTATE_EXPR:
10409 case RROTATE_EXPR:
10410 case RSHIFT_EXPR:
10411 case LSHIFT_EXPR:
10412 /* Since negative shift count is not well-defined,
10413 don't try to compute it in the compiler. */
10414 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10415 return NULL_TREE;
10417 prec = element_precision (type);
10419 /* If we have a rotate of a bit operation with the rotate count and
10420 the second operand of the bit operation both constant,
10421 permute the two operations. */
10422 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10423 && (TREE_CODE (arg0) == BIT_AND_EXPR
10424 || TREE_CODE (arg0) == BIT_IOR_EXPR
10425 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10426 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10428 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10429 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10430 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10431 fold_build2_loc (loc, code, type,
10432 arg00, arg1),
10433 fold_build2_loc (loc, code, type,
10434 arg01, arg1));
10437 /* Two consecutive rotates adding up to some integer
10438 multiple of the precision of the type can be ignored. */
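/* Editorial instance (not part of the source): on a 32-bit type,
   ror (ror (X, 10), 22) rotates by 10 + 22 == 32 == 0 (mod 32)
   and therefore folds back to X. */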
10439 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10440 && TREE_CODE (arg0) == RROTATE_EXPR
10441 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10442 && wi::umod_trunc (wi::to_wide (arg1)
10443 + wi::to_wide (TREE_OPERAND (arg0, 1)),
10444 prec) == 0)
10445 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10447 return NULL_TREE;
10449 case MIN_EXPR:
10450 case MAX_EXPR:
10451 goto associate;
10453 case TRUTH_ANDIF_EXPR:
10454 /* Note that the operands of this must be ints
10455 and their values must be 0 or 1.
10456 ("true" is a fixed value perhaps depending on the language.) */
10457 /* If first arg is constant zero, return it. */
10458 if (integer_zerop (arg0))
10459 return fold_convert_loc (loc, type, arg0);
10460 /* FALLTHRU */
10461 case TRUTH_AND_EXPR:
10462 /* If either arg is constant true, drop it. */
10463 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10464 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10465 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10466 /* Preserve sequence points. */
10467 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10468 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10469 /* If second arg is constant zero, result is zero, but first arg
10470 must be evaluated. */
10471 if (integer_zerop (arg1))
10472 return omit_one_operand_loc (loc, type, arg1, arg0);
10473 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10474 case will be handled here. */
10475 if (integer_zerop (arg0))
10476 return omit_one_operand_loc (loc, type, arg0, arg1);
10478 /* !X && X is always false. */
10479 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10480 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10481 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10482 /* X && !X is always false. */
10483 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10484 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10485 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10487 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10488 means A >= Y && A != MAX, but in this case we know that
10489 A < X <= MAX. */
10491 if (!TREE_SIDE_EFFECTS (arg0)
10492 && !TREE_SIDE_EFFECTS (arg1))
10494 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10495 if (tem && !operand_equal_p (tem, arg0, 0))
10496 return fold_build2_loc (loc, code, type, tem, arg1);
10498 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10499 if (tem && !operand_equal_p (tem, arg1, 0))
10500 return fold_build2_loc (loc, code, type, arg0, tem);
10503 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10504 != NULL_TREE)
10505 return tem;
10507 return NULL_TREE;
10509 case TRUTH_ORIF_EXPR:
10510 /* Note that the operands of this must be ints
10511 and their values must be 0 or true.
10512 ("true" is a fixed value perhaps depending on the language.) */
10513 /* If first arg is constant true, return it. */
10514 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10515 return fold_convert_loc (loc, type, arg0);
10516 /* FALLTHRU */
10517 case TRUTH_OR_EXPR:
10518 /* If either arg is constant zero, drop it. */
10519 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10520 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10521 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10522 /* Preserve sequence points. */
10523 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10524 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10525 /* If second arg is constant true, result is true, but we must
10526 evaluate first arg. */
10527 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10528 return omit_one_operand_loc (loc, type, arg1, arg0);
10529 /* Likewise for first arg, but note this only occurs here for
10530 TRUTH_OR_EXPR. */
10531 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10532 return omit_one_operand_loc (loc, type, arg0, arg1);
10534 /* !X || X is always true. */
10535 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10536 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10537 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10538 /* X || !X is always true. */
10539 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10540 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10541 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10543 /* (X && !Y) || (!X && Y) is X ^ Y */
10544 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10545 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10547 tree a0, a1, l0, l1, n0, n1;
10549 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10550 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10552 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10553 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10555 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10556 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10558 if ((operand_equal_p (n0, a0, 0)
10559 && operand_equal_p (n1, a1, 0))
10560 || (operand_equal_p (n0, a1, 0)
10561 && operand_equal_p (n1, a0, 0)))
10562 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10565 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10566 != NULL_TREE)
10567 return tem;
10569 return NULL_TREE;
10571 case TRUTH_XOR_EXPR:
10572 /* If the second arg is constant zero, drop it. */
10573 if (integer_zerop (arg1))
10574 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10575 /* If the second arg is constant true, this is a logical inversion. */
10576 if (integer_onep (arg1))
10578 tem = invert_truthvalue_loc (loc, arg0);
10579 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10581 /* Identical arguments cancel to zero. */
10582 if (operand_equal_p (arg0, arg1, 0))
10583 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10585 /* !X ^ X is always true. */
10586 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10587 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10588 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10590 /* X ^ !X is always true. */
10591 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10592 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10593 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10595 return NULL_TREE;
10597 case EQ_EXPR:
10598 case NE_EXPR:
10599 STRIP_NOPS (arg0);
10600 STRIP_NOPS (arg1);
10602 tem = fold_comparison (loc, code, type, op0, op1);
10603 if (tem != NULL_TREE)
10604 return tem;
10606 /* bool_var != 1 becomes !bool_var. */
10607 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10608 && code == NE_EXPR)
10609 return fold_convert_loc (loc, type,
10610 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10611 TREE_TYPE (arg0), arg0));
10613 /* bool_var == 0 becomes !bool_var. */
10614 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10615 && code == EQ_EXPR)
10616 return fold_convert_loc (loc, type,
10617 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10618 TREE_TYPE (arg0), arg0));
10620 /* !exp != 0 becomes !exp */
10621 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10622 && code == NE_EXPR)
10623 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10625 /* If this is an EQ or NE comparison with zero and ARG0 is
10626 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10627 two operations, but the latter can be done in one less insn
10628 on machines that have only two-operand insns or on which a
10629 constant cannot be the first operand. */
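/* Editorial instance (not part of the source): ((1 << N) & X) != 0
   tests bit N of X and is rewritten below as ((X >> N) & 1) != 0. */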
10630 if (TREE_CODE (arg0) == BIT_AND_EXPR
10631 && integer_zerop (arg1))
10633 tree arg00 = TREE_OPERAND (arg0, 0);
10634 tree arg01 = TREE_OPERAND (arg0, 1);
10635 if (TREE_CODE (arg00) == LSHIFT_EXPR
10636 && integer_onep (TREE_OPERAND (arg00, 0)))
10638 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10639 arg01, TREE_OPERAND (arg00, 1));
10640 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10641 build_int_cst (TREE_TYPE (arg0), 1));
10642 return fold_build2_loc (loc, code, type,
10643 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10644 arg1);
10646 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10647 && integer_onep (TREE_OPERAND (arg01, 0)))
10649 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10650 arg00, TREE_OPERAND (arg01, 1));
10651 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10652 build_int_cst (TREE_TYPE (arg0), 1));
10653 return fold_build2_loc (loc, code, type,
10654 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10655 arg1);
10659 /* If this is an NE or EQ comparison of zero against the result of a
10660 signed MOD operation whose second operand is a power of 2, make
10661 the MOD operation unsigned since it is simpler and equivalent. */
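/* Editorial instance (not part of the source, 32-bit int): (X % 8) == 0
   is equivalent to ((unsigned) X % 8) == 0 because 8 divides 2^32;
   e.g. for X == -8 both remainders are 0, for X == -3 they are -3
   and 5, both nonzero. */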
10662 if (integer_zerop (arg1)
10663 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10664 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10665 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10666 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10667 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10668 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10670 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10671 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10672 fold_convert_loc (loc, newtype,
10673 TREE_OPERAND (arg0, 0)),
10674 fold_convert_loc (loc, newtype,
10675 TREE_OPERAND (arg0, 1)));
10677 return fold_build2_loc (loc, code, type, newmod,
10678 fold_convert_loc (loc, newtype, arg1));
10681 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10682 C1 is a valid shift constant, and C2 is a power of two, i.e.
10683 a single bit. */
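/* Editorial instance (not part of the source): ((X >> 3) & 4) != 0
   becomes (X & (4 << 3)) != 0, i.e. (X & 32) != 0, since 4 << 3
   does not overflow the precision. */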
10684 if (TREE_CODE (arg0) == BIT_AND_EXPR
10685 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10686 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10687 == INTEGER_CST
10688 && integer_pow2p (TREE_OPERAND (arg0, 1))
10689 && integer_zerop (arg1))
10691 tree itype = TREE_TYPE (arg0);
10692 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10693 prec = TYPE_PRECISION (itype);
10695 /* Check for a valid shift count. */
10696 if (wi::ltu_p (wi::to_wide (arg001), prec))
10698 tree arg01 = TREE_OPERAND (arg0, 1);
10699 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10700 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10701 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10702 can be rewritten as (X & (C2 << C1)) != 0. */
10703 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10705 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10706 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10707 return fold_build2_loc (loc, code, type, tem,
10708 fold_convert_loc (loc, itype, arg1));
10710 /* Otherwise, for signed (arithmetic) shifts,
10711 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10712 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10713 else if (!TYPE_UNSIGNED (itype))
10714 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10715 arg000, build_int_cst (itype, 0));
10716 /* Otherwise, for unsigned (logical) shifts,
10717 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10718 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10719 else
10720 return omit_one_operand_loc (loc, type,
10721 code == EQ_EXPR ? integer_one_node
10722 : integer_zero_node,
10723 arg000);
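/* Illustrative examples, assuming 32-bit int x and unsigned u:

     ((x >> 3) & 4) != 0    -->   (x & 32) != 0   (4 << 3 fits)
     ((x >> 29) & 8) != 0   -->   x < 0           (tests the sign bit)
     ((u >> 29) & 8) != 0   -->   false           (bit already shifted out)

   An arithmetic shift replicates the sign bit, so the second test
   reduces to a sign check; a logical shift fills with zeros, so the
   third is constant.  */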
10727 /* If this is a comparison of a field, we may be able to simplify it. */
10728 if ((TREE_CODE (arg0) == COMPONENT_REF
10729 || TREE_CODE (arg0) == BIT_FIELD_REF)
10730 /* Handle the constant case even without -O
10731 to make sure the warnings are given. */
10732 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10734 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10735 if (t1)
10736 return t1;
10739 /* Optimize comparisons of strlen vs zero to a compare of the
10740 first character of the string vs zero. To wit,
10741 strlen(ptr) == 0 => *ptr == 0
10742 strlen(ptr) != 0 => *ptr != 0
10743 Other cases should reduce to one of these two (or a constant)
10744 due to the return value of strlen being unsigned. */
10745 if (TREE_CODE (arg0) == CALL_EXPR
10746 && integer_zerop (arg1))
10748 tree fndecl = get_callee_fndecl (arg0);
10750 if (fndecl
10751 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10752 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10753 && call_expr_nargs (arg0) == 1
10754 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10756 tree iref = build_fold_indirect_ref_loc (loc,
10757 CALL_EXPR_ARG (arg0, 0));
10758 return fold_build2_loc (loc, code, type, iref,
10759 build_int_cst (TREE_TYPE (iref), 0));
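/* For instance:

     strlen (p) == 0   -->   *p == 0
     strlen (p) != 0   -->   *p != 0

   Only the first character needs to be inspected.  */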
10763 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10764 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10765 if (TREE_CODE (arg0) == RSHIFT_EXPR
10766 && integer_zerop (arg1)
10767 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10769 tree arg00 = TREE_OPERAND (arg0, 0);
10770 tree arg01 = TREE_OPERAND (arg0, 1);
10771 tree itype = TREE_TYPE (arg00);
10772 if (wi::to_wide (arg01) == element_precision (itype) - 1)
10774 if (TYPE_UNSIGNED (itype))
10776 itype = signed_type_for (itype);
10777 arg00 = fold_convert_loc (loc, itype, arg00);
10779 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10780 type, arg00, build_zero_cst (itype));
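/* For instance, with a 32-bit x:

     (x >> 31) != 0   -->   x < 0
     (x >> 31) == 0   -->   x >= 0

   converting x to the corresponding signed type first if it was
   unsigned.  */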
10784 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10785 (X & C) == 0 when C is a single bit. */
10786 if (TREE_CODE (arg0) == BIT_AND_EXPR
10787 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10788 && integer_zerop (arg1)
10789 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10791 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10792 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10793 TREE_OPERAND (arg0, 1));
10794 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10795 type, tem,
10796 fold_convert_loc (loc, TREE_TYPE (arg0),
10797 arg1));
10800 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10801 constant C is a power of two, i.e. a single bit. */
10802 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10803 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10804 && integer_zerop (arg1)
10805 && integer_pow2p (TREE_OPERAND (arg0, 1))
10806 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10807 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10809 tree arg00 = TREE_OPERAND (arg0, 0);
10810 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10811 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10814 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10815 when C is a power of two, i.e. a single bit. */
10816 if (TREE_CODE (arg0) == BIT_AND_EXPR
10817 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10818 && integer_zerop (arg1)
10819 && integer_pow2p (TREE_OPERAND (arg0, 1))
10820 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10821 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10823 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10824 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10825 arg000, TREE_OPERAND (arg0, 1));
10826 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10827 tem, build_int_cst (TREE_TYPE (tem), 0));
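/* Illustrative examples of the three single-bit folds above:

     (~x & 8) == 0        -->   (x & 8) != 0
     ((x & 8) ^ 8) != 0   -->   (x & 8) == 0
     ((x ^ 8) & 8) != 0   -->   (x & 8) == 0

   Each left-hand side tests the same bit as the right-hand side,
   only with inverted sense.  */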
10830 if (integer_zerop (arg1)
10831 && tree_expr_nonzero_p (arg0))
10833 tree res = constant_boolean_node (code == NE_EXPR, type);
10834 return omit_one_operand_loc (loc, type, res, arg0);
10837 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
10838 if (TREE_CODE (arg0) == BIT_AND_EXPR
10839 && TREE_CODE (arg1) == BIT_AND_EXPR)
10841 tree arg00 = TREE_OPERAND (arg0, 0);
10842 tree arg01 = TREE_OPERAND (arg0, 1);
10843 tree arg10 = TREE_OPERAND (arg1, 0);
10844 tree arg11 = TREE_OPERAND (arg1, 1);
10845 tree itype = TREE_TYPE (arg0);
10847 if (operand_equal_p (arg01, arg11, 0))
10849 tem = fold_convert_loc (loc, itype, arg10);
10850 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10851 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10852 return fold_build2_loc (loc, code, type, tem,
10853 build_zero_cst (itype));
10855 if (operand_equal_p (arg01, arg10, 0))
10857 tem = fold_convert_loc (loc, itype, arg11);
10858 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10859 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10860 return fold_build2_loc (loc, code, type, tem,
10861 build_zero_cst (itype));
10863 if (operand_equal_p (arg00, arg11, 0))
10865 tem = fold_convert_loc (loc, itype, arg10);
10866 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10867 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10868 return fold_build2_loc (loc, code, type, tem,
10869 build_zero_cst (itype));
10871 if (operand_equal_p (arg00, arg10, 0))
10873 tem = fold_convert_loc (loc, itype, arg11);
10874 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10875 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10876 return fold_build2_loc (loc, code, type, tem,
10877 build_zero_cst (itype));
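/* For instance:

     (x & 7) == (y & 7)   -->   ((x ^ y) & 7) == 0

   The XOR cancels the bits on which x and y agree, leaving a single
   test against zero.  */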
10881 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10882 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10884 tree arg00 = TREE_OPERAND (arg0, 0);
10885 tree arg01 = TREE_OPERAND (arg0, 1);
10886 tree arg10 = TREE_OPERAND (arg1, 0);
10887 tree arg11 = TREE_OPERAND (arg1, 1);
10888 tree itype = TREE_TYPE (arg0);
10890 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10891 operand_equal_p guarantees no side-effects so we don't need
10892 to use omit_one_operand on Z. */
10893 if (operand_equal_p (arg01, arg11, 0))
10894 return fold_build2_loc (loc, code, type, arg00,
10895 fold_convert_loc (loc, TREE_TYPE (arg00),
10896 arg10));
10897 if (operand_equal_p (arg01, arg10, 0))
10898 return fold_build2_loc (loc, code, type, arg00,
10899 fold_convert_loc (loc, TREE_TYPE (arg00),
10900 arg11));
10901 if (operand_equal_p (arg00, arg11, 0))
10902 return fold_build2_loc (loc, code, type, arg01,
10903 fold_convert_loc (loc, TREE_TYPE (arg01),
10904 arg10));
10905 if (operand_equal_p (arg00, arg10, 0))
10906 return fold_build2_loc (loc, code, type, arg01,
10907 fold_convert_loc (loc, TREE_TYPE (arg01),
10908 arg11));
10910 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10911 if (TREE_CODE (arg01) == INTEGER_CST
10912 && TREE_CODE (arg11) == INTEGER_CST)
10914 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10915 fold_convert_loc (loc, itype, arg11));
10916 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10917 return fold_build2_loc (loc, code, type, tem,
10918 fold_convert_loc (loc, itype, arg10));
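/* For instance:

     (x ^ 3) == (y ^ 5)   -->   (x ^ (3 ^ 5)) == y   -->   (x ^ 6) == y

   so the two constants are combined at compile time.  */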
10922 /* Attempt to simplify equality/inequality comparisons of complex
10923 values. Only lower the comparison if the result is known or
10924 can be simplified to a single scalar comparison. */
10925 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10926 || TREE_CODE (arg0) == COMPLEX_CST)
10927 && (TREE_CODE (arg1) == COMPLEX_EXPR
10928 || TREE_CODE (arg1) == COMPLEX_CST))
10930 tree real0, imag0, real1, imag1;
10931 tree rcond, icond;
10933 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10935 real0 = TREE_OPERAND (arg0, 0);
10936 imag0 = TREE_OPERAND (arg0, 1);
10938 else
10940 real0 = TREE_REALPART (arg0);
10941 imag0 = TREE_IMAGPART (arg0);
10944 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10946 real1 = TREE_OPERAND (arg1, 0);
10947 imag1 = TREE_OPERAND (arg1, 1);
10949 else
10951 real1 = TREE_REALPART (arg1);
10952 imag1 = TREE_IMAGPART (arg1);
10955 rcond = fold_binary_loc (loc, code, type, real0, real1);
10956 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10958 if (integer_zerop (rcond))
10960 if (code == EQ_EXPR)
10961 return omit_two_operands_loc (loc, type, boolean_false_node,
10962 imag0, imag1);
10963 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10965 else
10967 if (code == NE_EXPR)
10968 return omit_two_operands_loc (loc, type, boolean_true_node,
10969 imag0, imag1);
10970 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10974 icond = fold_binary_loc (loc, code, type, imag0, imag1);
10975 if (icond && TREE_CODE (icond) == INTEGER_CST)
10977 if (integer_zerop (icond))
10979 if (code == EQ_EXPR)
10980 return omit_two_operands_loc (loc, type, boolean_false_node,
10981 real0, real1);
10982 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10984 else
10986 if (code == NE_EXPR)
10987 return omit_two_operands_loc (loc, type, boolean_true_node,
10988 real0, real1);
10989 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
10994 return NULL_TREE;
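/* A sketch of the lowering above: with COMPLEX_EXPR operands a + 0i
   and b + 1i, the imaginary halves fold to the constant comparison
   0 == 1, so

     (a + 0i) == (b + 1i)   -->   false

   while any side effects of a and b are preserved through
   omit_two_operands.  */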
10996 case LT_EXPR:
10997 case GT_EXPR:
10998 case LE_EXPR:
10999 case GE_EXPR:
11000 tem = fold_comparison (loc, code, type, op0, op1);
11001 if (tem != NULL_TREE)
11002 return tem;
11004 /* Transform comparisons of the form X +- C CMP X. */
11005 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11006 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11007 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11008 && !HONOR_SNANS (arg0))
11010 tree arg01 = TREE_OPERAND (arg0, 1);
11011 enum tree_code code0 = TREE_CODE (arg0);
11012 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11014 /* (X - c) > X becomes false. */
11015 if (code == GT_EXPR
11016 && ((code0 == MINUS_EXPR && is_positive >= 0)
11017 || (code0 == PLUS_EXPR && is_positive <= 0)))
11018 return constant_boolean_node (0, type);
11020 /* Likewise (X + c) < X becomes false. */
11021 if (code == LT_EXPR
11022 && ((code0 == PLUS_EXPR && is_positive >= 0)
11023 || (code0 == MINUS_EXPR && is_positive <= 0)))
11024 return constant_boolean_node (0, type);
11026 /* Convert (X - c) <= X to true. */
11027 if (!HONOR_NANS (arg1)
11028 && code == LE_EXPR
11029 && ((code0 == MINUS_EXPR && is_positive >= 0)
11030 || (code0 == PLUS_EXPR && is_positive <= 0)))
11031 return constant_boolean_node (1, type);
11033 /* Convert (X + c) >= X to true. */
11034 if (!HONOR_NANS (arg1)
11035 && code == GE_EXPR
11036 && ((code0 == PLUS_EXPR && is_positive >= 0)
11037 || (code0 == MINUS_EXPR && is_positive <= 0)))
11038 return constant_boolean_node (1, type);
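/* For instance, with a float x:

     (x - 1.0) > x    -->   false   (holds even when x is a NaN)
     (x + 1.0) < x    -->   false   (likewise)
     (x - 1.0) <= x   -->   true    (only when NaNs need not be honored)
     (x + 1.0) >= x   -->   true    (likewise)  */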
11041 /* If we are comparing an ABS_EXPR with a constant, we can
11042 convert all the cases into explicit comparisons, but they may
11043 well not be faster than doing the ABS and one comparison.
11044 But ABS (X) <= C is a range comparison, which becomes a subtraction
11045 and a comparison, and is probably faster. */
11046 if (code == LE_EXPR
11047 && TREE_CODE (arg1) == INTEGER_CST
11048 && TREE_CODE (arg0) == ABS_EXPR
11049 && ! TREE_SIDE_EFFECTS (arg0)
11050 && (tem = negate_expr (arg1)) != 0
11051 && TREE_CODE (tem) == INTEGER_CST
11052 && !TREE_OVERFLOW (tem))
11053 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11054 build2 (GE_EXPR, type,
11055 TREE_OPERAND (arg0, 0), tem),
11056 build2 (LE_EXPR, type,
11057 TREE_OPERAND (arg0, 0), arg1));
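/* For instance:

     abs (x) <= 5   -->   x >= -5 && x <= 5

   built as a TRUTH_ANDIF_EXPR of the two endpoint comparisons.  */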
11059 /* Convert ABS_EXPR<x> >= 0 to true. */
11060 strict_overflow_p = false;
11061 if (code == GE_EXPR
11062 && (integer_zerop (arg1)
11063 || (! HONOR_NANS (arg0)
11064 && real_zerop (arg1)))
11065 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11067 if (strict_overflow_p)
11068 fold_overflow_warning (("assuming signed overflow does not occur "
11069 "when simplifying comparison of "
11070 "absolute value and zero"),
11071 WARN_STRICT_OVERFLOW_CONDITIONAL);
11072 return omit_one_operand_loc (loc, type,
11073 constant_boolean_node (true, type),
11074 arg0);
11077 /* Convert ABS_EXPR<x> < 0 to false. */
11078 strict_overflow_p = false;
11079 if (code == LT_EXPR
11080 && (integer_zerop (arg1) || real_zerop (arg1))
11081 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11083 if (strict_overflow_p)
11084 fold_overflow_warning (("assuming signed overflow does not occur "
11085 "when simplifying comparison of "
11086 "absolute value and zero"),
11087 WARN_STRICT_OVERFLOW_CONDITIONAL);
11088 return omit_one_operand_loc (loc, type,
11089 constant_boolean_node (false, type),
11090 arg0);
11093 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11094 and similarly for >= into !=. */
11095 if ((code == LT_EXPR || code == GE_EXPR)
11096 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11097 && TREE_CODE (arg1) == LSHIFT_EXPR
11098 && integer_onep (TREE_OPERAND (arg1, 0)))
11099 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11100 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11101 TREE_OPERAND (arg1, 1)),
11102 build_zero_cst (TREE_TYPE (arg0)));
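/* For instance, with unsigned x:

     x < (1 << y)    -->   (x >> y) == 0
     x >= (1 << y)   -->   (x >> y) != 0

   x < 2**y holds exactly when no bit at position y or above is
   set.  */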
11104 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
11105 otherwise Y might be >= # of bits in X's type and thus e.g.
11106 (unsigned char) (1 << Y) for Y 15 might be 0.
11107 If the cast is widening, then 1 << Y should have unsigned type,
11108 otherwise if Y is number of bits in the signed shift type minus 1,
11109 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11110 31 might be 0xffffffff80000000. */
11111 if ((code == LT_EXPR || code == GE_EXPR)
11112 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11113 && CONVERT_EXPR_P (arg1)
11114 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11115 && (element_precision (TREE_TYPE (arg1))
11116 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11117 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11118 || (element_precision (TREE_TYPE (arg1))
11119 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11120 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11122 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11123 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11124 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11125 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11126 build_zero_cst (TREE_TYPE (arg0)));
11129 return NULL_TREE;
11131 case UNORDERED_EXPR:
11132 case ORDERED_EXPR:
11133 case UNLT_EXPR:
11134 case UNLE_EXPR:
11135 case UNGT_EXPR:
11136 case UNGE_EXPR:
11137 case UNEQ_EXPR:
11138 case LTGT_EXPR:
11139 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11141 tree targ0 = strip_float_extensions (arg0);
11142 tree targ1 = strip_float_extensions (arg1);
11143 tree newtype = TREE_TYPE (targ0);
11145 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11146 newtype = TREE_TYPE (targ1);
11148 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11149 return fold_build2_loc (loc, code, type,
11150 fold_convert_loc (loc, newtype, targ0),
11151 fold_convert_loc (loc, newtype, targ1));
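/* For instance, with float operands f1 and f2 and any comparison
   CMP handled here:

     (double) f1 CMP (double) f2   -->   f1 CMP f2

   The float-to-double conversions are exact, so the comparison can
   be carried out in the narrower common type.  */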
11154 return NULL_TREE;
11156 case COMPOUND_EXPR:
11157 /* When pedantic, a compound expression can be neither an lvalue
11158 nor an integer constant expression. */
11159 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11160 return NULL_TREE;
11161 /* Don't let (0, 0) be a null pointer constant. */
11162 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11163 : fold_convert_loc (loc, type, arg1);
11164 return pedantic_non_lvalue_loc (loc, tem);
11166 case ASSERT_EXPR:
11167 /* An ASSERT_EXPR should never be passed to fold_binary. */
11168 gcc_unreachable ();
11170 default:
11171 return NULL_TREE;
11172 } /* switch (code) */
11175 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11176 ((A & N) + B) & M -> (A + B) & M
11177 Similarly if (N & M) == 0,
11178 ((A | N) + B) & M -> (A + B) & M
11179 and for - instead of + (or unary - instead of +)
11180 and/or ^ instead of |.
11181 If B is constant and (B & M) == 0, fold into A & M.
11183 This function is a helper for match.pd patterns. Return a non-NULL
11184 type in which the simplified operation should be performed, but
11185 only if some optimization is possible.
11187 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
11188 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
11189 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
11190 +/-. */
11191 tree
11192 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
11193 tree arg00, enum tree_code code00, tree arg000, tree arg001,
11194 tree arg01, enum tree_code code01, tree arg010, tree arg011,
11195 tree *pmop)
11197 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
11198 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
11199 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11200 if (~cst1 == 0
11201 || (cst1 & (cst1 + 1)) != 0
11202 || !INTEGRAL_TYPE_P (type)
11203 || (!TYPE_OVERFLOW_WRAPS (type)
11204 && TREE_CODE (type) != INTEGER_TYPE)
11205 || (wi::max_value (type) & cst1) != cst1)
11206 return NULL_TREE;
11208 enum tree_code codes[2] = { code00, code01 };
11209 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
11210 int which = 0;
11211 wide_int cst0;
11213 /* Now we know that arg0 is (C + D) or (C - D) or -C and
11214 arg1 (M) is (1LL << cst) - 1.
11215 Store C into PMOP[0] and D into PMOP[1]. */
11216 pmop[0] = arg00;
11217 pmop[1] = arg01;
11218 which = code != NEGATE_EXPR;
11220 for (; which >= 0; which--)
11221 switch (codes[which])
11223 case BIT_AND_EXPR:
11224 case BIT_IOR_EXPR:
11225 case BIT_XOR_EXPR:
11226 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
11227 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
11228 if (codes[which] == BIT_AND_EXPR)
11230 if (cst0 != cst1)
11231 break;
11233 else if (cst0 != 0)
11234 break;
11235 /* If C or D is of the form (A & N) where
11236 (N & M) == M, or of the form (A | N) or
11237 (A ^ N) where (N & M) == 0, replace it with A. */
11238 pmop[which] = arg0xx[2 * which];
11239 break;
11240 case ERROR_MARK:
11241 if (TREE_CODE (pmop[which]) != INTEGER_CST)
11242 break;
11243 /* If C or D is a N where (N & M) == 0, it can be
11244 omitted (replaced with 0). */
11245 if ((code == PLUS_EXPR
11246 || (code == MINUS_EXPR && which == 0))
11247 && (cst1 & wi::to_wide (pmop[which])) == 0)
11248 pmop[which] = build_int_cst (type, 0);
11249 /* Similarly, with C - N where (-N & M) == 0. */
11250 if (code == MINUS_EXPR
11251 && which == 1
11252 && (cst1 & -wi::to_wide (pmop[which])) == 0)
11253 pmop[which] = build_int_cst (type, 0);
11254 break;
11255 default:
11256 gcc_unreachable ();
11259 /* Only build anything new if we optimized one or both arguments above. */
11260 if (pmop[0] == arg00 && pmop[1] == arg01)
11261 return NULL_TREE;
11263 if (TYPE_OVERFLOW_WRAPS (type))
11264 return type;
11265 else
11266 return unsigned_type_for (type);
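/* A worked example, with M == 0xff (so M == (1 << 8) - 1):

     ((a & 0x1ff) + b) & 0xff   -->   (a + b) & 0xff   ((N & M) == M)
     ((a | 0x100) + b) & 0xff   -->   (a + b) & 0xff   ((N & M) == 0)

   Carries in the addition only propagate upward, so in both cases
   the inner bit operation cannot change the low eight bits of the
   sum.  */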
11269 /* Used by contains_label_p and contains_label_1. */
11271 struct contains_label_data
11273 hash_set<tree> *pset;
11274 bool inside_switch_p;
11277 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11278 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
11279 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
11281 static tree
11282 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
11284 contains_label_data *d = (contains_label_data *) data;
11285 switch (TREE_CODE (*tp))
11287 case LABEL_EXPR:
11288 return *tp;
11290 case CASE_LABEL_EXPR:
11291 if (!d->inside_switch_p)
11292 return *tp;
11293 return NULL_TREE;
11295 case SWITCH_EXPR:
11296 if (!d->inside_switch_p)
11298 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
11299 return *tp;
11300 d->inside_switch_p = true;
11301 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
11302 return *tp;
11303 d->inside_switch_p = false;
11304 *walk_subtrees = 0;
11306 return NULL_TREE;
11308 case GOTO_EXPR:
11309 *walk_subtrees = 0;
11310 return NULL_TREE;
11312 default:
11313 return NULL_TREE;
11317 /* Return whether the sub-tree ST contains a label which is accessible from
11318 outside the sub-tree. */
11320 static bool
11321 contains_label_p (tree st)
11323 hash_set<tree> pset;
11324 contains_label_data data = { &pset, false };
11325 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
11328 /* Fold a ternary expression of code CODE and type TYPE with operands
11329 OP0, OP1, and OP2. Return the folded expression if folding is
11330 successful. Otherwise, return NULL_TREE. */
11332 tree
11333 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11334 tree op0, tree op1, tree op2)
11336 tree tem;
11337 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11338 enum tree_code_class kind = TREE_CODE_CLASS (code);
11340 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11341 && TREE_CODE_LENGTH (code) == 3);
11343 /* If this is a commutative operation, and OP0 is a constant, move it
11344 to OP1 to reduce the number of tests below. */
11345 if (commutative_ternary_tree_code (code)
11346 && tree_swap_operands_p (op0, op1))
11347 return fold_build3_loc (loc, code, type, op1, op0, op2);
11349 tem = generic_simplify (loc, code, type, op0, op1, op2);
11350 if (tem)
11351 return tem;
11353 /* Strip any conversions that don't change the mode. This is safe
11354 for every expression, except for a comparison expression because
11355 its signedness is derived from its operands. So, in the latter
11356 case, only strip conversions that don't change the signedness.
11358 Note that this is done as an internal manipulation within the
11359 constant folder, in order to find the simplest representation of
11360 the arguments so that their form can be studied. In any cases,
11361 the appropriate type conversions should be put back in the tree
11362 that will get out of the constant folder. */
11363 if (op0)
11365 arg0 = op0;
11366 STRIP_NOPS (arg0);
11369 if (op1)
11371 arg1 = op1;
11372 STRIP_NOPS (arg1);
11375 if (op2)
11377 arg2 = op2;
11378 STRIP_NOPS (arg2);
11381 switch (code)
11383 case COMPONENT_REF:
11384 if (TREE_CODE (arg0) == CONSTRUCTOR
11385 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11387 unsigned HOST_WIDE_INT idx;
11388 tree field, value;
11389 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11390 if (field == arg1)
11391 return value;
11393 return NULL_TREE;
11395 case COND_EXPR:
11396 case VEC_COND_EXPR:
11397 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11398 so all simple results must be passed through pedantic_non_lvalue. */
11399 if (TREE_CODE (arg0) == INTEGER_CST)
11401 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11402 tem = integer_zerop (arg0) ? op2 : op1;
11403 /* Only optimize constant conditions when the selected branch
11404 has the same type as the COND_EXPR. This avoids optimizing
11405 away "c ? x : throw", where the throw has a void type.
11406 Also avoid discarding an operand that contains a label. */
11407 if ((!TREE_SIDE_EFFECTS (unused_op)
11408 || !contains_label_p (unused_op))
11409 && (! VOID_TYPE_P (TREE_TYPE (tem))
11410 || VOID_TYPE_P (type)))
11411 return pedantic_non_lvalue_loc (loc, tem);
11412 return NULL_TREE;
11414 else if (TREE_CODE (arg0) == VECTOR_CST)
11416 unsigned HOST_WIDE_INT nelts;
11417 if ((TREE_CODE (arg1) == VECTOR_CST
11418 || TREE_CODE (arg1) == CONSTRUCTOR)
11419 && (TREE_CODE (arg2) == VECTOR_CST
11420 || TREE_CODE (arg2) == CONSTRUCTOR)
11421 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
11423 vec_perm_builder sel (nelts, nelts, 1);
11424 for (unsigned int i = 0; i < nelts; i++)
11426 tree val = VECTOR_CST_ELT (arg0, i);
11427 if (integer_all_onesp (val))
11428 sel.quick_push (i);
11429 else if (integer_zerop (val))
11430 sel.quick_push (nelts + i);
11431 else /* Currently unreachable. */
11432 return NULL_TREE;
11434 vec_perm_indices indices (sel, 2, nelts);
11435 tree t = fold_vec_perm (type, arg1, arg2, indices);
11436 if (t != NULL_TREE)
11437 return t;
11441 /* If we have A op B ? A : C, we may be able to convert this to a
11442 simpler expression, depending on the operation and the values
11443 of B and C. Signed zeros prevent all of these transformations,
11444 for reasons given above each one.
11446 Also try swapping the arguments and inverting the conditional. */
11447 if (COMPARISON_CLASS_P (arg0)
11448 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
11449 && !HONOR_SIGNED_ZEROS (element_mode (op1)))
11451 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11452 if (tem)
11453 return tem;
11456 if (COMPARISON_CLASS_P (arg0)
11457 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
11458 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11460 location_t loc0 = expr_location_or (arg0, loc);
11461 tem = fold_invert_truthvalue (loc0, arg0);
11462 if (tem && COMPARISON_CLASS_P (tem))
11464 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11465 if (tem)
11466 return tem;
11470 /* If the second operand is simpler than the third, swap them
11471 since that produces better jump optimization results. */
11472 if (truth_value_p (TREE_CODE (arg0))
11473 && tree_swap_operands_p (op1, op2))
11475 location_t loc0 = expr_location_or (arg0, loc);
11476 /* See if this can be inverted. If it can't, possibly because
11477 it was a floating-point inequality comparison, don't do
11478 anything. */
11479 tem = fold_invert_truthvalue (loc0, arg0);
11480 if (tem)
11481 return fold_build3_loc (loc, code, type, tem, op2, op1);
11484 /* Convert A ? 1 : 0 to simply A. */
11485 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11486 : (integer_onep (op1)
11487 && !VECTOR_TYPE_P (type)))
11488 && integer_zerop (op2)
11489 /* If we try to convert OP0 to our type, the
11490 call to fold will try to move the conversion inside
11491 a COND, which will recurse. In that case, the COND_EXPR
11492 is probably the best choice, so leave it alone. */
11493 && type == TREE_TYPE (arg0))
11494 return pedantic_non_lvalue_loc (loc, arg0);
11496 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11497 over COND_EXPR in cases such as floating point comparisons. */
11498 if (integer_zerop (op1)
11499 && code == COND_EXPR
11500 && integer_onep (op2)
11501 && !VECTOR_TYPE_P (type)
11502 && truth_value_p (TREE_CODE (arg0)))
11503 return pedantic_non_lvalue_loc (loc,
11504 fold_convert_loc (loc, type,
11505 invert_truthvalue_loc (loc,
11506 arg0)));
11508 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11509 if (TREE_CODE (arg0) == LT_EXPR
11510 && integer_zerop (TREE_OPERAND (arg0, 1))
11511 && integer_zerop (op2)
11512 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11514 /* sign_bit_p looks through both zero and sign extensions,
11515 but for this optimization only sign extensions are
11516 usable. */
11517 tree tem2 = TREE_OPERAND (arg0, 0);
11518 while (tem != tem2)
11520 if (TREE_CODE (tem2) != NOP_EXPR
11521 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11523 tem = NULL_TREE;
11524 break;
11526 tem2 = TREE_OPERAND (tem2, 0);
11528 /* sign_bit_p only checks ARG1 bits within A's precision.
11529 If <sign bit of A> has wider type than A, bits outside
11530 of A's precision in <sign bit of A> need to be checked.
11531 If they are all 0, this optimization needs to be done
11532 in unsigned A's type; if they are all 1, in signed A's type;
11533 otherwise this can't be done. */
11534 if (tem
11535 && TYPE_PRECISION (TREE_TYPE (tem))
11536 < TYPE_PRECISION (TREE_TYPE (arg1))
11537 && TYPE_PRECISION (TREE_TYPE (tem))
11538 < TYPE_PRECISION (type))
11540 int inner_width, outer_width;
11541 tree tem_type;
11543 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11544 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11545 if (outer_width > TYPE_PRECISION (type))
11546 outer_width = TYPE_PRECISION (type);
11548 wide_int mask = wi::shifted_mask
11549 (inner_width, outer_width - inner_width, false,
11550 TYPE_PRECISION (TREE_TYPE (arg1)));
11552 wide_int common = mask & wi::to_wide (arg1);
11553 if (common == mask)
11555 tem_type = signed_type_for (TREE_TYPE (tem));
11556 tem = fold_convert_loc (loc, tem_type, tem);
11558 else if (common == 0)
11560 tem_type = unsigned_type_for (TREE_TYPE (tem));
11561 tem = fold_convert_loc (loc, tem_type, tem);
11563 else
11564 tem = NULL;
11567 if (tem)
11568 return
11569 fold_convert_loc (loc, type,
11570 fold_build2_loc (loc, BIT_AND_EXPR,
11571 TREE_TYPE (tem), tem,
11572 fold_convert_loc (loc,
11573 TREE_TYPE (tem),
11574 arg1)));
11577 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11578 already handled above. */
11579 if (TREE_CODE (arg0) == BIT_AND_EXPR
11580 && integer_onep (TREE_OPERAND (arg0, 1))
11581 && integer_zerop (op2)
11582 && integer_pow2p (arg1))
11584 tree tem = TREE_OPERAND (arg0, 0);
11585 STRIP_NOPS (tem);
11586 if (TREE_CODE (tem) == RSHIFT_EXPR
11587 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11588 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11589 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11590 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11591 fold_convert_loc (loc, type,
11592 TREE_OPERAND (tem, 0)),
11593 op1);
11596 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11597 is probably obsolete because the first operand should be a
11598 truth value (that's why we have the two cases above), but let's
11599 leave it in until we can confirm this for all front-ends. */
11600 if (integer_zerop (op2)
11601 && TREE_CODE (arg0) == NE_EXPR
11602 && integer_zerop (TREE_OPERAND (arg0, 1))
11603 && integer_pow2p (arg1)
11604 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11605 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11606 arg1, OEP_ONLY_CONST))
11607 return pedantic_non_lvalue_loc (loc,
11608 fold_convert_loc (loc, type,
11609 TREE_OPERAND (arg0, 0)));
11611 /* Disable the transformations below for vectors, since
11612 fold_binary_op_with_conditional_arg may undo them immediately,
11613 yielding an infinite loop. */
11614 if (code == VEC_COND_EXPR)
11615 return NULL_TREE;
11617 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11618 if (integer_zerop (op2)
11619 && truth_value_p (TREE_CODE (arg0))
11620 && truth_value_p (TREE_CODE (arg1))
11621 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11622 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11623 : TRUTH_ANDIF_EXPR,
11624 type, fold_convert_loc (loc, type, arg0), op1);
11626 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11627 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11628 && truth_value_p (TREE_CODE (arg0))
11629 && truth_value_p (TREE_CODE (arg1))
11630 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11632 location_t loc0 = expr_location_or (arg0, loc);
11633 /* Only perform transformation if ARG0 is easily inverted. */
11634 tem = fold_invert_truthvalue (loc0, arg0);
11635 if (tem)
11636 return fold_build2_loc (loc, code == VEC_COND_EXPR
11637 ? BIT_IOR_EXPR
11638 : TRUTH_ORIF_EXPR,
11639 type, fold_convert_loc (loc, type, tem),
11640 op1);
11643 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11644 if (integer_zerop (arg1)
11645 && truth_value_p (TREE_CODE (arg0))
11646 && truth_value_p (TREE_CODE (op2))
11647 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11649 location_t loc0 = expr_location_or (arg0, loc);
11650 /* Only perform transformation if ARG0 is easily inverted. */
11651 tem = fold_invert_truthvalue (loc0, arg0);
11652 if (tem)
11653 return fold_build2_loc (loc, code == VEC_COND_EXPR
11654 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11655 type, fold_convert_loc (loc, type, tem),
11656 op2);
11659 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11660 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11661 && truth_value_p (TREE_CODE (arg0))
11662 && truth_value_p (TREE_CODE (op2))
11663 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11664 return fold_build2_loc (loc, code == VEC_COND_EXPR
11665 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11666 type, fold_convert_loc (loc, type, arg0), op2);
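/* Summary of the truth-value conversions above, for truth values
   a and b:

     a ? b : 0   -->   a && b
     a ? b : 1   -->   !a || b
     a ? 0 : b   -->   !a && b
     a ? 1 : b   -->   a || b

   (using BIT_AND_EXPR/BIT_IOR_EXPR instead of the short-circuit
   forms for VEC_COND_EXPR).  */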
11668 return NULL_TREE;
11670 case CALL_EXPR:
11671 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11672 of fold_ternary on them. */
11673 gcc_unreachable ();
11675 case BIT_FIELD_REF:
11676 if (TREE_CODE (arg0) == VECTOR_CST
11677 && (type == TREE_TYPE (TREE_TYPE (arg0))
11678 || (VECTOR_TYPE_P (type)
11679 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
11680 && tree_fits_uhwi_p (op1)
11681 && tree_fits_uhwi_p (op2))
11683 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11684 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11685 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11686 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11688 if (n != 0
11689 && (idx % width) == 0
11690 && (n % width) == 0
11691 && known_le ((idx + n) / width,
11692 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
11694 idx = idx / width;
11695 n = n / width;
11697 if (TREE_CODE (arg0) == VECTOR_CST)
11699 if (n == 1)
11701 tem = VECTOR_CST_ELT (arg0, idx);
11702 if (VECTOR_TYPE_P (type))
11703 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
11704 return tem;
11707 tree_vector_builder vals (type, n, 1);
11708 for (unsigned i = 0; i < n; ++i)
11709 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
11710 return vals.build ();
11715 /* On constants we can use native encode/interpret to constant
11716 fold (nearly) all BIT_FIELD_REFs. */
11717 if (CONSTANT_CLASS_P (arg0)
11718 && can_native_interpret_type_p (type)
11719 && BITS_PER_UNIT == 8
11720 && tree_fits_uhwi_p (op1)
11721 && tree_fits_uhwi_p (op2))
11723 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11724 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11725 /* Limit us to a reasonable amount of work. To relax the
11726 other limitations we need bit-shifting of the buffer
11727 and rounding up the size. */
11728 if (bitpos % BITS_PER_UNIT == 0
11729 && bitsize % BITS_PER_UNIT == 0
11730 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11732 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11733 unsigned HOST_WIDE_INT len
11734 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11735 bitpos / BITS_PER_UNIT);
11736 if (len > 0
11737 && len * BITS_PER_UNIT >= bitsize)
11739 tree v = native_interpret_expr (type, b,
11740 bitsize / BITS_PER_UNIT);
11741 if (v)
11742 return v;
11747 return NULL_TREE;
11749 case VEC_PERM_EXPR:
11750 if (TREE_CODE (arg2) == VECTOR_CST)
11752 /* Build a vector of integers from the tree mask. */
11753 vec_perm_builder builder;
11754 if (!tree_to_vec_perm_builder (&builder, arg2))
11755 return NULL_TREE;
11757 /* Create a vec_perm_indices for the integer vector. */
11758 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
11759 bool single_arg = (op0 == op1);
11760 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
11762 /* Check for cases that fold to OP0 or OP1 in their original
11763 element order. */
11764 if (sel.series_p (0, 1, 0, 1))
11765 return op0;
11766 if (sel.series_p (0, 1, nelts, 1))
11767 return op1;
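/* For instance, with 4-element vectors the selector {0, 1, 2, 3}
   is the identity on OP0 and {4, 5, 6, 7} selects OP1 unchanged,
   so the VEC_PERM_EXPR folds to that operand outright.  */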
11769 if (!single_arg)
11771 if (sel.all_from_input_p (0))
11772 op1 = op0;
11773 else if (sel.all_from_input_p (1))
11775 op0 = op1;
11776 sel.rotate_inputs (1);
11780 if ((TREE_CODE (op0) == VECTOR_CST
11781 || TREE_CODE (op0) == CONSTRUCTOR)
11782 && (TREE_CODE (op1) == VECTOR_CST
11783 || TREE_CODE (op1) == CONSTRUCTOR))
11785 tree t = fold_vec_perm (type, op0, op1, sel);
11786 if (t != NULL_TREE)
11787 return t;
11790 bool changed = (op0 == op1 && !single_arg);
11792 /* Generate a canonical form of the selector. */
11793 if (arg2 == op2 && sel.encoding () != builder)
11795 /* Some targets are deficient and fail to expand a single
11796 argument permutation while still allowing an equivalent
11797 2-argument version. */
11798 if (sel.ninputs () == 2
11799 || can_vec_perm_const_p (TYPE_MODE (type), sel, false))
11800 op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel);
11801 else
11803 vec_perm_indices sel2 (builder, 2, nelts);
11804 if (can_vec_perm_const_p (TYPE_MODE (type), sel2, false))
11805 op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel2);
11806 else
11807 /* Not directly supported with either encoding,
11808 so use the preferred form. */
11809 op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel);
11811 changed = true;
11814 if (changed)
11815 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11817 return NULL_TREE;
11819 case BIT_INSERT_EXPR:
11820 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
11821 if (TREE_CODE (arg0) == INTEGER_CST
11822 && TREE_CODE (arg1) == INTEGER_CST)
11824 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11825 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11826 wide_int tem = (wi::to_wide (arg0)
11827 & wi::shifted_mask (bitpos, bitsize, true,
11828 TYPE_PRECISION (type)));
11829 wide_int tem2
11830 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11831 bitsize), bitpos);
11832 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
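/* A worked example, assuming a 32-bit type: inserting the 8-bit
   constant 0xab at bit position 8 into 0xffff0000 masks out bits
   8-15 and ORs in 0xab << 8, yielding 0xffffab00.  */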
11834 else if (TREE_CODE (arg0) == VECTOR_CST
11835 && CONSTANT_CLASS_P (arg1)
11836 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11837 TREE_TYPE (arg1)))
11839 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11840 unsigned HOST_WIDE_INT elsize
11841 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11842 if (bitpos % elsize == 0)
11844 unsigned k = bitpos / elsize;
11845 unsigned HOST_WIDE_INT nelts;
11846 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11847 return arg0;
11848 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
11850 tree_vector_builder elts (type, nelts, 1);
11851 elts.quick_grow (nelts);
11852 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
11853 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
11854 return elts.build ();
11858 return NULL_TREE;
11860 default:
11861 return NULL_TREE;
11862 } /* switch (code) */
11865 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11866 of an array (or vector). */
11868 tree
11869 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11871 tree index_type = NULL_TREE;
11872 offset_int low_bound = 0;
11874 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11876 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11877 if (domain_type && TYPE_MIN_VALUE (domain_type))
11879 /* Static constructors for variably sized objects make no sense. */
11880 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11881 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11882 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11886 if (index_type)
11887 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11888 TYPE_SIGN (index_type));
11890 offset_int index = low_bound - 1;
11891 if (index_type)
11892 index = wi::ext (index, TYPE_PRECISION (index_type),
11893 TYPE_SIGN (index_type));
11895 offset_int max_index;
11896 unsigned HOST_WIDE_INT cnt;
11897 tree cfield, cval;
11899 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11901 /* Array constructor might explicitly set index, or specify a range,
11902 or leave the index NULL, meaning that it is the next index after
11903 the previous one. */
11904 if (cfield)
11906 if (TREE_CODE (cfield) == INTEGER_CST)
11907 max_index = index = wi::to_offset (cfield);
11908 else
11910 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11911 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11912 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11915 else
11917 index += 1;
11918 if (index_type)
11919 index = wi::ext (index, TYPE_PRECISION (index_type),
11920 TYPE_SIGN (index_type));
11921 max_index = index;
11924 /* Do we have a match? */
11925 if (wi::cmpu (access_index, index) >= 0
11926 && wi::cmpu (access_index, max_index) <= 0)
11927 return cval;
11929 return NULL_TREE;
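/* A usage sketch, with a hypothetical CTOR for {10, 20, 30}:
   get_array_ctor_element_at_index (ctor, 1) returns the INTEGER_CST
   20.  Elements given an explicit index or a RANGE_EXPR are matched
   by the normalization in the loop above.  */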
11932 /* Perform constant folding and related simplification of EXPR.
11933 The related simplifications include x*1 => x, x*0 => 0, etc.,
11934 and application of the associative law.
11935 NOP_EXPR conversions may be removed freely (as long as we
11936 are careful not to change the type of the overall expression).
11937 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11938 but we can constant-fold them if they have constant operands. */
11940 #ifdef ENABLE_FOLD_CHECKING
11941 # define fold(x) fold_1 (x)
11942 static tree fold_1 (tree);
11943 static
11944 #endif
11945 tree
11946 fold (tree expr)
11948 const tree t = expr;
11949 enum tree_code code = TREE_CODE (t);
11950 enum tree_code_class kind = TREE_CODE_CLASS (code);
11951 tree tem;
11952 location_t loc = EXPR_LOCATION (expr);
11954 /* Return right away if a constant. */
11955 if (kind == tcc_constant)
11956 return t;
11958 /* CALL_EXPR-like objects with variable numbers of operands are
11959 treated specially. */
11960 if (kind == tcc_vl_exp)
11962 if (code == CALL_EXPR)
11964 tem = fold_call_expr (loc, expr, false);
11965 return tem ? tem : expr;
11967 return expr;
11970 if (IS_EXPR_CODE_CLASS (kind))
11972 tree type = TREE_TYPE (t);
11973 tree op0, op1, op2;
11975 switch (TREE_CODE_LENGTH (code))
11977 case 1:
11978 op0 = TREE_OPERAND (t, 0);
11979 tem = fold_unary_loc (loc, code, type, op0);
11980 return tem ? tem : expr;
11981 case 2:
11982 op0 = TREE_OPERAND (t, 0);
11983 op1 = TREE_OPERAND (t, 1);
11984 tem = fold_binary_loc (loc, code, type, op0, op1);
11985 return tem ? tem : expr;
11986 case 3:
11987 op0 = TREE_OPERAND (t, 0);
11988 op1 = TREE_OPERAND (t, 1);
11989 op2 = TREE_OPERAND (t, 2);
11990 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11991 return tem ? tem : expr;
11992 default:
11993 break;
11997 switch (code)
11999 case ARRAY_REF:
12001 tree op0 = TREE_OPERAND (t, 0);
12002 tree op1 = TREE_OPERAND (t, 1);
12004 if (TREE_CODE (op1) == INTEGER_CST
12005 && TREE_CODE (op0) == CONSTRUCTOR
12006 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12008 tree val = get_array_ctor_element_at_index (op0,
12009 wi::to_offset (op1));
12010 if (val)
12011 return val;
12014 return t;
12017 /* Return a VECTOR_CST if possible. */
12018 case CONSTRUCTOR:
12020 tree type = TREE_TYPE (t);
12021 if (TREE_CODE (type) != VECTOR_TYPE)
12022 return t;
12024 unsigned i;
12025 tree val;
12026 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12027 if (! CONSTANT_CLASS_P (val))
12028 return t;
12030 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12033 case CONST_DECL:
12034 return fold (DECL_INITIAL (t));
12036 default:
12037 return t;
12038 } /* switch (code) */
12041 #ifdef ENABLE_FOLD_CHECKING
12042 #undef fold
12044 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12045 hash_table<nofree_ptr_hash<const tree_node> > *);
12046 static void fold_check_failed (const_tree, const_tree);
12047 void print_fold_checksum (const_tree);
12049 /* When --enable-checking=fold, compute a digest of EXPR before
12050 and after the actual fold call, to verify that fold did not
12051 accidentally change the original expr. */
12053 tree
12054 fold (tree expr)
12056 tree ret;
12057 struct md5_ctx ctx;
12058 unsigned char checksum_before[16], checksum_after[16];
12059 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12061 md5_init_ctx (&ctx);
12062 fold_checksum_tree (expr, &ctx, &ht);
12063 md5_finish_ctx (&ctx, checksum_before);
12064 ht.empty ();
12066 ret = fold_1 (expr);
12068 md5_init_ctx (&ctx);
12069 fold_checksum_tree (expr, &ctx, &ht);
12070 md5_finish_ctx (&ctx, checksum_after);
12072 if (memcmp (checksum_before, checksum_after, 16))
12073 fold_check_failed (expr, ret);
12075 return ret;
12078 void
12079 print_fold_checksum (const_tree expr)
12081 struct md5_ctx ctx;
12082 unsigned char checksum[16], cnt;
12083 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12085 md5_init_ctx (&ctx);
12086 fold_checksum_tree (expr, &ctx, &ht);
12087 md5_finish_ctx (&ctx, checksum);
12088 for (cnt = 0; cnt < 16; ++cnt)
12089 fprintf (stderr, "%02x", checksum[cnt]);
12090 putc ('\n', stderr);
12093 static void
12094 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12096 internal_error ("fold check: original tree changed by fold");
12099 static void
12100 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12101 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12103 const tree_node **slot;
12104 enum tree_code code;
12105 union tree_node buf;
12106 int i, len;
12108 recursive_label:
12109 if (expr == NULL)
12110 return;
12111 slot = ht->find_slot (expr, INSERT);
12112 if (*slot != NULL)
12113 return;
12114 *slot = expr;
12115 code = TREE_CODE (expr);
12116 if (TREE_CODE_CLASS (code) == tcc_declaration
12117 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12119 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12120 memcpy ((char *) &buf, expr, tree_size (expr));
12121 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12122 buf.decl_with_vis.symtab_node = NULL;
12123 expr = (tree) &buf;
12125 else if (TREE_CODE_CLASS (code) == tcc_type
12126 && (TYPE_POINTER_TO (expr)
12127 || TYPE_REFERENCE_TO (expr)
12128 || TYPE_CACHED_VALUES_P (expr)
12129 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12130 || TYPE_NEXT_VARIANT (expr)
12131 || TYPE_ALIAS_SET_KNOWN_P (expr)))
12133 /* Allow these fields to be modified. */
12134 tree tmp;
12135 memcpy ((char *) &buf, expr, tree_size (expr));
12136 expr = tmp = (tree) &buf;
12137 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12138 TYPE_POINTER_TO (tmp) = NULL;
12139 TYPE_REFERENCE_TO (tmp) = NULL;
12140 TYPE_NEXT_VARIANT (tmp) = NULL;
12141 TYPE_ALIAS_SET (tmp) = -1;
12142 if (TYPE_CACHED_VALUES_P (tmp))
12144 TYPE_CACHED_VALUES_P (tmp) = 0;
12145 TYPE_CACHED_VALUES (tmp) = NULL;
12148 md5_process_bytes (expr, tree_size (expr), ctx);
12149 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12150 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12151 if (TREE_CODE_CLASS (code) != tcc_type
12152 && TREE_CODE_CLASS (code) != tcc_declaration
12153 && code != TREE_LIST
12154 && code != SSA_NAME
12155 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12156 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12157 switch (TREE_CODE_CLASS (code))
12159 case tcc_constant:
12160 switch (code)
12162 case STRING_CST:
12163 md5_process_bytes (TREE_STRING_POINTER (expr),
12164 TREE_STRING_LENGTH (expr), ctx);
12165 break;
12166 case COMPLEX_CST:
12167 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12168 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12169 break;
12170 case VECTOR_CST:
12171 len = vector_cst_encoded_nelts (expr);
12172 for (i = 0; i < len; ++i)
12173 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
12174 break;
12175 default:
12176 break;
12178 break;
12179 case tcc_exceptional:
12180 switch (code)
12182 case TREE_LIST:
12183 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12184 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12185 expr = TREE_CHAIN (expr);
12186 goto recursive_label;
12187 break;
12188 case TREE_VEC:
12189 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12190 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12191 break;
12192 default:
12193 break;
12195 break;
12196 case tcc_expression:
12197 case tcc_reference:
12198 case tcc_comparison:
12199 case tcc_unary:
12200 case tcc_binary:
12201 case tcc_statement:
12202 case tcc_vl_exp:
12203 len = TREE_OPERAND_LENGTH (expr);
12204 for (i = 0; i < len; ++i)
12205 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12206 break;
12207 case tcc_declaration:
12208 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12209 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12210 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12212 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12213 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12214 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12215 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12216 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12219 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12221 if (TREE_CODE (expr) == FUNCTION_DECL)
12223 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12224 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12226 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12228 break;
12229 case tcc_type:
12230 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12231 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12232 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12233 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12234 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12235 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12236 if (INTEGRAL_TYPE_P (expr)
12237 || SCALAR_FLOAT_TYPE_P (expr))
12239 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12240 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12242 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12243 if (TREE_CODE (expr) == RECORD_TYPE
12244 || TREE_CODE (expr) == UNION_TYPE
12245 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12246 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12247 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12248 break;
12249 default:
12250 break;
12254 /* Helper function for outputting the checksum of a tree T. When
12255 debugging with gdb, you can "define mynext" to be "next" followed
12256 by "call debug_fold_checksum (op0)", then just trace down till the
12257 outputs differ. */
12259 DEBUG_FUNCTION void
12260 debug_fold_checksum (const_tree t)
12262 int i;
12263 unsigned char checksum[16];
12264 struct md5_ctx ctx;
12265 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12267 md5_init_ctx (&ctx);
12268 fold_checksum_tree (t, &ctx, &ht);
12269 md5_finish_ctx (&ctx, checksum);
12270 ht.empty ();
12272 for (i = 0; i < 16; i++)
12273 fprintf (stderr, "%d ", checksum[i]);
12275 fprintf (stderr, "\n");
12278 #endif
12280 /* Fold a unary tree expression with code CODE of type TYPE with an
12281 operand OP0. LOC is the location of the resulting expression.
12282 Return a folded expression if successful. Otherwise, return a tree
12283 expression with code CODE of type TYPE with an operand OP0. */
12285 tree
12286 fold_build1_loc (location_t loc,
12287 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12289 tree tem;
12290 #ifdef ENABLE_FOLD_CHECKING
12291 unsigned char checksum_before[16], checksum_after[16];
12292 struct md5_ctx ctx;
12293 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12295 md5_init_ctx (&ctx);
12296 fold_checksum_tree (op0, &ctx, &ht);
12297 md5_finish_ctx (&ctx, checksum_before);
12298 ht.empty ();
12299 #endif
12301 tem = fold_unary_loc (loc, code, type, op0);
12302 if (!tem)
12303 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
12305 #ifdef ENABLE_FOLD_CHECKING
12306 md5_init_ctx (&ctx);
12307 fold_checksum_tree (op0, &ctx, &ht);
12308 md5_finish_ctx (&ctx, checksum_after);
12310 if (memcmp (checksum_before, checksum_after, 16))
12311 fold_check_failed (op0, tem);
12312 #endif
12313 return tem;
12316 /* Fold a binary tree expression with code CODE of type TYPE with
12317 operands OP0 and OP1. LOC is the location of the resulting
12318 expression. Return a folded expression if successful. Otherwise,
12319 return a tree expression with code CODE of type TYPE with operands
12320 OP0 and OP1. */
12322 tree
12323 fold_build2_loc (location_t loc,
12324 enum tree_code code, tree type, tree op0, tree op1
12325 MEM_STAT_DECL)
12327 tree tem;
12328 #ifdef ENABLE_FOLD_CHECKING
12329 unsigned char checksum_before_op0[16],
12330 checksum_before_op1[16],
12331 checksum_after_op0[16],
12332 checksum_after_op1[16];
12333 struct md5_ctx ctx;
12334 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12336 md5_init_ctx (&ctx);
12337 fold_checksum_tree (op0, &ctx, &ht);
12338 md5_finish_ctx (&ctx, checksum_before_op0);
12339 ht.empty ();
12341 md5_init_ctx (&ctx);
12342 fold_checksum_tree (op1, &ctx, &ht);
12343 md5_finish_ctx (&ctx, checksum_before_op1);
12344 ht.empty ();
12345 #endif
12347 tem = fold_binary_loc (loc, code, type, op0, op1);
12348 if (!tem)
12349 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12351 #ifdef ENABLE_FOLD_CHECKING
12352 md5_init_ctx (&ctx);
12353 fold_checksum_tree (op0, &ctx, &ht);
12354 md5_finish_ctx (&ctx, checksum_after_op0);
12355 ht.empty ();
12357 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12358 fold_check_failed (op0, tem);
12360 md5_init_ctx (&ctx);
12361 fold_checksum_tree (op1, &ctx, &ht);
12362 md5_finish_ctx (&ctx, checksum_after_op1);
12364 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12365 fold_check_failed (op1, tem);
12366 #endif
12367 return tem;
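/* A usage sketch, with a hypothetical integral tree A and location
   LOC: to build a folded addition of A and zero,

     tree sum = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (a),
                                 a, build_zero_cst (TREE_TYPE (a)));

   fold_binary_loc recognizes A + 0 and hands back A, so no new
   PLUS_EXPR node is allocated.  */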
12370 /* Fold a ternary tree expression with code CODE of type TYPE with
12371 operands OP0, OP1, and OP2. Return a folded expression if
12372 successful. Otherwise, return a tree expression with code CODE of
12373 type TYPE with operands OP0, OP1, and OP2. */
12375 tree
12376 fold_build3_loc (location_t loc, enum tree_code code, tree type,
12377 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12379 tree tem;
12380 #ifdef ENABLE_FOLD_CHECKING
12381 unsigned char checksum_before_op0[16],
12382 checksum_before_op1[16],
12383 checksum_before_op2[16],
12384 checksum_after_op0[16],
12385 checksum_after_op1[16],
12386 checksum_after_op2[16];
12387 struct md5_ctx ctx;
12388 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12390 md5_init_ctx (&ctx);
12391 fold_checksum_tree (op0, &ctx, &ht);
12392 md5_finish_ctx (&ctx, checksum_before_op0);
12393 ht.empty ();
12395 md5_init_ctx (&ctx);
12396 fold_checksum_tree (op1, &ctx, &ht);
12397 md5_finish_ctx (&ctx, checksum_before_op1);
12398 ht.empty ();
12400 md5_init_ctx (&ctx);
12401 fold_checksum_tree (op2, &ctx, &ht);
12402 md5_finish_ctx (&ctx, checksum_before_op2);
12403 ht.empty ();
12404 #endif
12406 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12407 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12408 if (!tem)
12409 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12411 #ifdef ENABLE_FOLD_CHECKING
12412 md5_init_ctx (&ctx);
12413 fold_checksum_tree (op0, &ctx, &ht);
12414 md5_finish_ctx (&ctx, checksum_after_op0);
12415 ht.empty ();
12417 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12418 fold_check_failed (op0, tem);
12420 md5_init_ctx (&ctx);
12421 fold_checksum_tree (op1, &ctx, &ht);
12422 md5_finish_ctx (&ctx, checksum_after_op1);
12423 ht.empty ();
12425 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12426 fold_check_failed (op1, tem);
12428 md5_init_ctx (&ctx);
12429 fold_checksum_tree (op2, &ctx, &ht);
12430 md5_finish_ctx (&ctx, checksum_after_op2);
12432 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12433 fold_check_failed (op2, tem);
12434 #endif
12435 return tem;
12438 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12439 arguments in ARGARRAY, and a null static chain.
12440 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12441 of type TYPE from the given operands as constructed by build_call_array. */
12443 tree
12444 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12445 int nargs, tree *argarray)
12447 tree tem;
12448 #ifdef ENABLE_FOLD_CHECKING
12449 unsigned char checksum_before_fn[16],
12450 checksum_before_arglist[16],
12451 checksum_after_fn[16],
12452 checksum_after_arglist[16];
12453 struct md5_ctx ctx;
12454 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12455 int i;
12457 md5_init_ctx (&ctx);
12458 fold_checksum_tree (fn, &ctx, &ht);
12459 md5_finish_ctx (&ctx, checksum_before_fn);
12460 ht.empty ();
12462 md5_init_ctx (&ctx);
12463 for (i = 0; i < nargs; i++)
12464 fold_checksum_tree (argarray[i], &ctx, &ht);
12465 md5_finish_ctx (&ctx, checksum_before_arglist);
12466 ht.empty ();
12467 #endif
12469 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12470 if (!tem)
12471 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12473 #ifdef ENABLE_FOLD_CHECKING
12474 md5_init_ctx (&ctx);
12475 fold_checksum_tree (fn, &ctx, &ht);
12476 md5_finish_ctx (&ctx, checksum_after_fn);
12477 ht.empty ();
12479 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12480 fold_check_failed (fn, tem);
12482 md5_init_ctx (&ctx);
12483 for (i = 0; i < nargs; i++)
12484 fold_checksum_tree (argarray[i], &ctx, &ht);
12485 md5_finish_ctx (&ctx, checksum_after_arglist);
12487 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12488 fold_check_failed (NULL_TREE, tem);
12489 #endif
12490 return tem;
12493 /* Perform constant folding and related simplification of initializer
12494 expression EXPR. These behave identically to "fold_buildN" but ignore
12495 potential run-time traps and exceptions that fold must preserve. */
12497 #define START_FOLD_INIT \
12498 int saved_signaling_nans = flag_signaling_nans;\
12499 int saved_trapping_math = flag_trapping_math;\
12500 int saved_rounding_math = flag_rounding_math;\
12501 int saved_trapv = flag_trapv;\
12502 int saved_folding_initializer = folding_initializer;\
12503 flag_signaling_nans = 0;\
12504 flag_trapping_math = 0;\
12505 flag_rounding_math = 0;\
12506 flag_trapv = 0;\
12507 folding_initializer = 1;
12509 #define END_FOLD_INIT \
12510 flag_signaling_nans = saved_signaling_nans;\
12511 flag_trapping_math = saved_trapping_math;\
12512 flag_rounding_math = saved_rounding_math;\
12513 flag_trapv = saved_trapv;\
12514 folding_initializer = saved_folding_initializer;
12516 tree
12517 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12518 tree type, tree op)
12520 tree result;
12521 START_FOLD_INIT;
12523 result = fold_build1_loc (loc, code, type, op);
12525 END_FOLD_INIT;
12526 return result;
12529 tree
12530 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12531 tree type, tree op0, tree op1)
12533 tree result;
12534 START_FOLD_INIT;
12536 result = fold_build2_loc (loc, code, type, op0, op1);
12538 END_FOLD_INIT;
12539 return result;
12542 tree
12543 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12544 int nargs, tree *argarray)
12546 tree result;
12547 START_FOLD_INIT;
12549 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12551 END_FOLD_INIT;
12552 return result;
12555 #undef START_FOLD_INIT
12556 #undef END_FOLD_INIT
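/* A usage sketch (hypothetical caller): folding the initializer of

     static double d = 1.0 / 3.0;

   goes through fold_build2_initializer_loc so that -ftrapping-math and
   -frounding-math do not block the compile-time division:

     tree val = fold_build2_initializer_loc (input_location, RDIV_EXPR,
                                             double_type_node, one, three);

   where ONE and THREE stand for the REAL_CST operands.  */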
12558 /* Determine if first argument is a multiple of second argument. Return 0 if
12559 it is not, or we cannot easily determine it to be.
12561 An example of the sort of thing we care about (at this point; this routine
12562 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12563 fold cases do now) is discovering that
12565 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12567 is a multiple of
12569 SAVE_EXPR (J * 8)
12571 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12573 This code also handles discovering that
12575 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12577 is a multiple of 8 so we don't have to worry about dealing with a
12578 possible remainder.
12580 Note that we *look* inside a SAVE_EXPR only to determine how it was
12581 calculated; it is not safe for fold to do much of anything else with the
12582 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12583 at run time. For example, the latter example above *cannot* be implemented
12584 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12585 evaluation time of the original SAVE_EXPR is not necessarily the same at
12586 the time the new expression is evaluated. The only optimization of this
12587 sort that would be valid is changing
12589 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12591 divided by 8 to
12593 SAVE_EXPR (I) * SAVE_EXPR (J)
12595 (where the same SAVE_EXPR (J) is used in the original and the
12596 transformed version). */
12598 int
12599 multiple_of_p (tree type, const_tree top, const_tree bottom)
12601 gimple *stmt;
12602 tree t1, op1, op2;
12604 if (operand_equal_p (top, bottom, 0))
12605 return 1;
12607 if (TREE_CODE (type) != INTEGER_TYPE)
12608 return 0;
12610 switch (TREE_CODE (top))
12612 case BIT_AND_EXPR:
12613 /* Bitwise and provides a power of two multiple. If the mask is
12614 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12615 if (!integer_pow2p (bottom))
12616 return 0;
12617 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12618 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12620 case MULT_EXPR:
12621 if (TREE_CODE (bottom) == INTEGER_CST)
12623 op1 = TREE_OPERAND (top, 0);
12624 op2 = TREE_OPERAND (top, 1);
12625 if (TREE_CODE (op1) == INTEGER_CST)
12626 std::swap (op1, op2);
12627 if (TREE_CODE (op2) == INTEGER_CST)
12629 if (multiple_of_p (type, op2, bottom))
12630 return 1;
12631 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
12632 if (multiple_of_p (type, bottom, op2))
12634 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
12635 wi::to_widest (op2));
12636 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
12638 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
12639 return multiple_of_p (type, op1, op2);
12642 return multiple_of_p (type, op1, bottom);
12645 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12646 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12648 case MINUS_EXPR:
12649 /* It is impossible to prove precisely whether op0 - op1 is a multiple
12650 of bottom, so be conservative here and check whether both op0 and op1
12651 are multiples of bottom. Note we check the second operand first
12652 since it's usually simpler. */
12653 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12654 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12656 case PLUS_EXPR:
12657 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12658 as op0 - 3 if the expression has unsigned type. For example,
12659 (X / 3) * 3 + 0xfffffffd is a multiple of 3, but 0xfffffffd is not. */
12660 op1 = TREE_OPERAND (top, 1);
12661 if (TYPE_UNSIGNED (type)
12662 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
12663 op1 = fold_build1 (NEGATE_EXPR, type, op1);
12664 return (multiple_of_p (type, op1, bottom)
12665 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12667 case LSHIFT_EXPR:
12668 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12670 op1 = TREE_OPERAND (top, 1);
12671 /* const_binop may not detect overflow correctly,
12672 so check for it explicitly here. */
12673 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
12674 wi::to_wide (op1))
12675 && (t1 = fold_convert (type,
12676 const_binop (LSHIFT_EXPR, size_one_node,
12677 op1))) != 0
12678 && !TREE_OVERFLOW (t1))
12679 return multiple_of_p (type, t1, bottom);
12681 return 0;
12683 case NOP_EXPR:
12684 /* Can't handle conversions from non-integral or wider integral type. */
12685 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12686 || (TYPE_PRECISION (type)
12687 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12688 return 0;
12690 /* fall through */
12692 case SAVE_EXPR:
12693 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12695 case COND_EXPR:
12696 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12697 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12699 case INTEGER_CST:
12700 if (TREE_CODE (bottom) != INTEGER_CST
12701 || integer_zerop (bottom)
12702 || (TYPE_UNSIGNED (type)
12703 && (tree_int_cst_sgn (top) < 0
12704 || tree_int_cst_sgn (bottom) < 0)))
12705 return 0;
12706 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12707 SIGNED);
12709 case SSA_NAME:
12710 if (TREE_CODE (bottom) == INTEGER_CST
12711 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
12712 && gimple_code (stmt) == GIMPLE_ASSIGN)
12714 enum tree_code code = gimple_assign_rhs_code (stmt);
12716 /* Check for special cases to see if top is defined as multiple
12717 of bottom:
12719 top = X & ~(bottom - 1); bottom is a power of 2, or
12723 Y = X % bottom
12724 top = X - Y. */
12725 if (code == BIT_AND_EXPR
12726 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12727 && TREE_CODE (op2) == INTEGER_CST
12728 && integer_pow2p (bottom)
12729 && wi::multiple_of_p (wi::to_widest (op2),
12730 wi::to_widest (bottom), UNSIGNED))
12731 return 1;
12733 op1 = gimple_assign_rhs1 (stmt);
12734 if (code == MINUS_EXPR
12735 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12736 && TREE_CODE (op2) == SSA_NAME
12737 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
12738 && gimple_code (stmt) == GIMPLE_ASSIGN
12739 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
12740 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
12741 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
12742 return 1;
12745 /* fall through */
12747 default:
12748 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
12749 return multiple_p (wi::to_poly_widest (top),
12750 wi::to_poly_widest (bottom));
12752 return 0;
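/* A worked example of the MULT_EXPR case above: to decide whether
   (x * 2 + 2) * 4 is a multiple of 8, we first find that 8 is a
   multiple of the constant factor 4, then reduce the question to
   whether x * 2 + 2 is a multiple of 8 / 4 = 2, which the PLUS_EXPR
   and MULT_EXPR cases answer affirmatively.  */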
12756 #define tree_expr_nonnegative_warnv_p(X, Y) \
12757 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12759 #define RECURSE(X) \
12760 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
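/* The pragma above turns any direct call to tree_expr_nonnegative_warnv_p
   in this part of the file into a compile-time error; recursion must go
   through RECURSE so that DEPTH is incremented on each step and the
   SSA_NAME case can bound the search.  */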
12762 /* Return true if CODE or TYPE is known to be non-negative. */
12764 static bool
12765 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12767 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12768 && truth_value_p (code))
12769 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12770 have a signed:1 type (where the values are -1 and 0). */
12771 return true;
12772 return false;
12775 /* Return true if (CODE OP0) is known to be non-negative. If the return
12776 value is based on the assumption that signed overflow is undefined,
12777 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12778 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12780 bool
12781 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12782 bool *strict_overflow_p, int depth)
12784 if (TYPE_UNSIGNED (type))
12785 return true;
12787 switch (code)
12789 case ABS_EXPR:
12790 /* We can't return 1 if flag_wrapv is set because
12791 ABS_EXPR<INT_MIN> = INT_MIN. */
12792 if (!ANY_INTEGRAL_TYPE_P (type))
12793 return true;
12794 if (TYPE_OVERFLOW_UNDEFINED (type))
12796 *strict_overflow_p = true;
12797 return true;
12799 break;
12801 case NON_LVALUE_EXPR:
12802 case FLOAT_EXPR:
12803 case FIX_TRUNC_EXPR:
12804 return RECURSE (op0);
12806 CASE_CONVERT:
12808 tree inner_type = TREE_TYPE (op0);
12809 tree outer_type = type;
12811 if (TREE_CODE (outer_type) == REAL_TYPE)
12813 if (TREE_CODE (inner_type) == REAL_TYPE)
12814 return RECURSE (op0);
12815 if (INTEGRAL_TYPE_P (inner_type))
12817 if (TYPE_UNSIGNED (inner_type))
12818 return true;
12819 return RECURSE (op0);
12822 else if (INTEGRAL_TYPE_P (outer_type))
12824 if (TREE_CODE (inner_type) == REAL_TYPE)
12825 return RECURSE (op0);
12826 if (INTEGRAL_TYPE_P (inner_type))
12827 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12828 && TYPE_UNSIGNED (inner_type);
12831 break;
12833 default:
12834 return tree_simple_nonnegative_warnv_p (code, type);
12837 /* We don't know the sign of `t', so be conservative and return false. */
12838 return false;
12841 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12842 value is based on the assumption that signed overflow is undefined,
12843 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12844 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12846 bool
12847 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12848 tree op1, bool *strict_overflow_p,
12849 int depth)
12851 if (TYPE_UNSIGNED (type))
12852 return true;
12854 switch (code)
12856 case POINTER_PLUS_EXPR:
12857 case PLUS_EXPR:
12858 if (FLOAT_TYPE_P (type))
12859 return RECURSE (op0) && RECURSE (op1);
12861 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12862 both unsigned and at least 2 bits shorter than the result. */
12863 if (TREE_CODE (type) == INTEGER_TYPE
12864 && TREE_CODE (op0) == NOP_EXPR
12865 && TREE_CODE (op1) == NOP_EXPR)
12867 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12868 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12869 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12870 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12872 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12873 TYPE_PRECISION (inner2)) + 1;
12874 return prec < TYPE_PRECISION (type);
12877 break;
12879 case MULT_EXPR:
12880 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12882 /* x * x is always non-negative for floating point x
12883 or without overflow. */
12884 if (operand_equal_p (op0, op1, 0)
12885 || (RECURSE (op0) && RECURSE (op1)))
12887 if (ANY_INTEGRAL_TYPE_P (type)
12888 && TYPE_OVERFLOW_UNDEFINED (type))
12889 *strict_overflow_p = true;
12890 return true;
12894 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12895 both unsigned and their combined precision is smaller than the result's. */
12896 if (TREE_CODE (type) == INTEGER_TYPE
12897 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12898 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12900 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12901 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12902 : TREE_TYPE (op0);
12903 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12904 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12905 : TREE_TYPE (op1);
12907 bool unsigned0 = TYPE_UNSIGNED (inner0);
12908 bool unsigned1 = TYPE_UNSIGNED (inner1);
12910 if (TREE_CODE (op0) == INTEGER_CST)
12911 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12913 if (TREE_CODE (op1) == INTEGER_CST)
12914 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12916 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12917 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12919 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12920 ? tree_int_cst_min_precision (op0, UNSIGNED)
12921 : TYPE_PRECISION (inner0);
12923 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12924 ? tree_int_cst_min_precision (op1, UNSIGNED)
12925 : TYPE_PRECISION (inner1);
12927 return precision0 + precision1 < TYPE_PRECISION (type);
12930 return false;
12932 case BIT_AND_EXPR:
12933 case MAX_EXPR:
12934 return RECURSE (op0) || RECURSE (op1);
12936 case BIT_IOR_EXPR:
12937 case BIT_XOR_EXPR:
12938 case MIN_EXPR:
12939 case RDIV_EXPR:
12940 case TRUNC_DIV_EXPR:
12941 case CEIL_DIV_EXPR:
12942 case FLOOR_DIV_EXPR:
12943 case ROUND_DIV_EXPR:
12944 return RECURSE (op0) && RECURSE (op1);
12946 case TRUNC_MOD_EXPR:
12947 return RECURSE (op0);
12949 case FLOOR_MOD_EXPR:
12950 return RECURSE (op1);
12952 case CEIL_MOD_EXPR:
12953 case ROUND_MOD_EXPR:
12954 default:
12955 return tree_simple_nonnegative_warnv_p (code, type);
12958 /* We don't know the sign of `t', so be conservative and return false. */
12959 return false;
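/* For instance, in (int) (unsigned char) x + (int) (unsigned char) y
   the inner precisions are 8 bits each, so prec = MAX (8, 8) + 1 = 9
   is less than 32 and the PLUS_EXPR case above proves the sum
   nonnegative: it can be at most 255 + 255 = 510.  */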
12962 /* Return true if T is known to be non-negative. If the return
12963 value is based on the assumption that signed overflow is undefined,
12964 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12965 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12967 bool
12968 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12970 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12971 return true;
12973 switch (TREE_CODE (t))
12975 case INTEGER_CST:
12976 return tree_int_cst_sgn (t) >= 0;
12978 case REAL_CST:
12979 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12981 case FIXED_CST:
12982 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12984 case COND_EXPR:
12985 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12987 case SSA_NAME:
12988 /* Limit the depth of recursion to avoid quadratic behavior.
12989 This is expected to catch almost all occurrences in practice.
12990 If this code misses important cases that unbounded recursion
12991 would not, passes that need this information could be revised
12992 to provide it through dataflow propagation. */
12993 return (!name_registered_for_update_p (t)
12994 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12995 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12996 strict_overflow_p, depth));
12998 default:
12999 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13003 /* Return true if T is known to be non-negative. If the return
13004 value is based on the assumption that signed overflow is undefined,
13005 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13006 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13008 bool
13009 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13010 bool *strict_overflow_p, int depth)
13012 switch (fn)
13014 CASE_CFN_ACOS:
13015 CASE_CFN_ACOSH:
13016 CASE_CFN_CABS:
13017 CASE_CFN_COSH:
13018 CASE_CFN_ERFC:
13019 CASE_CFN_EXP:
13020 CASE_CFN_EXP10:
13021 CASE_CFN_EXP2:
13022 CASE_CFN_FABS:
13023 CASE_CFN_FDIM:
13024 CASE_CFN_HYPOT:
13025 CASE_CFN_POW10:
13026 CASE_CFN_FFS:
13027 CASE_CFN_PARITY:
13028 CASE_CFN_POPCOUNT:
13029 CASE_CFN_CLZ:
13030 CASE_CFN_CLRSB:
13031 case CFN_BUILT_IN_BSWAP32:
13032 case CFN_BUILT_IN_BSWAP64:
13033 /* Always true. */
13034 return true;
13036 CASE_CFN_SQRT:
13037 CASE_CFN_SQRT_FN:
13038 /* sqrt(-0.0) is -0.0. */
13039 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13040 return true;
13041 return RECURSE (arg0);
13043 CASE_CFN_ASINH:
13044 CASE_CFN_ATAN:
13045 CASE_CFN_ATANH:
13046 CASE_CFN_CBRT:
13047 CASE_CFN_CEIL:
13048 CASE_CFN_CEIL_FN:
13049 CASE_CFN_ERF:
13050 CASE_CFN_EXPM1:
13051 CASE_CFN_FLOOR:
13052 CASE_CFN_FLOOR_FN:
13053 CASE_CFN_FMOD:
13054 CASE_CFN_FREXP:
13055 CASE_CFN_ICEIL:
13056 CASE_CFN_IFLOOR:
13057 CASE_CFN_IRINT:
13058 CASE_CFN_IROUND:
13059 CASE_CFN_LCEIL:
13060 CASE_CFN_LDEXP:
13061 CASE_CFN_LFLOOR:
13062 CASE_CFN_LLCEIL:
13063 CASE_CFN_LLFLOOR:
13064 CASE_CFN_LLRINT:
13065 CASE_CFN_LLROUND:
13066 CASE_CFN_LRINT:
13067 CASE_CFN_LROUND:
13068 CASE_CFN_MODF:
13069 CASE_CFN_NEARBYINT:
13070 CASE_CFN_NEARBYINT_FN:
13071 CASE_CFN_RINT:
13072 CASE_CFN_RINT_FN:
13073 CASE_CFN_ROUND:
13074 CASE_CFN_ROUND_FN:
13075 CASE_CFN_SCALB:
13076 CASE_CFN_SCALBLN:
13077 CASE_CFN_SCALBN:
13078 CASE_CFN_SIGNBIT:
13079 CASE_CFN_SIGNIFICAND:
13080 CASE_CFN_SINH:
13081 CASE_CFN_TANH:
13082 CASE_CFN_TRUNC:
13083 CASE_CFN_TRUNC_FN:
13084 /* True if the 1st argument is nonnegative. */
13085 return RECURSE (arg0);
13087 CASE_CFN_FMAX:
13088 CASE_CFN_FMAX_FN:
13089 /* True if the 1st OR 2nd arguments are nonnegative. */
13090 return RECURSE (arg0) || RECURSE (arg1);
13092 CASE_CFN_FMIN:
13093 CASE_CFN_FMIN_FN:
13094 /* True if the 1st AND 2nd arguments are nonnegative. */
13095 return RECURSE (arg0) && RECURSE (arg1);
13097 CASE_CFN_COPYSIGN:
13098 CASE_CFN_COPYSIGN_FN:
13099 /* True if the 2nd argument is nonnegative. */
13100 return RECURSE (arg1);
13102 CASE_CFN_POWI:
13103 /* True if the 1st argument is nonnegative or the second
13104 argument is an even integer. */
13105 if (TREE_CODE (arg1) == INTEGER_CST
13106 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13107 return true;
13108 return RECURSE (arg0);
13110 CASE_CFN_POW:
13111 /* True if the 1st argument is nonnegative or the second
13112 argument is an even integer valued real. */
13113 if (TREE_CODE (arg1) == REAL_CST)
13115 REAL_VALUE_TYPE c;
13116 HOST_WIDE_INT n;
13118 c = TREE_REAL_CST (arg1);
13119 n = real_to_integer (&c);
13120 if ((n & 1) == 0)
13122 REAL_VALUE_TYPE cint;
13123 real_from_integer (&cint, VOIDmode, n, SIGNED);
13124 if (real_identical (&c, &cint))
13125 return true;
13128 return RECURSE (arg0);
13130 default:
13131 break;
13133 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
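/* As an illustration of the POW case above, pow (x, 2.0) is treated as
   nonnegative for any x, because 2.0 is an even integer-valued real;
   pow (x, 0.5) is nonnegative only when RECURSE can show that x itself
   is.  */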
13136 /* Return true if T is known to be non-negative. If the return
13137 value is based on the assumption that signed overflow is undefined,
13138 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13139 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13141 static bool
13142 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13144 enum tree_code code = TREE_CODE (t);
13145 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13146 return true;
13148 switch (code)
13150 case TARGET_EXPR:
13152 tree temp = TARGET_EXPR_SLOT (t);
13153 t = TARGET_EXPR_INITIAL (t);
13155 /* If the initializer is non-void, then it's a normal expression
13156 that will be assigned to the slot. */
13157 if (!VOID_TYPE_P (t))
13158 return RECURSE (t);
13160 /* Otherwise, the initializer sets the slot in some way. One common
13161 way is an assignment statement at the end of the initializer. */
13162 while (1)
13164 if (TREE_CODE (t) == BIND_EXPR)
13165 t = expr_last (BIND_EXPR_BODY (t));
13166 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13167 || TREE_CODE (t) == TRY_CATCH_EXPR)
13168 t = expr_last (TREE_OPERAND (t, 0));
13169 else if (TREE_CODE (t) == STATEMENT_LIST)
13170 t = expr_last (t);
13171 else
13172 break;
13174 if (TREE_CODE (t) == MODIFY_EXPR
13175 && TREE_OPERAND (t, 0) == temp)
13176 return RECURSE (TREE_OPERAND (t, 1));
13178 return false;
13181 case CALL_EXPR:
13183 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13184 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13186 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13187 get_call_combined_fn (t),
13188 arg0,
13189 arg1,
13190 strict_overflow_p, depth);
13192 case COMPOUND_EXPR:
13193 case MODIFY_EXPR:
13194 return RECURSE (TREE_OPERAND (t, 1));
13196 case BIND_EXPR:
13197 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13199 case SAVE_EXPR:
13200 return RECURSE (TREE_OPERAND (t, 0));
13202 default:
13203 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13207 #undef RECURSE
13208 #undef tree_expr_nonnegative_warnv_p
13210 /* Return true if T is known to be non-negative. If the return
13211 value is based on the assumption that signed overflow is undefined,
13212 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13213 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13215 bool
13216 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13218 enum tree_code code;
13219 if (t == error_mark_node)
13220 return false;
13222 code = TREE_CODE (t);
13223 switch (TREE_CODE_CLASS (code))
13225 case tcc_binary:
13226 case tcc_comparison:
13227 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13228 TREE_TYPE (t),
13229 TREE_OPERAND (t, 0),
13230 TREE_OPERAND (t, 1),
13231 strict_overflow_p, depth);
13233 case tcc_unary:
13234 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13235 TREE_TYPE (t),
13236 TREE_OPERAND (t, 0),
13237 strict_overflow_p, depth);
13239 case tcc_constant:
13240 case tcc_declaration:
13241 case tcc_reference:
13242 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13244 default:
13245 break;
13248 switch (code)
13250 case TRUTH_AND_EXPR:
13251 case TRUTH_OR_EXPR:
13252 case TRUTH_XOR_EXPR:
13253 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13254 TREE_TYPE (t),
13255 TREE_OPERAND (t, 0),
13256 TREE_OPERAND (t, 1),
13257 strict_overflow_p, depth);
13258 case TRUTH_NOT_EXPR:
13259 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13260 TREE_TYPE (t),
13261 TREE_OPERAND (t, 0),
13262 strict_overflow_p, depth);
13264 case COND_EXPR:
13265 case CONSTRUCTOR:
13266 case OBJ_TYPE_REF:
13267 case ASSERT_EXPR:
13268 case ADDR_EXPR:
13269 case WITH_SIZE_EXPR:
13270 case SSA_NAME:
13271 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13273 default:
13274 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13278 /* Return true if `t' is known to be non-negative. Handle warnings
13279 about undefined signed overflow. */
13281 bool
13282 tree_expr_nonnegative_p (tree t)
13284 bool ret, strict_overflow_p;
13286 strict_overflow_p = false;
13287 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13288 if (strict_overflow_p)
13289 fold_overflow_warning (("assuming signed overflow does not occur when "
13290 "determining that expression is always "
13291 "non-negative"),
13292 WARN_STRICT_OVERFLOW_MISC);
13293 return ret;
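/* For example, with signed int operands, tree_expr_nonnegative_p on
   (x & 15) | 1 returns true with no overflow assumption: BIT_AND_EXPR
   is nonnegative because one of its operands (15) is, and BIT_IOR_EXPR
   because both of its operands are.  */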
13297 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13298 For floating point we further ensure that T is not denormal.
13299 Similar logic is present in nonzero_address in rtlanal.h.
13301 If the return value is based on the assumption that signed overflow
13302 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13303 change *STRICT_OVERFLOW_P. */
13305 bool
13306 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13307 bool *strict_overflow_p)
13309 switch (code)
13311 case ABS_EXPR:
13312 return tree_expr_nonzero_warnv_p (op0,
13313 strict_overflow_p);
13315 case NOP_EXPR:
13317 tree inner_type = TREE_TYPE (op0);
13318 tree outer_type = type;
13320 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13321 && tree_expr_nonzero_warnv_p (op0,
13322 strict_overflow_p));
13324 break;
13326 case NON_LVALUE_EXPR:
13327 return tree_expr_nonzero_warnv_p (op0,
13328 strict_overflow_p);
13330 default:
13331 break;
13334 return false;
13337 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13338 For floating point we further ensure that T is not denormal.
13339 Similar logic is present in nonzero_address in rtlanal.h.
13341 If the return value is based on the assumption that signed overflow
13342 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13343 change *STRICT_OVERFLOW_P. */
13345 bool
13346 tree_binary_nonzero_warnv_p (enum tree_code code,
13347 tree type,
13348 tree op0,
13349 tree op1, bool *strict_overflow_p)
13351 bool sub_strict_overflow_p;
13352 switch (code)
13354 case POINTER_PLUS_EXPR:
13355 case PLUS_EXPR:
13356 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13358 /* In the presence of negative values it is hard
13359 to say anything definite. */
13360 sub_strict_overflow_p = false;
13361 if (!tree_expr_nonnegative_warnv_p (op0,
13362 &sub_strict_overflow_p)
13363 || !tree_expr_nonnegative_warnv_p (op1,
13364 &sub_strict_overflow_p))
13365 return false;
13366 /* One of the operands must be positive and the other non-negative. */
13367 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13368 overflows, on a twos-complement machine the sum of two
13369 nonnegative numbers can never be zero. */
13370 return (tree_expr_nonzero_warnv_p (op0,
13371 strict_overflow_p)
13372 || tree_expr_nonzero_warnv_p (op1,
13373 strict_overflow_p));
13375 break;
13377 case MULT_EXPR:
13378 if (TYPE_OVERFLOW_UNDEFINED (type))
13380 if (tree_expr_nonzero_warnv_p (op0,
13381 strict_overflow_p)
13382 && tree_expr_nonzero_warnv_p (op1,
13383 strict_overflow_p))
13385 *strict_overflow_p = true;
13386 return true;
13389 break;
13391 case MIN_EXPR:
13392 sub_strict_overflow_p = false;
13393 if (tree_expr_nonzero_warnv_p (op0,
13394 &sub_strict_overflow_p)
13395 && tree_expr_nonzero_warnv_p (op1,
13396 &sub_strict_overflow_p))
13398 if (sub_strict_overflow_p)
13399 *strict_overflow_p = true;
13401 break;
13403 case MAX_EXPR:
13404 sub_strict_overflow_p = false;
13405 if (tree_expr_nonzero_warnv_p (op0,
13406 &sub_strict_overflow_p))
13408 if (sub_strict_overflow_p)
13409 *strict_overflow_p = true;
13411 /* When both operands are nonzero, then MAX must be too. */
13412 if (tree_expr_nonzero_warnv_p (op1,
13413 strict_overflow_p))
13414 return true;
13416 /* MAX where operand 0 is positive is positive. */
13417 return tree_expr_nonnegative_warnv_p (op0,
13418 strict_overflow_p);
13420 /* MAX where operand 1 is positive is positive. */
13421 else if (tree_expr_nonzero_warnv_p (op1,
13422 &sub_strict_overflow_p)
13423 && tree_expr_nonnegative_warnv_p (op1,
13424 &sub_strict_overflow_p))
13426 if (sub_strict_overflow_p)
13427 *strict_overflow_p = true;
13428 return true;
13430 break;
13432 case BIT_IOR_EXPR:
13433 return (tree_expr_nonzero_warnv_p (op1,
13434 strict_overflow_p)
13435 || tree_expr_nonzero_warnv_p (op0,
13436 strict_overflow_p));
13438 default:
13439 break;
13442 return false;
13445 /* Return true when T is an address and is known to be nonzero.
13446 For floating point we further ensure that T is not denormal.
13447 Similar logic is present in nonzero_address in rtlanal.h.
13449 If the return value is based on the assumption that signed overflow
13450 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13451 change *STRICT_OVERFLOW_P. */
13453 bool
13454 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13456 bool sub_strict_overflow_p;
13457 switch (TREE_CODE (t))
13459 case INTEGER_CST:
13460 return !integer_zerop (t);
13462 case ADDR_EXPR:
13464 tree base = TREE_OPERAND (t, 0);
13466 if (!DECL_P (base))
13467 base = get_base_address (base);
13469 if (base && TREE_CODE (base) == TARGET_EXPR)
13470 base = TARGET_EXPR_SLOT (base);
13472 if (!base)
13473 return false;
13475 /* For objects in symbol table check if we know they are non-zero.
13476 Don't do anything for variables and functions before symtab is built;
13477 it is quite possible that they will be declared weak later. */
13478 int nonzero_addr = maybe_nonzero_address (base);
13479 if (nonzero_addr >= 0)
13480 return nonzero_addr;
13482 /* Constants are never weak. */
13483 if (CONSTANT_CLASS_P (base))
13484 return true;
13486 return false;
13489 case COND_EXPR:
13490 sub_strict_overflow_p = false;
13491 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13492 &sub_strict_overflow_p)
13493 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13494 &sub_strict_overflow_p))
13496 if (sub_strict_overflow_p)
13497 *strict_overflow_p = true;
13498 return true;
13500 break;
13502 case SSA_NAME:
13503 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13504 break;
13505 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
13507 default:
13508 break;
13510 return false;
13513 #define integer_valued_real_p(X) \
13514 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13516 #define RECURSE(X) \
13517 ((integer_valued_real_p) (X, depth + 1))
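/* As with the nonnegative queries above, recursion below must go
   through RECURSE so that DEPTH grows; direct calls to
   integer_valued_real_p here are rejected by the pragma above.  */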
13519 /* Return true if the floating point result of (CODE OP0) has an
13520 integer value. We also allow +Inf, -Inf and NaN to be considered
13521 integer values. Return false for signaling NaN.
13523 DEPTH is the current nesting depth of the query. */
13525 bool
13526 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13528 switch (code)
13530 case FLOAT_EXPR:
13531 return true;
13533 case ABS_EXPR:
13534 return RECURSE (op0);
13536 CASE_CONVERT:
13538 tree type = TREE_TYPE (op0);
13539 if (TREE_CODE (type) == INTEGER_TYPE)
13540 return true;
13541 if (TREE_CODE (type) == REAL_TYPE)
13542 return RECURSE (op0);
13543 break;
13546 default:
13547 break;
13549 return false;
13552 /* Return true if the floating point result of (CODE OP0 OP1) has an
13553 integer value. We also allow +Inf, -Inf and NaN to be considered
13554 integer values. Return false for signaling NaN.
13556 DEPTH is the current nesting depth of the query. */
13558 bool
13559 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13561 switch (code)
13563 case PLUS_EXPR:
13564 case MINUS_EXPR:
13565 case MULT_EXPR:
13566 case MIN_EXPR:
13567 case MAX_EXPR:
13568 return RECURSE (op0) && RECURSE (op1);
13570 default:
13571 break;
13573 return false;
13576 /* Return true if the floating point result of calling FN with arguments
13577 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13578 considered integer values. Return false for signaling NaN. If FN
13579 takes fewer than 2 arguments, the remaining ARGn are null.
13581 DEPTH is the current nesting depth of the query. */
13583 bool
13584 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13586 switch (fn)
13588 CASE_CFN_CEIL:
13589 CASE_CFN_CEIL_FN:
13590 CASE_CFN_FLOOR:
13591 CASE_CFN_FLOOR_FN:
13592 CASE_CFN_NEARBYINT:
13593 CASE_CFN_NEARBYINT_FN:
13594 CASE_CFN_RINT:
13595 CASE_CFN_RINT_FN:
13596 CASE_CFN_ROUND:
13597 CASE_CFN_ROUND_FN:
13598 CASE_CFN_TRUNC:
13599 CASE_CFN_TRUNC_FN:
13600 return true;
13602 CASE_CFN_FMIN:
13603 CASE_CFN_FMIN_FN:
13604 CASE_CFN_FMAX:
13605 CASE_CFN_FMAX_FN:
13606 return RECURSE (arg0) && RECURSE (arg1);
13608 default:
13609 break;
13611 return false;
13614 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13615 has an integer value. We also allow +Inf, -Inf and NaN to be
13616 considered integer values. Return false for signaling NaN.
13618 DEPTH is the current nesting depth of the query. */
13620 bool
13621 integer_valued_real_single_p (tree t, int depth)
13623 switch (TREE_CODE (t))
13625 case REAL_CST:
13626 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13628 case COND_EXPR:
13629 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13631 case SSA_NAME:
13632 /* Limit the depth of recursion to avoid quadratic behavior.
13633 This is expected to catch almost all occurrences in practice.
13634 If this code misses important cases that unbounded recursion
13635 would not, passes that need this information could be revised
13636 to provide it through dataflow propagation. */
13637 return (!name_registered_for_update_p (t)
13638 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13639 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13640 depth));
13642 default:
13643 break;
13645 return false;
13648 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13649 has an integer value. We also allow +Inf, -Inf and NaN to be
13650 considered integer values. Return false for signaling NaN.
13652 DEPTH is the current nesting depth of the query. */
13654 static bool
13655 integer_valued_real_invalid_p (tree t, int depth)
13657 switch (TREE_CODE (t))
13659 case COMPOUND_EXPR:
13660 case MODIFY_EXPR:
13661 case BIND_EXPR:
13662 return RECURSE (TREE_OPERAND (t, 1));
13664 case SAVE_EXPR:
13665 return RECURSE (TREE_OPERAND (t, 0));
13667 default:
13668 break;
13670 return false;
13673 #undef RECURSE
13674 #undef integer_valued_real_p
13676 /* Return true if the floating point expression T has an integer value.
13677 We also allow +Inf, -Inf and NaN to be considered integer values.
13678 Return false for signaling NaN.
13680 DEPTH is the current nesting depth of the query. */
13682 bool
13683 integer_valued_real_p (tree t, int depth)
13685 if (t == error_mark_node)
13686 return false;
13688 tree_code code = TREE_CODE (t);
13689 switch (TREE_CODE_CLASS (code))
13691 case tcc_binary:
13692 case tcc_comparison:
13693 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13694 TREE_OPERAND (t, 1), depth);
13696 case tcc_unary:
13697 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13699 case tcc_constant:
13700 case tcc_declaration:
13701 case tcc_reference:
13702 return integer_valued_real_single_p (t, depth);
13704 default:
13705 break;
13708 switch (code)
13710 case COND_EXPR:
13711 case SSA_NAME:
13712 return integer_valued_real_single_p (t, depth);
13714 case CALL_EXPR:
13716 tree arg0 = (call_expr_nargs (t) > 0
13717 ? CALL_EXPR_ARG (t, 0)
13718 : NULL_TREE);
13719 tree arg1 = (call_expr_nargs (t) > 1
13720 ? CALL_EXPR_ARG (t, 1)
13721 : NULL_TREE);
13722 return integer_valued_real_call_p (get_call_combined_fn (t),
13723 arg0, arg1, depth);
13726 default:
13727 return integer_valued_real_invalid_p (t, depth);
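/* For example, trunc (x) + 1.0 is integer valued: the CALL_EXPR case
   accepts trunc, the REAL_CST case accepts 1.0, and the PLUS_EXPR case
   of integer_valued_real_binary_p combines the two answers.  */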
13731 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13732 attempt to fold the expression to a constant without modifying TYPE,
13733 OP0 or OP1.
13735 If the expression could be simplified to a constant, then return
13736 the constant. If the expression would not be simplified to a
13737 constant, then return NULL_TREE. */
13739 tree
13740 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13742 tree tem = fold_binary (code, type, op0, op1);
13743 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13746 /* Given the components of a unary expression CODE, TYPE and OP0,
13747 attempt to fold the expression to a constant without modifying
13748 TYPE or OP0.
13750 If the expression could be simplified to a constant, then return
13751 the constant. If the expression would not be simplified to a
13752 constant, then return NULL_TREE. */
13754 tree
13755 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13757 tree tem = fold_unary (code, type, op0);
13758 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13761 /* If EXP represents referencing an element in a constant string
13762 (either via pointer arithmetic or array indexing), return the
13763 tree representing the value accessed, otherwise return NULL. */
13765 tree
13766 fold_read_from_constant_string (tree exp)
13768 if ((TREE_CODE (exp) == INDIRECT_REF
13769 || TREE_CODE (exp) == ARRAY_REF)
13770 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13772 tree exp1 = TREE_OPERAND (exp, 0);
13773 tree index;
13774 tree string;
13775 location_t loc = EXPR_LOCATION (exp);
13777 if (TREE_CODE (exp) == INDIRECT_REF)
13778 string = string_constant (exp1, &index);
13779 else
13781 tree low_bound = array_ref_low_bound (exp);
13782 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13784 /* Optimize the special-case of a zero lower bound.
13786 We convert the low_bound to sizetype to avoid some problems
13787 with constant folding. (E.g. suppose the lower bound is 1,
13788 and its mode is QI. Without the conversion, (ARRAY
13789 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13790 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13791 if (! integer_zerop (low_bound))
13792 index = size_diffop_loc (loc, index,
13793 fold_convert_loc (loc, sizetype, low_bound));
13795 string = exp1;
13798 scalar_int_mode char_mode;
13799 if (string
13800 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13801 && TREE_CODE (string) == STRING_CST
13802 && TREE_CODE (index) == INTEGER_CST
13803 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13804 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
13805 &char_mode)
13806 && GET_MODE_SIZE (char_mode) == 1)
13807 return build_int_cst_type (TREE_TYPE (exp),
13808 (TREE_STRING_POINTER (string)
13809 [TREE_INT_CST_LOW (index)]));
13811 return NULL;
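/* For instance, the C expression "hello"[1] satisfies all of the checks
   above (the index 1 is an INTEGER_CST below the string length, and the
   element mode is a 1-byte integer mode), so it folds to the character
   constant 'e'.  */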
13814 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13815 an integer constant, real, or fixed-point constant.
13817 TYPE is the type of the result. */
13819 static tree
13820 fold_negate_const (tree arg0, tree type)
13822 tree t = NULL_TREE;
13824 switch (TREE_CODE (arg0))
13826 case REAL_CST:
13827 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13828 break;
13830 case FIXED_CST:
13832 FIXED_VALUE_TYPE f;
13833 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13834 &(TREE_FIXED_CST (arg0)), NULL,
13835 TYPE_SATURATING (type));
13836 t = build_fixed (type, f);
13837 /* Propagate overflow flags. */
13838 if (overflow_p | TREE_OVERFLOW (arg0))
13839 TREE_OVERFLOW (t) = 1;
13840 break;
13843 default:
13844 if (poly_int_tree_p (arg0))
13846 wi::overflow_type overflow;
13847 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
13848 t = force_fit_type (type, res, 1,
13849 (overflow && ! TYPE_UNSIGNED (type))
13850 || TREE_OVERFLOW (arg0));
13851 break;
13854 gcc_unreachable ();
13857 return t;
13860 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13861 an integer constant or real constant.
13863 TYPE is the type of the result. */
13865 tree
13866 fold_abs_const (tree arg0, tree type)
13868 tree t = NULL_TREE;
13870 switch (TREE_CODE (arg0))
13872 case INTEGER_CST:
13874 /* If the value is unsigned or non-negative, then the absolute value
13875 is the same as the ordinary value. */
13876 wide_int val = wi::to_wide (arg0);
13877 wi::overflow_type overflow = wi::OVF_NONE;
13878 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
13881 /* If the value is negative, then the absolute value is
13882 its negation. */
13883 else
13884 val = wi::neg (val, &overflow);
13886 /* Force to the destination type, set TREE_OVERFLOW for signed
13887 TYPE only. */
13888 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
13890 break;
13892 case REAL_CST:
13893 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13894 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13895 else
13896 t = arg0;
13897 break;
13899 default:
13900 gcc_unreachable ();
13903 return t;
13906 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13907 constant. TYPE is the type of the result. */
13909 static tree
13910 fold_not_const (const_tree arg0, tree type)
13912 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13914 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
13917 /* Given CODE, a relational operator, the target type, TYPE and two
13918 constant operands OP0 and OP1, return the result of the
13919 relational operation. If the result is not a compile time
13920 constant, then return NULL_TREE. */
13922 static tree
13923 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13925 int result, invert;
13927 /* From here on, the only cases we handle are when the result is
13928 known to be a constant. */
13930 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13932 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13933 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13935 /* Handle the cases where either operand is a NaN. */
13936 if (real_isnan (c0) || real_isnan (c1))
13938 switch (code)
13940 case EQ_EXPR:
13941 case ORDERED_EXPR:
13942 result = 0;
13943 break;
13945 case NE_EXPR:
13946 case UNORDERED_EXPR:
13947 case UNLT_EXPR:
13948 case UNLE_EXPR:
13949 case UNGT_EXPR:
13950 case UNGE_EXPR:
13951 case UNEQ_EXPR:
13952 result = 1;
13953 break;
13955 case LT_EXPR:
13956 case LE_EXPR:
13957 case GT_EXPR:
13958 case GE_EXPR:
13959 case LTGT_EXPR:
13960 if (flag_trapping_math)
13961 return NULL_TREE;
13962 result = 0;
13963 break;
13965 default:
13966 gcc_unreachable ();
13969 return constant_boolean_node (result, type);
13972 return constant_boolean_node (real_compare (code, c0, c1), type);
13975 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13977 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13978 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13979 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13982 /* Handle equality/inequality of complex constants. */
13983 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13985 tree rcond = fold_relational_const (code, type,
13986 TREE_REALPART (op0),
13987 TREE_REALPART (op1));
13988 tree icond = fold_relational_const (code, type,
13989 TREE_IMAGPART (op0),
13990 TREE_IMAGPART (op1));
13991 if (code == EQ_EXPR)
13992 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13993 else if (code == NE_EXPR)
13994 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13995 else
13996 return NULL_TREE;
13999 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14001 if (!VECTOR_TYPE_P (type))
14003 /* Have vector comparison with scalar boolean result. */
14004 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
14005 && known_eq (VECTOR_CST_NELTS (op0),
14006 VECTOR_CST_NELTS (op1)));
14007 unsigned HOST_WIDE_INT nunits;
14008 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
14009 return NULL_TREE;
14010 for (unsigned i = 0; i < nunits; i++)
14012 tree elem0 = VECTOR_CST_ELT (op0, i);
14013 tree elem1 = VECTOR_CST_ELT (op1, i);
14014 tree tmp = fold_relational_const (code, type, elem0, elem1);
14015 if (tmp == NULL_TREE)
14016 return NULL_TREE;
14017 if (integer_zerop (tmp))
14018 return constant_boolean_node (false, type);
14020 return constant_boolean_node (true, type);
14022 tree_vector_builder elts;
14023 if (!elts.new_binary_operation (type, op0, op1, false))
14024 return NULL_TREE;
14025 unsigned int count = elts.encoded_nelts ();
14026 for (unsigned i = 0; i < count; i++)
14028 tree elem_type = TREE_TYPE (type);
14029 tree elem0 = VECTOR_CST_ELT (op0, i);
14030 tree elem1 = VECTOR_CST_ELT (op1, i);
14032 tree tem = fold_relational_const (code, elem_type,
14033 elem0, elem1);
14035 if (tem == NULL_TREE)
14036 return NULL_TREE;
14038 elts.quick_push (build_int_cst (elem_type,
14039 integer_zerop (tem) ? 0 : -1));
14042 return elts.build ();
14045 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14047 To compute GT, swap the arguments and do LT.
14048 To compute GE, do LT and invert the result.
14049 To compute LE, swap the arguments, do LT and invert the result.
14050 To compute NE, do EQ and invert the result.
14052 Therefore, the code below must handle only EQ and LT. */
14054 if (code == LE_EXPR || code == GT_EXPR)
14056 std::swap (op0, op1);
14057 code = swap_tree_comparison (code);
14060 /* Note that it is safe to invert for real values here because we
14061 have already handled the one case where it matters. */
14063 invert = 0;
14064 if (code == NE_EXPR || code == GE_EXPR)
14066 invert = 1;
14067 code = invert_tree_comparison (code, false);
14070 /* Compute a result for LT or EQ if args permit;
14071 otherwise return NULL_TREE. */
14072 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14074 if (code == EQ_EXPR)
14075 result = tree_int_cst_equal (op0, op1);
14076 else
14077 result = tree_int_cst_lt (op0, op1);
14079 else
14080 return NULL_TREE;
14082 if (invert)
14083 result ^= 1;
14084 return constant_boolean_node (result, type);
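/* An illustration of the NaN handling above: if either operand is a NaN,
   EQ_EXPR folds to false and NE_EXPR to true, while LT_EXPR is left
   unfolded under -ftrapping-math because a relational comparison with a
   NaN must raise an invalid-operation exception at run time.  */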
14087 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14088 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14089 itself. */
14091 tree
14092 fold_build_cleanup_point_expr (tree type, tree expr)
14094 /* If the expression does not have side effects then we don't have to wrap
14095 it with a cleanup point expression. */
14096 if (!TREE_SIDE_EFFECTS (expr))
14097 return expr;
14099 /* If the expression is a return, check whether the expression inside the
14100 return, or the right-hand side of the modify expression inside the
14101 return, has side effects. If either has none, we don't need to
14102 wrap the expression in a cleanup point expression. Note we don't check the
14103 left-hand side of the modify because it should always be a return decl. */
14104 if (TREE_CODE (expr) == RETURN_EXPR)
14106 tree op = TREE_OPERAND (expr, 0);
14107 if (!op || !TREE_SIDE_EFFECTS (op))
14108 return expr;
14109 op = TREE_OPERAND (op, 1);
14110 if (!TREE_SIDE_EFFECTS (op))
14111 return expr;
14114 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
14117 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14118 of an indirection through OP0, or NULL_TREE if no simplification is
14119 possible. */
14121 tree
14122 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14124 tree sub = op0;
14125 tree subtype;
14126 poly_uint64 const_op01;
14128 STRIP_NOPS (sub);
14129 subtype = TREE_TYPE (sub);
14130 if (!POINTER_TYPE_P (subtype)
14131 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
14132 return NULL_TREE;
14134 if (TREE_CODE (sub) == ADDR_EXPR)
14136 tree op = TREE_OPERAND (sub, 0);
14137 tree optype = TREE_TYPE (op);
14139 /* *&CONST_DECL -> to the value of the const decl. */
14140 if (TREE_CODE (op) == CONST_DECL)
14141 return DECL_INITIAL (op);
14142 /* *&p => p; make sure to handle *&"str"[cst] here. */
14143 if (type == optype)
14145 tree fop = fold_read_from_constant_string (op);
14146 if (fop)
14147 return fop;
14148 else
14149 return op;
14151 /* *(foo *)&fooarray => fooarray[0] */
14152 else if (TREE_CODE (optype) == ARRAY_TYPE
14153 && type == TREE_TYPE (optype)
14154 && (!in_gimple_form
14155 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14157 tree type_domain = TYPE_DOMAIN (optype);
14158 tree min_val = size_zero_node;
14159 if (type_domain && TYPE_MIN_VALUE (type_domain))
14160 min_val = TYPE_MIN_VALUE (type_domain);
14161 if (in_gimple_form
14162 && TREE_CODE (min_val) != INTEGER_CST)
14163 return NULL_TREE;
14164 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14165 NULL_TREE, NULL_TREE);
14167 /* *(foo *)&complexfoo => __real__ complexfoo */
14168 else if (TREE_CODE (optype) == COMPLEX_TYPE
14169 && type == TREE_TYPE (optype))
14170 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14171 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14172 else if (VECTOR_TYPE_P (optype)
14173 && type == TREE_TYPE (optype))
14175 tree part_width = TYPE_SIZE (type);
14176 tree index = bitsize_int (0);
14177 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
14178 index);
14182 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14183 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
14185 tree op00 = TREE_OPERAND (sub, 0);
14186 tree op01 = TREE_OPERAND (sub, 1);
14188 STRIP_NOPS (op00);
14189 if (TREE_CODE (op00) == ADDR_EXPR)
14191 tree op00type;
14192 op00 = TREE_OPERAND (op00, 0);
14193 op00type = TREE_TYPE (op00);
14195 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14196 if (VECTOR_TYPE_P (op00type)
14197 && type == TREE_TYPE (op00type)
14198 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
14199 but we want to treat offsets with MSB set as negative.
14200 For the code below negative offsets are invalid and
14201 TYPE_SIZE of the element is something unsigned, so
14202 check whether op01 fits into poly_int64, which implies
14203 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
14204 then just use poly_uint64 because we want to treat the
14205 value as unsigned. */
14206 && tree_fits_poly_int64_p (op01))
14208 tree part_width = TYPE_SIZE (type);
14209 poly_uint64 max_offset
14210 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
14211 * TYPE_VECTOR_SUBPARTS (op00type));
14212 if (known_lt (const_op01, max_offset))
14214 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
14215 return fold_build3_loc (loc,
14216 BIT_FIELD_REF, type, op00,
14217 part_width, index);
14220 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14221 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14222 && type == TREE_TYPE (op00type))
14224 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
14225 const_op01))
14226 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14228 /* ((foo *)&fooarray)[1] => fooarray[1] */
14229 else if (TREE_CODE (op00type) == ARRAY_TYPE
14230 && type == TREE_TYPE (op00type))
14232 tree type_domain = TYPE_DOMAIN (op00type);
14233 tree min_val = size_zero_node;
14234 if (type_domain && TYPE_MIN_VALUE (type_domain))
14235 min_val = TYPE_MIN_VALUE (type_domain);
14236 poly_uint64 type_size, index;
14237 if (poly_int_tree_p (min_val)
14238 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
14239 && multiple_p (const_op01, type_size, &index))
14241 poly_offset_int off = index + wi::to_poly_offset (min_val);
14242 op01 = wide_int_to_tree (sizetype, off);
14243 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14244 NULL_TREE, NULL_TREE);
14250 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14251 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14252 && type == TREE_TYPE (TREE_TYPE (subtype))
14253 && (!in_gimple_form
14254 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14256 tree type_domain;
14257 tree min_val = size_zero_node;
14258 sub = build_fold_indirect_ref_loc (loc, sub);
14259 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14260 if (type_domain && TYPE_MIN_VALUE (type_domain))
14261 min_val = TYPE_MIN_VALUE (type_domain);
14262 if (in_gimple_form
14263 && TREE_CODE (min_val) != INTEGER_CST)
14264 return NULL_TREE;
14265 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14266 NULL_TREE);
14269 return NULL_TREE;
14272 /* Builds an expression for an indirection through T, simplifying some
14273 cases. */
14275 tree
14276 build_fold_indirect_ref_loc (location_t loc, tree t)
14278 tree type = TREE_TYPE (TREE_TYPE (t));
14279 tree sub = fold_indirect_ref_1 (loc, type, t);
14281 if (sub)
14282 return sub;
14284 return build1_loc (loc, INDIRECT_REF, type, t);
14287 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14289 tree
14290 fold_indirect_ref_loc (location_t loc, tree t)
14292 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14294 if (sub)
14295 return sub;
14296 else
14297 return t;
14300 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14301 whose result is ignored. The type of the returned tree need not be
14302 the same as the original expression. */
14304 tree
14305 fold_ignored_result (tree t)
14307 if (!TREE_SIDE_EFFECTS (t))
14308 return integer_zero_node;
14310 for (;;)
14311 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14313 case tcc_unary:
14314 t = TREE_OPERAND (t, 0);
14315 break;
14317 case tcc_binary:
14318 case tcc_comparison:
14319 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14320 t = TREE_OPERAND (t, 0);
14321 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14322 t = TREE_OPERAND (t, 1);
14323 else
14324 return t;
14325 break;
14327 case tcc_expression:
14328 switch (TREE_CODE (t))
14330 case COMPOUND_EXPR:
14331 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14332 return t;
14333 t = TREE_OPERAND (t, 0);
14334 break;
14336 case COND_EXPR:
14337 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14338 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14339 return t;
14340 t = TREE_OPERAND (t, 0);
14341 break;
14343 default:
14344 return t;
14346 break;
14348 default:
14349 return t;
14353 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14355 tree
14356 round_up_loc (location_t loc, tree value, unsigned int divisor)
14358 tree div = NULL_TREE;
14360 if (divisor == 1)
14361 return value;
14363 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14364 have to do anything. Only do this when we are not given a const,
14365 because in that case, this check is more expensive than just
14366 doing it. */
14367 if (TREE_CODE (value) != INTEGER_CST)
14369 div = build_int_cst (TREE_TYPE (value), divisor);
14371 if (multiple_of_p (TREE_TYPE (value), value, div))
14372 return value;
14375 /* If divisor is a power of two, simplify this to bit manipulation. */
14376 if (pow2_or_zerop (divisor))
14378 if (TREE_CODE (value) == INTEGER_CST)
14380 wide_int val = wi::to_wide (value);
14381 bool overflow_p;
14383 if ((val & (divisor - 1)) == 0)
14384 return value;
14386 overflow_p = TREE_OVERFLOW (value);
14387 val += divisor - 1;
14388 val &= (int) -divisor;
14389 if (val == 0)
14390 overflow_p = true;
14392 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14394 else
14396 tree t;
14398 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14399 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14400 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14401 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14404 else
14406 if (!div)
14407 div = build_int_cst (TREE_TYPE (value), divisor);
14408 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14409 value = size_binop_loc (loc, MULT_EXPR, value, div);
14412 return value;
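/* Worked example: rounding the constant 37 up to a multiple of 8 takes
   the power-of-two path above and computes (37 + 7) & -8, i.e.
   44 & ~7 = 40.  */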
14415 /* Likewise, but round down. */
14417 tree
14418 round_down_loc (location_t loc, tree value, int divisor)
14420 tree div = NULL_TREE;
14422 gcc_assert (divisor > 0);
14423 if (divisor == 1)
14424 return value;
14426 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14427 have to do anything. Only do this when we are not given a const,
14428 because in that case, this check is more expensive than just
14429 doing it. */
14430 if (TREE_CODE (value) != INTEGER_CST)
14432 div = build_int_cst (TREE_TYPE (value), divisor);
14434 if (multiple_of_p (TREE_TYPE (value), value, div))
14435 return value;
14438 /* If divisor is a power of two, simplify this to bit manipulation. */
14439 if (pow2_or_zerop (divisor))
14441 tree t;
14443 t = build_int_cst (TREE_TYPE (value), -divisor);
14444 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14446 else
14448 if (!div)
14449 div = build_int_cst (TREE_TYPE (value), divisor);
14450 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14451 value = size_binop_loc (loc, MULT_EXPR, value, div);
14454 return value;

/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  poly_int64_pod *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  poly_int64 bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &reversep,
				  &volatilep);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      core = TREE_OPERAND (exp, 0);
      STRIP_NOPS (core);
      *pbitpos = 0;
      *poffset = TREE_OPERAND (exp, 1);
      if (poly_int_tree_p (*poffset))
	{
	  poly_offset_int tem
	    = wi::sext (wi::to_poly_offset (*poffset),
			TYPE_PRECISION (TREE_TYPE (*poffset)));
	  tem <<= LOG2_BITS_PER_UNIT;
	  if (tem.to_shwi (pbitpos))
	    *poffset = NULL_TREE;
	}
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
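
/* Illustrative sketch: for the address &s.b of a field B at byte
   offset 4 within a hypothetical struct S,

     core = split_address_to_core_and_offset (addr, &bitpos, &offset);

   would return &s as CORE with BITPOS == 32 and OFFSET == NULL_TREE;
   for a POINTER_PLUS_EXPR p p+ n with non-constant N, CORE is P,
   BITPOS is 0 and OFFSET is the tree for N.  */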

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
{
  tree core1, core2;
  poly_int64 bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  poly_int64 bytepos1, bytepos2;
  if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
      || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += bytepos1 - bytepos2;
  return true;
}
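
/* Illustrative sketch: for hypothetical E1 == &a[3] and E2 == &a[1]
   with 4-byte elements, both addresses share the core &a, so

     ptr_difference_const (e1, e2, &diff)

   returns true with DIFF == 8.  Addresses based on different objects,
   or differing by a non-constant offset, yield false.  */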

/* Return OFF converted to a pointer offset type suitable as offset for
   POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */
tree
convert_to_ptrofftype_loc (location_t loc, tree off)
{
  return fold_convert_loc (loc, sizetype, off);
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
tree
fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, convert_to_ptrofftype_loc (loc, off));
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
tree
fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, size_int (off));
}
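
/* Illustrative sketch: offsetting a hypothetical pointer tree PTR by
   four bytes,

     fold_build_pointer_plus_hwi_loc (loc, ptr, 4)

   builds PTR p+ 4 with a sizetype offset, equivalent to
   fold_build_pointer_plus_loc (loc, ptr, size_int (4)).  */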

/* Return a pointer P to a NUL-terminated string representing the sequence
   of constant characters referred to by SRC (or a subsequence of such
   characters within it if SRC is a reference to a string plus some
   constant offset).  If STRLEN is non-null, store strlen(P) in *STRLEN.
   If STRSIZE is non-null, store in *STRSIZE the size of the array
   the string is stored in; in that case, even though P points to a NUL
   terminated string, SRC need not refer to one.  This can happen when
   SRC refers to a constant character array initialized to all non-NUL
   values, as in the C declaration: char a[4] = "1234";  */

const char *
c_getstr (tree src, unsigned HOST_WIDE_INT *strlen /* = NULL */,
	  unsigned HOST_WIDE_INT *strsize /* = NULL */)
{
  tree offset_node;

  if (strlen)
    *strlen = 0;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL;

  unsigned HOST_WIDE_INT offset = 0;
  if (offset_node != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (offset_node))
	return NULL;
      else
	offset = tree_to_uhwi (offset_node);
    }

  /* STRING_LENGTH is the size of the string literal, including any
     embedded NULs.  STRING_SIZE is the size of the array the string
     literal is stored in.  */
  unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
  unsigned HOST_WIDE_INT string_size = string_length;
  tree type = TREE_TYPE (src);
  if (tree size = TYPE_SIZE_UNIT (type))
    if (tree_fits_shwi_p (size))
      string_size = tree_to_uhwi (size);

  if (strlen)
    {
      /* Compute and store the length of the substring at OFFSET.
	 All offsets past the initial length refer to null strings.  */
      if (offset <= string_length)
	*strlen = string_length - offset;
      else
	*strlen = 0;
    }

  const char *string = TREE_STRING_POINTER (src);

  if (string_length == 0
      || offset >= string_size)
    return NULL;

  if (strsize)
    {
      /* Support even constant character arrays that aren't proper
	 NUL-terminated strings.  */
      *strsize = string_size;
    }
  else if (string[string_length - 1] != '\0')
    {
      /* Support only properly NUL-terminated strings but handle
	 consecutive strings within the same array, such as the six
	 substrings in "1\0002\0003".  */
      return NULL;
    }

  return offset <= string_length ? string + offset : "";
}
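
/* Illustrative sketch, restating the cases documented above: for a
   reference to "hello" plus a constant offset of 1,

     unsigned HOST_WIDE_INT len;
     const char *p = c_getstr (src, &len);

   yields P pointing at "ello"; for an unterminated array such as
   char a[4] = "1234", passing a non-null STRSIZE is what allows the
   call to succeed, with *STRSIZE set to the array size.  */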

/* Given a tree T, compute which bits in T may be nonzero.  */

wide_int
tree_nonzero_bits (const_tree t)
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return wi::to_wide (t);
    case SSA_NAME:
      return get_nonzero_bits (t);
    case NON_LVALUE_EXPR:
    case SAVE_EXPR:
      return tree_nonzero_bits (TREE_OPERAND (t, 0));
    case BIT_AND_EXPR:
      return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			  tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			 tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case COND_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
			 tree_nonzero_bits (TREE_OPERAND (t, 2)));
    CASE_CONVERT:
      return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			     TYPE_PRECISION (TREE_TYPE (t)),
			     TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
    case PLUS_EXPR:
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
	  if (wi::bit_and (nzbits1, nzbits2) == 0)
	    return wi::bit_or (nzbits1, nzbits2);
	}
      break;
    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
				       TYPE_PRECISION (type));
	  return wi::neg_p (arg1)
		 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
		 : wi::lshift (nzbits, arg1);
	}
      break;
    case RSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
				       TYPE_PRECISION (type));
	  return wi::neg_p (arg1)
		 ? wi::lshift (nzbits, -arg1)
		 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
	}
      break;
    default:
      break;
    }

  return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
}
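
/* Illustrative sketch: for a tree representing X & 0xFF the result has
   at most the low eight bits set, and constant shifts move the mask:

     tree_nonzero_bits (x & 0xff)        => 0x00ff (at most)
     tree_nonzero_bits ((x & 0xff) << 4) => 0x0ff0 (at most)

   Any unhandled code falls through to the all-ones mask returned at
   the end of the function.  */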

#if CHECKING_P

namespace selftest {

/* Helper functions for writing tests of folding trees.  */

/* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */

static void
assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
			     tree constant)
{
  ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
}

/* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
   wrapping WRAPPED_EXPR.  */

static void
assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
				 tree wrapped_expr)
{
  tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
  ASSERT_NE (wrapped_expr, result);
  ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
  ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
}

/* Verify that various arithmetic binary operations are folded
   correctly.  */

static void
test_arithmetic_folding ()
{
  tree type = integer_type_node;
  tree x = create_tmp_var_raw (type, "x");
  tree zero = build_zero_cst (type);
  tree one = build_int_cst (type, 1);

  /* Addition.  */
  /* 1 <-- (0 + 1) */
  assert_binop_folds_to_const (zero, PLUS_EXPR, one,
			       one);
  assert_binop_folds_to_const (one, PLUS_EXPR, zero,
			       one);

  /* (nonlvalue)x <-- (x + 0) */
  assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
				   x);

  /* Subtraction.  */
  /* 0 <-- (x - x) */
  assert_binop_folds_to_const (x, MINUS_EXPR, x,
			       zero);
  /* (nonlvalue)x <-- (x - 0) */
  assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
				   x);

  /* Multiplication.  */
  /* 0 <-- (x * 0) */
  assert_binop_folds_to_const (x, MULT_EXPR, zero,
			       zero);

  /* (nonlvalue)x <-- (x * 1) */
  assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
				   x);
}

/* Verify that various binary operations on vectors are folded
   correctly.  */

static void
test_vector_folding ()
{
  tree inner_type = integer_type_node;
  tree type = build_vector_type (inner_type, 4);
  tree zero = build_zero_cst (type);
  tree one = build_one_cst (type);

  /* Verify equality tests that return a scalar boolean result.  */
  tree res_type = boolean_type_node;
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
}

/* Verify folding of VEC_DUPLICATE_EXPRs.  */

static void
test_vec_duplicate_folding ()
{
  scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
  machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
  /* This will be 1 if VEC_MODE isn't a vector mode.  */
  poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);

  tree type = build_vector_type (ssizetype, nunits);
  tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
  tree dup5_cst = build_vector_from_val (type, ssize_int (5));
  ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
}

/* Run all of the selftests within this file.  */

void
fold_const_c_tests ()
{
  test_arithmetic_folding ();
  test_vector_folding ();
  test_vec_duplicate_folding ();
}

} // namespace selftest

#endif /* CHECKING_P */