/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "params.h"
75 #include "tree-into-ssa.h"
76 #include "md5.h"
77 #include "case-cfn-macros.h"
78 #include "stringpool.h"
79 #include "tree-vrp.h"
80 #include "tree-ssanames.h"
81 #include "selftest.h"
82 #include "stringpool.h"
83 #include "attribs.h"
84 #include "tree-vector-builder.h"
85 #include "vec-perm-indices.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
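/* For illustration: the encoding packs LT into bit 0, EQ into bit 1,
   GT into bit 2 and UNORD into bit 3, so combining (OR) and
   intersecting (AND) comparisons becomes plain bit arithmetic, e.g.:

     COMPCODE_LE   == (COMPCODE_LT | COMPCODE_EQ)    3 == (1 | 2)
     COMPCODE_LTGT == (COMPCODE_LT | COMPCODE_GT)    5 == (1 | 4)
     (COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ      (3 & 6) == 2  */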
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
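/* For example, with INTEGER_CST operands 12 and 4 this returns an
   INTEGER_CST 3, while with operands 12 and 5 it returns NULL_TREE
   because the remainder is nonzero.  */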
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
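/* A minimal usage sketch of the deferral API (hypothetical caller;
   we_actually_use and the warning code are just examples):

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     bool used = folded != NULL_TREE && we_actually_use (folded);
     fold_undefer_overflow_warnings (used, stmt,
				     WARN_STRICT_OVERFLOW_MISC_OP_VAR_LOOPS);

   Any fold_overflow_warning raised while folding is queued, and is
   issued at most once by the undefer call, only when USED is true.  */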
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
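/* For example, for a 32-bit signed type this returns false only for
   INT_MIN (just the sign bit set), since -INT_MIN is not representable;
   every other value, including INT_MAX, can be negated safely.  */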
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, but negating one operand does
	 if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
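/* For instance, for unsigned (wrapping) arithmetic negate_expr_p
   returns true for x - y, because -(x - y) can be rewritten as y - x;
   for signed types without -fwrapv the same rewrite is rejected since
   it could introduce undefined overflow.  */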
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
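/* Two small examples, assuming A is a plain variable and the constants
   are INTEGER_CSTs:

     IN = A + 5, CODE = PLUS_EXPR:  returns A, *LITP = 5
     IN = A - 5, CODE = PLUS_EXPR:  returns A, *MINUS_LITP = 5

   i.e. a subtracted literal is reported via *MINUS_LITP rather than
   being negated.  */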
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
   a new constant in RES.  Return FALSE if we don't know how to
   evaluate CODE at compile-time.  */

bool
wide_int_binop (wide_int &res,
		enum tree_code code, const wide_int &arg1, const wide_int &arg2,
		signop sign, wi::overflow_type *overflow)
{
  wide_int tmp;
  *overflow = wi::OVF_NONE;
  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}
      else
	tmp = arg2;

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, tmp, sign);
      else
	res = wi::lshift (arg1, tmp);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}
      else
	tmp = arg2;

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, tmp);
      else
	res = wi::lrotate (arg1, tmp);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_round (arg1, arg2, sign, overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_round (arg1, arg2, sign, overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return false;
    }
  return true;
}
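/* A minimal usage sketch (local names are just for the example):

     wide_int a = wi::shwi (6, 32);
     wide_int b = wi::shwi (7, 32);
     wide_int r;
     wi::overflow_type ovf;
     if (wide_int_binop (r, MULT_EXPR, a, b, SIGNED, &ovf))
       ... r holds 42 in 32-bit precision and ovf is wi::OVF_NONE ...  */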
/* Combine two poly_ints ARG1 and ARG2 under operation CODE to
   produce a new constant in RES.  Return FALSE if we don't know how
   to evaluate CODE at compile-time.  */

static bool
poly_int_binop (poly_wide_int &res, enum tree_code code,
		const_tree arg1, const_tree arg2,
		signop sign, wi::overflow_type *overflow)
{
  gcc_assert (NUM_POLY_INT_COEFFS != 1);
  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
  switch (code)
    {
    case PLUS_EXPR:
      res = wi::add (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MULT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg1),
		       wi::to_wide (arg2), sign, overflow);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg2),
		       wi::to_wide (arg1), sign, overflow);
      else
	return false;
      break;

    case LSHIFT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
      else
	return false;
      break;

    case BIT_IOR_EXPR:
      if (TREE_CODE (arg2) != INTEGER_CST
	  || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			 &res))
	return false;
      break;

    default:
      return false;
    }
  return true;
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new constant.  Return NULL_TREE if we don't know how to
   evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
		 int overflowable)
{
  bool success = false;
  poly_wide_int poly_res;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      wide_int warg1 = wi::to_wide (arg1), res;
      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
      success = wide_int_binop (res, code, warg1, warg2, sign, &overflow);
      poly_res = res;
    }
  else if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    success = poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow);
  if (success)
    return force_fit_type (type, poly_res, overflowable,
			   (((sign == SIGNED || overflowable == -1)
			     && overflow)
			    | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
  return NULL_TREE;
}
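/* For example, folding 2 + 3 in integer_type_node (a hypothetical
   caller's sketch):

     tree a = build_int_cst (integer_type_node, 2);
     tree b = build_int_cst (integer_type_node, 3);
     tree sum = int_const_binop (PLUS_EXPR, a, b, 1);   -> INTEGER_CST 5  */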
/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
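/* For example, left shifts distribute over addition in operand 1,
   since (a + b) << c == (a << c) + (b << c) in modulo arithmetic,
   but not in operand 2: a << (b + c) != (a << b) + (a << c).  */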
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	      a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
	    implies
	      (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	     a3 - a2 == a2 - a1
	   implies:
	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
      else
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}
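/* For example (hypothetical caller):

     tree a = build_real (double_type_node, dconst1);
     tree b = build_real (double_type_node, dconst2);
     const_binop (PLUS_EXPR, a, b)   -> REAL_CST 3.0

   whereas an unhandled code, or a signaling-NaN operand under
   -fsignaling-nans, yields NULL_TREE.  */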
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case VEC_SERIES_EXPR:
      if (CONSTANT_CLASS_P (arg1)
	  && CONSTANT_CLASS_P (arg2))
	return build_vec_series (type, arg1, arg2);
      return NULL_TREE;

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
	{
	  poly_offset_int res = (wi::to_poly_offset (arg1)
				 - wi::to_poly_offset (arg2));
	  return force_fit_type (type, res, 1,
				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
	}
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;

	out_nelts = in_nelts * 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      ? NOP_EXPR
				      : code == VEC_PACK_FLOAT_EXPR
				      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	tree_vector_builder elts (type, out_nelts, 1);
	for (out = 0; out < out_nelts; out++)
	  {
	    unsigned int in = (out << scale) + ofs;
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constant is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR
      && code != ABSU_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
    case ABSU_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (POLY_INT_CST_P (arg0))
	return wide_int_to_tree (type, -poly_int_cst_value (arg0));
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elem;

	  /* This can cope with stepped encodings because ~x == -1 - x.  */
	  tree_vector_builder elements;
	  elements.new_unary_operation (type, arg0, true);
	  unsigned int i, count = elements.encoded_nelts ();
	  for (i = 0; i < count; ++i)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements.quick_push (elem);
	    }
	  if (i == count)
	    return elements.build ();
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
	  return NULL_TREE;
	out_nelts = in_nelts / 2;
	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));

	unsigned int offset = 0;
	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR
				   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
	  offset = out_nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else if (code == VEC_UNPACK_FLOAT_LO_EXPR
		 || code == VEC_UNPACK_FLOAT_HI_EXPR)
	  subcode = FLOAT_EXPR;
	else
	  subcode = FIX_TRUNC_EXPR;

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
					   VECTOR_CST_ELT (arg0, i + offset));
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_DUPLICATE_EXPR:
      if (CONSTANT_CLASS_P (arg0))
	return build_vector_from_val (type, arg0);
      return NULL_TREE;

    default:
      break;
    }

  return NULL_TREE;
}
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (poly_int64 number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two poly_int constants faster.  */
  if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      tree res = int_const_binop (code, arg0, arg1, -1);
      if (res != NULL_TREE)
	return res;
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
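/* For example, via the size_binop macro (which wraps size_binop_loc
   with UNKNOWN_LOCATION):

     size_binop (PLUS_EXPR, size_int (4), size_int (8))  -> sizetype 12  */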
1941 /* Given two values, either both of sizetype or both of bitsizetype,
1942 compute the difference between the two values. Return the value
1943 in signed type corresponding to the type of the operands. */
1945 tree
1946 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1948 tree type = TREE_TYPE (arg0);
1949 tree ctype;
1951 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1952 TREE_TYPE (arg1)));
1954 /* If the type is already signed, just do the simple thing. */
1955 if (!TYPE_UNSIGNED (type))
1956 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1958 if (type == sizetype)
1959 ctype = ssizetype;
1960 else if (type == bitsizetype)
1961 ctype = sbitsizetype;
1962 else
1963 ctype = signed_type_for (type);
1965 /* If either operand is not a constant, do the conversions to the signed
1966 type and subtract. The hardware will do the right thing with any
1967 overflow in the subtraction. */
1968 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1969 return size_binop_loc (loc, MINUS_EXPR,
1970 fold_convert_loc (loc, ctype, arg0),
1971 fold_convert_loc (loc, ctype, arg1));
1973 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1974 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1975 overflow) and negate (which can't either). Special-case a result
1976 of zero while we're here. */
1977 if (tree_int_cst_equal (arg0, arg1))
1978 return build_int_cst (ctype, 0);
1979 else if (tree_int_cst_lt (arg1, arg0))
1980 return fold_convert_loc (loc, ctype,
1981 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1982 else
1983 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1984 fold_convert_loc (loc, ctype,
1985 size_binop_loc (loc,
1986 MINUS_EXPR,
1987 arg1, arg0)));
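The compare-before-subtract dance exists because the operands are unsigned: subtracting the larger value from the smaller would wrap before the conversion to the signed type. A minimal standalone illustration of the same idea (not GCC code; sdiff is a hypothetical helper):

#include <stddef.h>
#include <stdio.h>

/* Signed difference of two unsigned sizes: always subtract the
   smaller from the larger so the unsigned subtraction cannot wrap,
   then negate if the operands were in the other order.  */
static long long
sdiff (size_t a, size_t b)
{
  return a >= b ? (long long) (a - b) : -(long long) (b - a);
}

int main (void)
{
  printf ("%lld %lld\n", sdiff (10, 3), sdiff (3, 10)); /* 7 -7 */
  return 0;
}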
1990 /* A subroutine of fold_convert_const handling conversions of an
1991 INTEGER_CST to another integer type. */
1993 static tree
1994 fold_convert_const_int_from_int (tree type, const_tree arg1)
1996 /* Given an integer constant, make new constant with new type,
1997 appropriately sign-extended or truncated. Use widest_int
1998 so that any extension is done according to ARG1's type. */
1999 return force_fit_type (type, wi::to_widest (arg1),
2000 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2001 TREE_OVERFLOW (arg1));
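In source-language terms the effect is ordinary truncation or sign extension of the constant's value, as in this standalone illustration (not GCC code; the int8_t result assumes the usual two's-complement conversion behavior):

#include <stdint.h>
#include <stdio.h>

int main (void)
{
  int32_t wide = -200;
  int8_t narrow = (int8_t) wide;   /* truncated to 8 bits: 56 */
  int64_t back = narrow;           /* sign-extended from the new
                                      type: 56, not -200 */
  printf ("%d %d %lld\n", wide, narrow, (long long) back);
  return 0;
}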
2004 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2005 to an integer type. */
2007 static tree
2008 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2010 bool overflow = false;
2011 tree t;
2013 /* The following code implements the floating point to integer
2014 conversion rules required by the Java Language Specification:
2015 IEEE NaNs are mapped to zero and values that overflow
2016 the target precision saturate, i.e. values greater than
2017 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2018 are mapped to INT_MIN. These semantics are allowed by the
2019 C and C++ standards that simply state that the behavior of
2020 FP-to-integer conversion is unspecified upon overflow. */
2022 wide_int val;
2023 REAL_VALUE_TYPE r;
2024 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2026 switch (code)
2028 case FIX_TRUNC_EXPR:
2029 real_trunc (&r, VOIDmode, &x);
2030 break;
2032 default:
2033 gcc_unreachable ();
2036 /* If R is NaN, return zero and show we have an overflow. */
2037 if (REAL_VALUE_ISNAN (r))
2039 overflow = true;
2040 val = wi::zero (TYPE_PRECISION (type));
2043 /* See if R is less than the lower bound or greater than the
2044 upper bound. */
2046 if (! overflow)
2048 tree lt = TYPE_MIN_VALUE (type);
2049 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2050 if (real_less (&r, &l))
2052 overflow = true;
2053 val = wi::to_wide (lt);
2057 if (! overflow)
2059 tree ut = TYPE_MAX_VALUE (type);
2060 if (ut)
2062 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2063 if (real_less (&u, &r))
2065 overflow = true;
2066 val = wi::to_wide (ut);
2071 if (! overflow)
2072 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2074 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2075 return t;
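A standalone model of the saturating semantics described above (illustration only, not the folder itself; sat_dtoi is a hypothetical name):

#include <limits.h>
#include <math.h>
#include <stdio.h>

/* NaN maps to zero; values below INT_MIN or above INT_MAX clamp to
   the nearest bound; everything else truncates normally.  */
static int
sat_dtoi (double x)
{
  if (isnan (x))
    return 0;
  if (x <= (double) INT_MIN)
    return INT_MIN;
  if (x >= (double) INT_MAX)
    return INT_MAX;
  return (int) x;
}

int main (void)
{
  printf ("%d %d %d\n", sat_dtoi (NAN), sat_dtoi (1e30), sat_dtoi (-1e30));
  return 0;
}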
2078 /* A subroutine of fold_convert_const handling conversions of a
2079 FIXED_CST to an integer type. */
2081 static tree
2082 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2084 tree t;
2085 double_int temp, temp_trunc;
2086 scalar_mode mode;
2088 /* Right shift FIXED_CST to temp by fbit. */
2089 temp = TREE_FIXED_CST (arg1).data;
2090 mode = TREE_FIXED_CST (arg1).mode;
2091 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2093 temp = temp.rshift (GET_MODE_FBIT (mode),
2094 HOST_BITS_PER_DOUBLE_INT,
2095 SIGNED_FIXED_POINT_MODE_P (mode));
2097 /* Left shift temp to temp_trunc by fbit. */
2098 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2099 HOST_BITS_PER_DOUBLE_INT,
2100 SIGNED_FIXED_POINT_MODE_P (mode));
2102 else
2104 temp = double_int_zero;
2105 temp_trunc = double_int_zero;
2108 /* If FIXED_CST is negative, we need to round the value toward 0:
2109 if any fractional bits were discarded above, add 1 to TEMP. */
2110 if (SIGNED_FIXED_POINT_MODE_P (mode)
2111 && temp_trunc.is_negative ()
2112 && TREE_FIXED_CST (arg1).data != temp_trunc)
2113 temp += double_int_one;
2115 /* Given a fixed-point constant, make new constant with new type,
2116 appropriately sign-extended or truncated. */
2117 t = force_fit_type (type, temp, -1,
2118 (temp.is_negative ()
2119 && (TYPE_UNSIGNED (type)
2120 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2121 | TREE_OVERFLOW (arg1));
2123 return t;
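The rounding fix-up is easiest to see on a concrete fixed-point format. A standalone sketch using Q16.16 values in an int32_t (an assumed format, for illustration only; it also assumes the common arithmetic behavior of >> on negative operands):

#include <stdint.h>
#include <stdio.h>

/* Convert signed Q16.16 to an integer, rounding toward zero.  The
   arithmetic right shift rounds toward negative infinity, so when a
   negative value had nonzero fraction bits we add 1 back, just as
   the double_int code above does.  */
static int32_t
q16_to_int (int32_t q)
{
  int32_t t = q >> 16;
  if (q < 0 && (q & 0xFFFF) != 0)
    t += 1;
  return t;
}

int main (void)
{
  printf ("%d %d\n", q16_to_int (-229376),   /* -3.5 -> -3 */
          q16_to_int (229376));              /*  3.5 ->  3 */
  return 0;
}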
2126 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2127 to another floating point type. */
2129 static tree
2130 fold_convert_const_real_from_real (tree type, const_tree arg1)
2132 REAL_VALUE_TYPE value;
2133 tree t;
2135 /* Don't perform the operation if flag_signaling_nans is on
2136 and the operand is a signaling NaN. */
2137 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2138 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2139 return NULL_TREE;
2141 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2142 t = build_real (type, value);
2144 /* If converting an infinity or NAN to a representation that doesn't
2145 have one, set the overflow bit so that we can produce some kind of
2146 error message at the appropriate point if necessary. It's not the
2147 most user-friendly message, but it's better than nothing. */
2148 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2149 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2150 TREE_OVERFLOW (t) = 1;
2151 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2152 && !MODE_HAS_NANS (TYPE_MODE (type)))
2153 TREE_OVERFLOW (t) = 1;
2154 /* Regular overflow, conversion produced an infinity in a mode that
2155 can't represent them. */
2156 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2157 && REAL_VALUE_ISINF (value)
2158 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2159 TREE_OVERFLOW (t) = 1;
2160 else
2161 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2162 return t;
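The "regular overflow" case is easy to reproduce: a value that is finite in the source format can narrow to an infinity. A standalone demonstration (not GCC code):

#include <math.h>
#include <stdio.h>

int main (void)
{
  double d = 1e300;        /* finite as a double */
  float f = (float) d;     /* overflows to +inf as a float */
  printf ("d finite: %d, f infinite: %d\n",
          isfinite (d) != 0, isinf (f) != 0);
  return 0;
}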
2165 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2166 to a floating point type. */
2168 static tree
2169 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2171 REAL_VALUE_TYPE value;
2172 tree t;
2174 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2175 &TREE_FIXED_CST (arg1));
2176 t = build_real (type, value);
2178 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2179 return t;
2182 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2183 to another fixed-point type. */
2185 static tree
2186 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2188 FIXED_VALUE_TYPE value;
2189 tree t;
2190 bool overflow_p;
2192 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2193 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2194 t = build_fixed (type, value);
2196 /* Propagate overflow flags. */
2197 if (overflow_p | TREE_OVERFLOW (arg1))
2198 TREE_OVERFLOW (t) = 1;
2199 return t;
2202 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2203 to a fixed-point type. */
2205 static tree
2206 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2208 FIXED_VALUE_TYPE value;
2209 tree t;
2210 bool overflow_p;
2211 double_int di;
2213 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2215 di.low = TREE_INT_CST_ELT (arg1, 0);
2216 if (TREE_INT_CST_NUNITS (arg1) == 1)
2217 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2218 else
2219 di.high = TREE_INT_CST_ELT (arg1, 1);
2221 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2222 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2223 TYPE_SATURATING (type));
2224 t = build_fixed (type, value);
2226 /* Propagate overflow flags. */
2227 if (overflow_p | TREE_OVERFLOW (arg1))
2228 TREE_OVERFLOW (t) = 1;
2229 return t;
2232 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2233 to a fixed-point type. */
2235 static tree
2236 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2238 FIXED_VALUE_TYPE value;
2239 tree t;
2240 bool overflow_p;
2242 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2243 &TREE_REAL_CST (arg1),
2244 TYPE_SATURATING (type));
2245 t = build_fixed (type, value);
2247 /* Propagate overflow flags. */
2248 if (overflow_p | TREE_OVERFLOW (arg1))
2249 TREE_OVERFLOW (t) = 1;
2250 return t;
2253 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2254 type TYPE. If no simplification can be done return NULL_TREE. */
2256 static tree
2257 fold_convert_const (enum tree_code code, tree type, tree arg1)
2259 tree arg_type = TREE_TYPE (arg1);
2260 if (arg_type == type)
2261 return arg1;
2263 /* We can't widen types, since the runtime value could overflow the
2264 original type before being extended to the new type. */
2265 if (POLY_INT_CST_P (arg1)
2266 && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2267 && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2268 return build_poly_int_cst (type,
2269 poly_wide_int::from (poly_int_cst_value (arg1),
2270 TYPE_PRECISION (type),
2271 TYPE_SIGN (arg_type)));
2273 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2274 || TREE_CODE (type) == OFFSET_TYPE)
2276 if (TREE_CODE (arg1) == INTEGER_CST)
2277 return fold_convert_const_int_from_int (type, arg1);
2278 else if (TREE_CODE (arg1) == REAL_CST)
2279 return fold_convert_const_int_from_real (code, type, arg1);
2280 else if (TREE_CODE (arg1) == FIXED_CST)
2281 return fold_convert_const_int_from_fixed (type, arg1);
2283 else if (TREE_CODE (type) == REAL_TYPE)
2285 if (TREE_CODE (arg1) == INTEGER_CST)
2286 return build_real_from_int_cst (type, arg1);
2287 else if (TREE_CODE (arg1) == REAL_CST)
2288 return fold_convert_const_real_from_real (type, arg1);
2289 else if (TREE_CODE (arg1) == FIXED_CST)
2290 return fold_convert_const_real_from_fixed (type, arg1);
2292 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2294 if (TREE_CODE (arg1) == FIXED_CST)
2295 return fold_convert_const_fixed_from_fixed (type, arg1);
2296 else if (TREE_CODE (arg1) == INTEGER_CST)
2297 return fold_convert_const_fixed_from_int (type, arg1);
2298 else if (TREE_CODE (arg1) == REAL_CST)
2299 return fold_convert_const_fixed_from_real (type, arg1);
2301 else if (TREE_CODE (type) == VECTOR_TYPE)
2303 if (TREE_CODE (arg1) == VECTOR_CST
2304 && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2306 tree elttype = TREE_TYPE (type);
2307 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2308 /* We can't handle steps directly when extending, since the
2309 values need to wrap at the original precision first. */
2310 bool step_ok_p
2311 = (INTEGRAL_TYPE_P (elttype)
2312 && INTEGRAL_TYPE_P (arg1_elttype)
2313 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2314 tree_vector_builder v;
2315 if (!v.new_unary_operation (type, arg1, step_ok_p))
2316 return NULL_TREE;
2317 unsigned int len = v.encoded_nelts ();
2318 for (unsigned int i = 0; i < len; ++i)
2320 tree elt = VECTOR_CST_ELT (arg1, i);
2321 tree cvt = fold_convert_const (code, elttype, elt);
2322 if (cvt == NULL_TREE)
2323 return NULL_TREE;
2324 v.quick_push (cvt);
2326 return v.build ();
2329 return NULL_TREE;
2332 /* Construct a vector of zero elements of vector type TYPE. */
2334 static tree
2335 build_zero_vector (tree type)
2337 tree t;
2339 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2340 return build_vector_from_val (type, t);
2343 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2345 bool
2346 fold_convertible_p (const_tree type, const_tree arg)
2348 tree orig = TREE_TYPE (arg);
2350 if (type == orig)
2351 return true;
2353 if (TREE_CODE (arg) == ERROR_MARK
2354 || TREE_CODE (type) == ERROR_MARK
2355 || TREE_CODE (orig) == ERROR_MARK)
2356 return false;
2358 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2359 return true;
2361 switch (TREE_CODE (type))
2363 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2364 case POINTER_TYPE: case REFERENCE_TYPE:
2365 case OFFSET_TYPE:
2366 return (INTEGRAL_TYPE_P (orig)
2367 || (POINTER_TYPE_P (orig)
2368 && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2369 || TREE_CODE (orig) == OFFSET_TYPE);
2371 case REAL_TYPE:
2372 case FIXED_POINT_TYPE:
2373 case VECTOR_TYPE:
2374 case VOID_TYPE:
2375 return TREE_CODE (type) == TREE_CODE (orig);
2377 default:
2378 return false;
2382 /* Convert expression ARG to type TYPE. Used by the middle-end for
2383 simple conversions in preference to calling the front-end's convert. */
2385 tree
2386 fold_convert_loc (location_t loc, tree type, tree arg)
2388 tree orig = TREE_TYPE (arg);
2389 tree tem;
2391 if (type == orig)
2392 return arg;
2394 if (TREE_CODE (arg) == ERROR_MARK
2395 || TREE_CODE (type) == ERROR_MARK
2396 || TREE_CODE (orig) == ERROR_MARK)
2397 return error_mark_node;
2399 switch (TREE_CODE (type))
2401 case POINTER_TYPE:
2402 case REFERENCE_TYPE:
2403 /* Handle conversions between pointers to different address spaces. */
2404 if (POINTER_TYPE_P (orig)
2405 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2406 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2407 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2408 /* fall through */
2410 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2411 case OFFSET_TYPE:
2412 if (TREE_CODE (arg) == INTEGER_CST)
2414 tem = fold_convert_const (NOP_EXPR, type, arg);
2415 if (tem != NULL_TREE)
2416 return tem;
2418 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2419 || TREE_CODE (orig) == OFFSET_TYPE)
2420 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2421 if (TREE_CODE (orig) == COMPLEX_TYPE)
2422 return fold_convert_loc (loc, type,
2423 fold_build1_loc (loc, REALPART_EXPR,
2424 TREE_TYPE (orig), arg));
2425 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2426 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2427 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2429 case REAL_TYPE:
2430 if (TREE_CODE (arg) == INTEGER_CST)
2432 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2433 if (tem != NULL_TREE)
2434 return tem;
2436 else if (TREE_CODE (arg) == REAL_CST)
2438 tem = fold_convert_const (NOP_EXPR, type, arg);
2439 if (tem != NULL_TREE)
2440 return tem;
2442 else if (TREE_CODE (arg) == FIXED_CST)
2444 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2445 if (tem != NULL_TREE)
2446 return tem;
2449 switch (TREE_CODE (orig))
2451 case INTEGER_TYPE:
2452 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2453 case POINTER_TYPE: case REFERENCE_TYPE:
2454 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2456 case REAL_TYPE:
2457 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2459 case FIXED_POINT_TYPE:
2460 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2462 case COMPLEX_TYPE:
2463 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2464 return fold_convert_loc (loc, type, tem);
2466 default:
2467 gcc_unreachable ();
2470 case FIXED_POINT_TYPE:
2471 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2472 || TREE_CODE (arg) == REAL_CST)
2474 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2475 if (tem != NULL_TREE)
2476 goto fold_convert_exit;
2479 switch (TREE_CODE (orig))
2481 case FIXED_POINT_TYPE:
2482 case INTEGER_TYPE:
2483 case ENUMERAL_TYPE:
2484 case BOOLEAN_TYPE:
2485 case REAL_TYPE:
2486 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2488 case COMPLEX_TYPE:
2489 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2490 return fold_convert_loc (loc, type, tem);
2492 default:
2493 gcc_unreachable ();
2496 case COMPLEX_TYPE:
2497 switch (TREE_CODE (orig))
2499 case INTEGER_TYPE:
2500 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2501 case POINTER_TYPE: case REFERENCE_TYPE:
2502 case REAL_TYPE:
2503 case FIXED_POINT_TYPE:
2504 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2505 fold_convert_loc (loc, TREE_TYPE (type), arg),
2506 fold_convert_loc (loc, TREE_TYPE (type),
2507 integer_zero_node));
2508 case COMPLEX_TYPE:
2510 tree rpart, ipart;
2512 if (TREE_CODE (arg) == COMPLEX_EXPR)
2514 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2515 TREE_OPERAND (arg, 0));
2516 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2517 TREE_OPERAND (arg, 1));
2518 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2521 arg = save_expr (arg);
2522 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2523 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2524 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2525 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2526 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2529 default:
2530 gcc_unreachable ();
2533 case VECTOR_TYPE:
2534 if (integer_zerop (arg))
2535 return build_zero_vector (type);
2536 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2537 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2538 || TREE_CODE (orig) == VECTOR_TYPE);
2539 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2541 case VOID_TYPE:
2542 tem = fold_ignored_result (arg);
2543 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2545 default:
2546 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2547 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2548 gcc_unreachable ();
2550 fold_convert_exit:
2551 protected_set_expr_location_unshare (tem, loc);
2552 return tem;
2555 /* Return false if expr can be assumed not to be an lvalue, true
2556 otherwise. */
2558 static bool
2559 maybe_lvalue_p (const_tree x)
2561 /* We only need to wrap lvalue tree codes. */
2562 switch (TREE_CODE (x))
2564 case VAR_DECL:
2565 case PARM_DECL:
2566 case RESULT_DECL:
2567 case LABEL_DECL:
2568 case FUNCTION_DECL:
2569 case SSA_NAME:
2571 case COMPONENT_REF:
2572 case MEM_REF:
2573 case INDIRECT_REF:
2574 case ARRAY_REF:
2575 case ARRAY_RANGE_REF:
2576 case BIT_FIELD_REF:
2577 case OBJ_TYPE_REF:
2579 case REALPART_EXPR:
2580 case IMAGPART_EXPR:
2581 case PREINCREMENT_EXPR:
2582 case PREDECREMENT_EXPR:
2583 case SAVE_EXPR:
2584 case TRY_CATCH_EXPR:
2585 case WITH_CLEANUP_EXPR:
2586 case COMPOUND_EXPR:
2587 case MODIFY_EXPR:
2588 case TARGET_EXPR:
2589 case COND_EXPR:
2590 case BIND_EXPR:
2591 break;
2593 default:
2594 /* Assume the worst for front-end tree codes. */
2595 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2596 break;
2597 return false;
2600 return true;
2603 /* Return an expr equal to X but certainly not valid as an lvalue. */
2605 tree
2606 non_lvalue_loc (location_t loc, tree x)
2608 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2609 us. */
2610 if (in_gimple_form)
2611 return x;
2613 if (! maybe_lvalue_p (x))
2614 return x;
2615 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2618 /* When pedantic, return an expr equal to X but certainly not valid as a
2619 pedantic lvalue. Otherwise, return X. */
2621 static tree
2622 pedantic_non_lvalue_loc (location_t loc, tree x)
2624 return protected_set_expr_location_unshare (x, loc);
2627 /* Given a tree comparison code, return the code that is the logical inverse.
2628 It is generally not safe to do this for floating-point comparisons, except
2629 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2630 ERROR_MARK in this case. */
2632 enum tree_code
2633 invert_tree_comparison (enum tree_code code, bool honor_nans)
2635 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2636 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2637 return ERROR_MARK;
2639 switch (code)
2641 case EQ_EXPR:
2642 return NE_EXPR;
2643 case NE_EXPR:
2644 return EQ_EXPR;
2645 case GT_EXPR:
2646 return honor_nans ? UNLE_EXPR : LE_EXPR;
2647 case GE_EXPR:
2648 return honor_nans ? UNLT_EXPR : LT_EXPR;
2649 case LT_EXPR:
2650 return honor_nans ? UNGE_EXPR : GE_EXPR;
2651 case LE_EXPR:
2652 return honor_nans ? UNGT_EXPR : GT_EXPR;
2653 case LTGT_EXPR:
2654 return UNEQ_EXPR;
2655 case UNEQ_EXPR:
2656 return LTGT_EXPR;
2657 case UNGT_EXPR:
2658 return LE_EXPR;
2659 case UNGE_EXPR:
2660 return LT_EXPR;
2661 case UNLT_EXPR:
2662 return GE_EXPR;
2663 case UNLE_EXPR:
2664 return GT_EXPR;
2665 case ORDERED_EXPR:
2666 return UNORDERED_EXPR;
2667 case UNORDERED_EXPR:
2668 return ORDERED_EXPR;
2669 default:
2670 gcc_unreachable ();
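The NaN handling is the subtle part: !(x < y) is not the same as x >= y once a NaN is involved, which is why LT_EXPR inverts to UNGE_EXPR rather than GE_EXPR when NaNs are honored. A standalone demonstration (not GCC code):

#include <math.h>
#include <stdio.h>

int main (void)
{
  double x = 1.0, y = NAN;
  /* Prints 1 and 0: the negation is true, but the swapped-sense
     comparison is false.  */
  printf ("!(x < y) = %d, x >= y = %d\n", !(x < y), x >= y);
  return 0;
}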
2674 /* Similar, but return the comparison that results if the operands are
2675 swapped. This is safe for floating-point. */
2677 enum tree_code
2678 swap_tree_comparison (enum tree_code code)
2680 switch (code)
2682 case EQ_EXPR:
2683 case NE_EXPR:
2684 case ORDERED_EXPR:
2685 case UNORDERED_EXPR:
2686 case LTGT_EXPR:
2687 case UNEQ_EXPR:
2688 return code;
2689 case GT_EXPR:
2690 return LT_EXPR;
2691 case GE_EXPR:
2692 return LE_EXPR;
2693 case LT_EXPR:
2694 return GT_EXPR;
2695 case LE_EXPR:
2696 return GE_EXPR;
2697 case UNGT_EXPR:
2698 return UNLT_EXPR;
2699 case UNGE_EXPR:
2700 return UNLE_EXPR;
2701 case UNLT_EXPR:
2702 return UNGT_EXPR;
2703 case UNLE_EXPR:
2704 return UNGE_EXPR;
2705 default:
2706 gcc_unreachable ();
2711 /* Convert a comparison tree code from an enum tree_code representation
2712 into a compcode bit-based encoding. This function is the inverse of
2713 compcode_to_comparison. */
2715 static enum comparison_code
2716 comparison_to_compcode (enum tree_code code)
2718 switch (code)
2720 case LT_EXPR:
2721 return COMPCODE_LT;
2722 case EQ_EXPR:
2723 return COMPCODE_EQ;
2724 case LE_EXPR:
2725 return COMPCODE_LE;
2726 case GT_EXPR:
2727 return COMPCODE_GT;
2728 case NE_EXPR:
2729 return COMPCODE_NE;
2730 case GE_EXPR:
2731 return COMPCODE_GE;
2732 case ORDERED_EXPR:
2733 return COMPCODE_ORD;
2734 case UNORDERED_EXPR:
2735 return COMPCODE_UNORD;
2736 case UNLT_EXPR:
2737 return COMPCODE_UNLT;
2738 case UNEQ_EXPR:
2739 return COMPCODE_UNEQ;
2740 case UNLE_EXPR:
2741 return COMPCODE_UNLE;
2742 case UNGT_EXPR:
2743 return COMPCODE_UNGT;
2744 case LTGT_EXPR:
2745 return COMPCODE_LTGT;
2746 case UNGE_EXPR:
2747 return COMPCODE_UNGE;
2748 default:
2749 gcc_unreachable ();
2753 /* Convert a compcode bit-based encoding of a comparison operator back
2754 to GCC's enum tree_code representation. This function is the
2755 inverse of comparison_to_compcode. */
2757 static enum tree_code
2758 compcode_to_comparison (enum comparison_code code)
2760 switch (code)
2762 case COMPCODE_LT:
2763 return LT_EXPR;
2764 case COMPCODE_EQ:
2765 return EQ_EXPR;
2766 case COMPCODE_LE:
2767 return LE_EXPR;
2768 case COMPCODE_GT:
2769 return GT_EXPR;
2770 case COMPCODE_NE:
2771 return NE_EXPR;
2772 case COMPCODE_GE:
2773 return GE_EXPR;
2774 case COMPCODE_ORD:
2775 return ORDERED_EXPR;
2776 case COMPCODE_UNORD:
2777 return UNORDERED_EXPR;
2778 case COMPCODE_UNLT:
2779 return UNLT_EXPR;
2780 case COMPCODE_UNEQ:
2781 return UNEQ_EXPR;
2782 case COMPCODE_UNLE:
2783 return UNLE_EXPR;
2784 case COMPCODE_UNGT:
2785 return UNGT_EXPR;
2786 case COMPCODE_LTGT:
2787 return LTGT_EXPR;
2788 case COMPCODE_UNGE:
2789 return UNGE_EXPR;
2790 default:
2791 gcc_unreachable ();
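The point of the encoding is that AND and OR of comparisons become plain bit arithmetic on the LT/EQ/GT/UNORD bits. A standalone sketch (the CC_* names below are local stand-ins for the COMPCODE_ values):

#include <stdio.h>

enum { CC_LT = 1, CC_EQ = 2, CC_GT = 4, CC_UNORD = 8 };

int main (void)
{
  int le = CC_LT | CC_EQ;               /* 3, i.e. COMPCODE_LE */
  int ne = CC_LT | CC_GT | CC_UNORD;    /* 13, i.e. COMPCODE_NE */
  /* (x <= y) && (x != y) combines to x < y.  */
  printf ("le = %d, ne = %d, le & ne = %d\n", le, ne, le & ne);
  return 0;
}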
2795 /* Return true if COND1 tests the opposite condition of COND2. */
2797 bool
2798 inverse_conditions_p (const_tree cond1, const_tree cond2)
2800 return (COMPARISON_CLASS_P (cond1)
2801 && COMPARISON_CLASS_P (cond2)
2802 && (invert_tree_comparison
2803 (TREE_CODE (cond1),
2804 HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2805 && operand_equal_p (TREE_OPERAND (cond1, 0),
2806 TREE_OPERAND (cond2, 0), 0)
2807 && operand_equal_p (TREE_OPERAND (cond1, 1),
2808 TREE_OPERAND (cond2, 1), 0));
2811 /* Return a tree for the comparison which is the combination of
2812 doing the AND or OR (depending on CODE) of the two operations LCODE
2813 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2814 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2815 if this makes the transformation invalid. */
2817 tree
2818 combine_comparisons (location_t loc,
2819 enum tree_code code, enum tree_code lcode,
2820 enum tree_code rcode, tree truth_type,
2821 tree ll_arg, tree lr_arg)
2823 bool honor_nans = HONOR_NANS (ll_arg);
2824 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2825 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2826 int compcode;
2828 switch (code)
2830 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2831 compcode = lcompcode & rcompcode;
2832 break;
2834 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2835 compcode = lcompcode | rcompcode;
2836 break;
2838 default:
2839 return NULL_TREE;
2842 if (!honor_nans)
2844 /* Eliminate unordered comparisons, as well as LTGT and ORD
2845 which are not used unless the mode has NaNs. */
2846 compcode &= ~COMPCODE_UNORD;
2847 if (compcode == COMPCODE_LTGT)
2848 compcode = COMPCODE_NE;
2849 else if (compcode == COMPCODE_ORD)
2850 compcode = COMPCODE_TRUE;
2852 else if (flag_trapping_math)
2854 /* Check that the original operation and the optimized ones will trap
2855 under the same condition. */
2856 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2857 && (lcompcode != COMPCODE_EQ)
2858 && (lcompcode != COMPCODE_ORD);
2859 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2860 && (rcompcode != COMPCODE_EQ)
2861 && (rcompcode != COMPCODE_ORD);
2862 bool trap = (compcode & COMPCODE_UNORD) == 0
2863 && (compcode != COMPCODE_EQ)
2864 && (compcode != COMPCODE_ORD);
2866 /* In a short-circuited boolean expression the LHS might be
2867 such that the RHS, if evaluated, will never trap. For
2868 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2869 if neither x nor y is NaN. (This is a mixed blessing: for
2870 example, the expression above will never trap, hence
2871 optimizing it to x < y would be invalid). */
2872 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2873 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2874 rtrap = false;
2876 /* If the comparison was short-circuited, and only the RHS
2877 trapped, we may now generate a spurious trap. */
2878 if (rtrap && !ltrap
2879 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2880 return NULL_TREE;
2882 /* If we changed the conditions that cause a trap, we lose. */
2883 if ((ltrap || rtrap) != trap)
2884 return NULL_TREE;
2887 if (compcode == COMPCODE_TRUE)
2888 return constant_boolean_node (true, truth_type);
2889 else if (compcode == COMPCODE_FALSE)
2890 return constant_boolean_node (false, truth_type);
2891 else
2893 enum tree_code tcode;
2895 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2896 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2900 /* Return nonzero if two operands (typically of the same tree node)
2901 are necessarily equal. FLAGS modifies behavior as follows:
2903 If OEP_ONLY_CONST is set, only return nonzero for constants.
2904 This function tests whether the operands are indistinguishable;
2905 it does not test whether they are equal using C's == operation.
2906 The distinction is important for IEEE floating point, because
2907 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2908 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2910 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2911 even though it may hold multiple values during a function.
2912 This is because a GCC tree node guarantees that nothing else is
2913 executed between the evaluation of its "operands" (which may often
2914 be evaluated in arbitrary order). Hence if the operands themselves
2915 have no side effects, the VAR_DECLs, PARM_DECLs, etc. must hold the
2916 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2917 unset means assuming isochronic (or instantaneous) tree equivalence.
2918 Unless comparing arbitrary expression trees, such as from different
2919 statements, this flag can usually be left unset.
2921 If OEP_PURE_SAME is set, then pure functions with identical arguments
2922 are considered the same. It is used when the caller has other ways
2923 to ensure that global memory is unchanged in between.
2925 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2926 not values of expressions.
2928 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2929 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2931 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2932 any operand with side effects. This is unnecessarily conservative in the
2933 case we know that arg0 and arg1 are in disjoint code paths (such as in
2934 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2935 addresses with TREE_CONSTANT flag set so we know that &var == &var
2936 even if var is volatile. */
2938 int
2939 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2941 /* When checking, verify at the outermost operand_equal_p call that
2942 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
2943 hash value. */
2944 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2946 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2948 if (arg0 != arg1)
2950 inchash::hash hstate0 (0), hstate1 (0);
2951 inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2952 inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2953 hashval_t h0 = hstate0.end ();
2954 hashval_t h1 = hstate1.end ();
2955 gcc_assert (h0 == h1);
2957 return 1;
2959 else
2960 return 0;
2963 /* If either is ERROR_MARK, they aren't equal. */
2964 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2965 || TREE_TYPE (arg0) == error_mark_node
2966 || TREE_TYPE (arg1) == error_mark_node)
2967 return 0;
2969 /* Similar, if either does not have a type (like a released SSA name),
2970 they aren't equal. */
2971 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2972 return 0;
2974 /* We cannot consider pointers to different address space equal. */
2975 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2976 && POINTER_TYPE_P (TREE_TYPE (arg1))
2977 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2978 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2979 return 0;
2981 /* Check equality of integer constants before bailing out due to
2982 precision differences. */
2983 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2985 /* Address of INTEGER_CST is not defined; check that we did not forget
2986 to drop the OEP_ADDRESS_OF flags. */
2987 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2988 return tree_int_cst_equal (arg0, arg1);
2991 if (!(flags & OEP_ADDRESS_OF))
2993 /* If both types don't have the same signedness, then we can't consider
2994 them equal. We must check this before the STRIP_NOPS calls
2995 because they may change the signedness of the arguments. As pointers
2996 strictly don't have a signedness, require either two pointers or
2997 two non-pointers as well. */
2998 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2999 || POINTER_TYPE_P (TREE_TYPE (arg0))
3000 != POINTER_TYPE_P (TREE_TYPE (arg1)))
3001 return 0;
3003 /* If both types don't have the same precision, then it is not safe
3004 to strip NOPs. */
3005 if (element_precision (TREE_TYPE (arg0))
3006 != element_precision (TREE_TYPE (arg1)))
3007 return 0;
3009 STRIP_NOPS (arg0);
3010 STRIP_NOPS (arg1);
3012 #if 0
3013 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
3014 sanity check once the issue is solved. */
3015 else
3016 /* Addresses of conversions and SSA_NAMEs (and many other things)
3017 are not defined. Check that we did not forget to drop the
3018 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
3019 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3020 && TREE_CODE (arg0) != SSA_NAME);
3021 #endif
3023 /* In case both args are comparisons but with different comparison
3024 code, try to swap the comparison operands of one arg to produce
3025 a match and compare that variant. */
3026 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3027 && COMPARISON_CLASS_P (arg0)
3028 && COMPARISON_CLASS_P (arg1))
3030 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3032 if (TREE_CODE (arg0) == swap_code)
3033 return operand_equal_p (TREE_OPERAND (arg0, 0),
3034 TREE_OPERAND (arg1, 1), flags)
3035 && operand_equal_p (TREE_OPERAND (arg0, 1),
3036 TREE_OPERAND (arg1, 0), flags);
3039 if (TREE_CODE (arg0) != TREE_CODE (arg1))
3041 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
3042 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3044 else if (flags & OEP_ADDRESS_OF)
3046 /* If we are interested in comparing addresses ignore
3047 MEM_REF wrappings of the base that can appear just for
3048 TBAA reasons. */
3049 if (TREE_CODE (arg0) == MEM_REF
3050 && DECL_P (arg1)
3051 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3052 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3053 && integer_zerop (TREE_OPERAND (arg0, 1)))
3054 return 1;
3055 else if (TREE_CODE (arg1) == MEM_REF
3056 && DECL_P (arg0)
3057 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3058 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3059 && integer_zerop (TREE_OPERAND (arg1, 1)))
3060 return 1;
3061 return 0;
3063 else
3064 return 0;
3067 /* When not checking addresses, this is needed for conversions and for
3068 COMPONENT_REF. Might as well play it safe and always test this. */
3069 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3070 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3071 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3072 && !(flags & OEP_ADDRESS_OF)))
3073 return 0;
3075 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3076 We don't care about side effects in that case because the SAVE_EXPR
3077 takes care of that for us. In all other cases, two expressions are
3078 equal if they have no side effects. If we have two identical
3079 expressions with side effects that should be treated the same due
3080 to the only side effects being identical SAVE_EXPR's, that will
3081 be detected in the recursive calls below.
3082 If we are taking an invariant address of two identical objects
3083 they are necessarily equal as well. */
3084 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3085 && (TREE_CODE (arg0) == SAVE_EXPR
3086 || (flags & OEP_MATCH_SIDE_EFFECTS)
3087 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3088 return 1;
3090 /* Next handle constant cases, those for which we can return 1 even
3091 if ONLY_CONST is set. */
3092 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3093 switch (TREE_CODE (arg0))
3095 case INTEGER_CST:
3096 return tree_int_cst_equal (arg0, arg1);
3098 case FIXED_CST:
3099 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3100 TREE_FIXED_CST (arg1));
3102 case REAL_CST:
3103 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3104 return 1;
3107 if (!HONOR_SIGNED_ZEROS (arg0))
3109 /* If we do not distinguish between signed and unsigned zero,
3110 consider them equal. */
3111 if (real_zerop (arg0) && real_zerop (arg1))
3112 return 1;
3114 return 0;
3116 case VECTOR_CST:
3118 if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3119 != VECTOR_CST_LOG2_NPATTERNS (arg1))
3120 return 0;
3122 if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3123 != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3124 return 0;
3126 unsigned int count = vector_cst_encoded_nelts (arg0);
3127 for (unsigned int i = 0; i < count; ++i)
3128 if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3129 VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3130 return 0;
3131 return 1;
3134 case COMPLEX_CST:
3135 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3136 flags)
3137 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3138 flags));
3140 case STRING_CST:
3141 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3142 && ! memcmp (TREE_STRING_POINTER (arg0),
3143 TREE_STRING_POINTER (arg1),
3144 TREE_STRING_LENGTH (arg0)));
3146 case ADDR_EXPR:
3147 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3148 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3149 flags | OEP_ADDRESS_OF
3150 | OEP_MATCH_SIDE_EFFECTS);
3151 case CONSTRUCTOR:
3152 /* In GIMPLE empty constructors are allowed in initializers of
3153 aggregates. */
3154 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3155 default:
3156 break;
3159 if (flags & OEP_ONLY_CONST)
3160 return 0;
3162 /* Define macros to test an operand from arg0 and arg1 for equality and a
3163 variant that allows null and views null as being different from any
3164 non-null value. In the latter case, if either is null, then both
3165 must be; otherwise, do the normal comparison. */
3166 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3167 TREE_OPERAND (arg1, N), flags)
3169 #define OP_SAME_WITH_NULL(N) \
3170 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3171 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3173 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3175 case tcc_unary:
3176 /* Two conversions are equal only if signedness and modes match. */
3177 switch (TREE_CODE (arg0))
3179 CASE_CONVERT:
3180 case FIX_TRUNC_EXPR:
3181 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3182 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3183 return 0;
3184 break;
3185 default:
3186 break;
3189 return OP_SAME (0);
3192 case tcc_comparison:
3193 case tcc_binary:
3194 if (OP_SAME (0) && OP_SAME (1))
3195 return 1;
3197 /* For commutative ops, allow the other order. */
3198 return (commutative_tree_code (TREE_CODE (arg0))
3199 && operand_equal_p (TREE_OPERAND (arg0, 0),
3200 TREE_OPERAND (arg1, 1), flags)
3201 && operand_equal_p (TREE_OPERAND (arg0, 1),
3202 TREE_OPERAND (arg1, 0), flags));
3204 case tcc_reference:
3205 /* If either of the pointer (or reference) expressions we are
3206 dereferencing contain a side effect, these cannot be equal,
3207 but their addresses can be. */
3208 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3209 && (TREE_SIDE_EFFECTS (arg0)
3210 || TREE_SIDE_EFFECTS (arg1)))
3211 return 0;
3213 switch (TREE_CODE (arg0))
3215 case INDIRECT_REF:
3216 if (!(flags & OEP_ADDRESS_OF)
3217 && (TYPE_ALIGN (TREE_TYPE (arg0))
3218 != TYPE_ALIGN (TREE_TYPE (arg1))))
3219 return 0;
3220 flags &= ~OEP_ADDRESS_OF;
3221 return OP_SAME (0);
3223 case IMAGPART_EXPR:
3224 /* Require the same offset. */
3225 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3226 TYPE_SIZE (TREE_TYPE (arg1)),
3227 flags & ~OEP_ADDRESS_OF))
3228 return 0;
3230 /* Fallthru. */
3231 case REALPART_EXPR:
3232 case VIEW_CONVERT_EXPR:
3233 return OP_SAME (0);
3235 case TARGET_MEM_REF:
3236 case MEM_REF:
3237 if (!(flags & OEP_ADDRESS_OF))
3239 /* Require equal access sizes */
3240 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3241 && (!TYPE_SIZE (TREE_TYPE (arg0))
3242 || !TYPE_SIZE (TREE_TYPE (arg1))
3243 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3244 TYPE_SIZE (TREE_TYPE (arg1)),
3245 flags)))
3246 return 0;
3247 /* Verify that access happens in similar types. */
3248 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3249 return 0;
3250 /* Verify that accesses are TBAA compatible. */
3251 if (!alias_ptr_types_compatible_p
3252 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3253 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3254 || (MR_DEPENDENCE_CLIQUE (arg0)
3255 != MR_DEPENDENCE_CLIQUE (arg1))
3256 || (MR_DEPENDENCE_BASE (arg0)
3257 != MR_DEPENDENCE_BASE (arg1)))
3258 return 0;
3259 /* Verify that alignment is compatible. */
3260 if (TYPE_ALIGN (TREE_TYPE (arg0))
3261 != TYPE_ALIGN (TREE_TYPE (arg1)))
3262 return 0;
3264 flags &= ~OEP_ADDRESS_OF;
3265 return (OP_SAME (0) && OP_SAME (1)
3266 /* TARGET_MEM_REFs require equal extra operands. */
3267 && (TREE_CODE (arg0) != TARGET_MEM_REF
3268 || (OP_SAME_WITH_NULL (2)
3269 && OP_SAME_WITH_NULL (3)
3270 && OP_SAME_WITH_NULL (4))));
3272 case ARRAY_REF:
3273 case ARRAY_RANGE_REF:
3274 if (!OP_SAME (0))
3275 return 0;
3276 flags &= ~OEP_ADDRESS_OF;
3277 /* Compare the array index by value if it is constant first as we
3278 may have different types but same value here. */
3279 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3280 TREE_OPERAND (arg1, 1))
3281 || OP_SAME (1))
3282 && OP_SAME_WITH_NULL (2)
3283 && OP_SAME_WITH_NULL (3)
3284 /* Compare low bound and element size as with OEP_ADDRESS_OF
3285 we have to account for the offset of the ref. */
3286 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3287 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3288 || (operand_equal_p (array_ref_low_bound
3289 (CONST_CAST_TREE (arg0)),
3290 array_ref_low_bound
3291 (CONST_CAST_TREE (arg1)), flags)
3292 && operand_equal_p (array_ref_element_size
3293 (CONST_CAST_TREE (arg0)),
3294 array_ref_element_size
3295 (CONST_CAST_TREE (arg1)),
3296 flags))));
3298 case COMPONENT_REF:
3299 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3300 may be NULL when we're called to compare MEM_EXPRs. */
3301 if (!OP_SAME_WITH_NULL (0)
3302 || !OP_SAME (1))
3303 return 0;
3304 flags &= ~OEP_ADDRESS_OF;
3305 return OP_SAME_WITH_NULL (2);
3307 case BIT_FIELD_REF:
3308 if (!OP_SAME (0))
3309 return 0;
3310 flags &= ~OEP_ADDRESS_OF;
3311 return OP_SAME (1) && OP_SAME (2);
3313 default:
3314 return 0;
3317 case tcc_expression:
3318 switch (TREE_CODE (arg0))
3320 case ADDR_EXPR:
3321 /* Be sure we pass right ADDRESS_OF flag. */
3322 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3323 return operand_equal_p (TREE_OPERAND (arg0, 0),
3324 TREE_OPERAND (arg1, 0),
3325 flags | OEP_ADDRESS_OF);
3327 case TRUTH_NOT_EXPR:
3328 return OP_SAME (0);
3330 case TRUTH_ANDIF_EXPR:
3331 case TRUTH_ORIF_EXPR:
3332 return OP_SAME (0) && OP_SAME (1);
3334 case WIDEN_MULT_PLUS_EXPR:
3335 case WIDEN_MULT_MINUS_EXPR:
3336 if (!OP_SAME (2))
3337 return 0;
3338 /* The multiplication operands are commutative. */
3339 /* FALLTHRU */
3341 case TRUTH_AND_EXPR:
3342 case TRUTH_OR_EXPR:
3343 case TRUTH_XOR_EXPR:
3344 if (OP_SAME (0) && OP_SAME (1))
3345 return 1;
3347 /* Otherwise take into account this is a commutative operation. */
3348 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3349 TREE_OPERAND (arg1, 1), flags)
3350 && operand_equal_p (TREE_OPERAND (arg0, 1),
3351 TREE_OPERAND (arg1, 0), flags));
3353 case COND_EXPR:
3354 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3355 return 0;
3356 flags &= ~OEP_ADDRESS_OF;
3357 return OP_SAME (0);
3359 case BIT_INSERT_EXPR:
3360 /* BIT_INSERT_EXPR has an implicit operand in the type precision
3361 of op1; we must check that the precisions are the same. */
3362 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3363 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3364 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3365 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3366 return false;
3367 /* FALLTHRU */
3369 case VEC_COND_EXPR:
3370 case DOT_PROD_EXPR:
3371 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3373 case MODIFY_EXPR:
3374 case INIT_EXPR:
3375 case COMPOUND_EXPR:
3376 case PREDECREMENT_EXPR:
3377 case PREINCREMENT_EXPR:
3378 case POSTDECREMENT_EXPR:
3379 case POSTINCREMENT_EXPR:
3380 if (flags & OEP_LEXICOGRAPHIC)
3381 return OP_SAME (0) && OP_SAME (1);
3382 return 0;
3384 case CLEANUP_POINT_EXPR:
3385 case EXPR_STMT:
3386 case SAVE_EXPR:
3387 if (flags & OEP_LEXICOGRAPHIC)
3388 return OP_SAME (0);
3389 return 0;
3391 default:
3392 return 0;
3395 case tcc_vl_exp:
3396 switch (TREE_CODE (arg0))
3398 case CALL_EXPR:
3399 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3400 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3401 /* If one CALL_EXPR is an internal function and the other is a
3402 normal function call, they are not equal. */
3403 return 0;
3404 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3406 /* If the CALL_EXPRs call different internal functions, then they
3407 are not equal. */
3408 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3409 return 0;
3411 else
3413 /* If the CALL_EXPRs call different functions, then they are not
3414 equal. */
3415 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3416 flags))
3417 return 0;
3420 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3422 unsigned int cef = call_expr_flags (arg0);
3423 if (flags & OEP_PURE_SAME)
3424 cef &= ECF_CONST | ECF_PURE;
3425 else
3426 cef &= ECF_CONST;
3427 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3428 return 0;
3431 /* Now see if all the arguments are the same. */
3433 const_call_expr_arg_iterator iter0, iter1;
3434 const_tree a0, a1;
3435 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3436 a1 = first_const_call_expr_arg (arg1, &iter1);
3437 a0 && a1;
3438 a0 = next_const_call_expr_arg (&iter0),
3439 a1 = next_const_call_expr_arg (&iter1))
3440 if (! operand_equal_p (a0, a1, flags))
3441 return 0;
3443 /* If we get here and both argument lists are exhausted
3444 then the CALL_EXPRs are equal. */
3445 return ! (a0 || a1);
3447 default:
3448 return 0;
3451 case tcc_declaration:
3452 /* Consider __builtin_sqrt equal to sqrt. */
3453 return (TREE_CODE (arg0) == FUNCTION_DECL
3454 && fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3455 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3456 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3458 case tcc_exceptional:
3459 if (TREE_CODE (arg0) == CONSTRUCTOR)
3461 /* In GIMPLE constructors are used only to build vectors from
3462 elements. Individual elements in the constructor must be
3463 indexed in increasing order and form an initial sequence.
3465 We make no effort to compare constructors in generic.
3466 (see sem_variable::equals in ipa-icf which can do so for
3467 constants). */
3468 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3469 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3470 return 0;
3472 /* Be sure the constructed vectors have the same representation.
3473 Only element precisions and modes were checked to match above;
3474 vectors may be BLKmode, so also check that the numbers of
3475 parts match. */
3476 if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3477 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3478 return 0;
3480 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3481 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3482 unsigned int len = vec_safe_length (v0);
3484 if (len != vec_safe_length (v1))
3485 return 0;
3487 for (unsigned int i = 0; i < len; i++)
3489 constructor_elt *c0 = &(*v0)[i];
3490 constructor_elt *c1 = &(*v1)[i];
3492 if (!operand_equal_p (c0->value, c1->value, flags)
3493 /* In GIMPLE the indexes can be either NULL or matching i.
3494 Double check this so we won't get false
3495 positives for GENERIC. */
3496 || (c0->index
3497 && (TREE_CODE (c0->index) != INTEGER_CST
3498 || compare_tree_int (c0->index, i)))
3499 || (c1->index
3500 && (TREE_CODE (c1->index) != INTEGER_CST
3501 || compare_tree_int (c1->index, i))))
3502 return 0;
3504 return 1;
3506 else if (TREE_CODE (arg0) == STATEMENT_LIST
3507 && (flags & OEP_LEXICOGRAPHIC))
3509 /* Compare the STATEMENT_LISTs. */
3510 tree_stmt_iterator tsi1, tsi2;
3511 tree body1 = CONST_CAST_TREE (arg0);
3512 tree body2 = CONST_CAST_TREE (arg1);
3513 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3514 tsi_next (&tsi1), tsi_next (&tsi2))
3516 /* The lists don't have the same number of statements. */
3517 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3518 return 0;
3519 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3520 return 1;
3521 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3522 flags & (OEP_LEXICOGRAPHIC
3523 | OEP_NO_HASH_CHECK)))
3524 return 0;
3527 return 0;
3529 case tcc_statement:
3530 switch (TREE_CODE (arg0))
3532 case RETURN_EXPR:
3533 if (flags & OEP_LEXICOGRAPHIC)
3534 return OP_SAME_WITH_NULL (0);
3535 return 0;
3536 case DEBUG_BEGIN_STMT:
3537 if (flags & OEP_LEXICOGRAPHIC)
3538 return 1;
3539 return 0;
3540 default:
3541 return 0;
3544 default:
3545 return 0;
3548 #undef OP_SAME
3549 #undef OP_SAME_WITH_NULL
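The overall recursion is easier to see in miniature. A toy model, not the GCC implementation: structural equality over a tiny expression tree, including the commutative-swap retry performed by the tcc_binary/tcc_comparison cases above:

#include <stdbool.h>
#include <stddef.h>

struct expr { int code; long value; struct expr *op0, *op1; };

static bool
is_commutative (int code)
{
  return code == '+' || code == '*';
}

static bool
expr_equal_p (const struct expr *a, const struct expr *b)
{
  if (a == b)
    return true;
  if (!a || !b || a->code != b->code)
    return false;
  if (!a->op0)                          /* leaf: compare by value */
    return !b->op0 && a->value == b->value;
  if (expr_equal_p (a->op0, b->op0) && expr_equal_p (a->op1, b->op1))
    return true;
  return is_commutative (a->code)       /* retry with operands swapped */
         && expr_equal_p (a->op0, b->op1)
         && expr_equal_p (a->op1, b->op0);
}

int main (void)
{
  struct expr one = { 'c', 1, NULL, NULL }, two = { 'c', 2, NULL, NULL };
  struct expr ab = { '+', 0, &one, &two }, ba = { '+', 0, &two, &one };
  return expr_equal_p (&ab, &ba) ? 0 : 1;   /* equal via the swap */
}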
3552 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3553 with a different signedness or a narrower precision. */
3555 static bool
3556 operand_equal_for_comparison_p (tree arg0, tree arg1)
3558 if (operand_equal_p (arg0, arg1, 0))
3559 return true;
3561 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3562 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3563 return false;
3565 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3566 and see if the inner values are the same. This removes any
3567 signedness comparison, which doesn't matter here. */
3568 tree op0 = arg0;
3569 tree op1 = arg1;
3570 STRIP_NOPS (op0);
3571 STRIP_NOPS (op1);
3572 if (operand_equal_p (op0, op1, 0))
3573 return true;
3575 /* Discard a single widening conversion from ARG1 and see if the inner
3576 value is the same as ARG0. */
3577 if (CONVERT_EXPR_P (arg1)
3578 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3579 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3580 < TYPE_PRECISION (TREE_TYPE (arg1))
3581 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3582 return true;
3584 return false;
3587 /* See if ARG is an expression that is either a comparison or is performing
3588 arithmetic on comparisons. The comparisons must only be comparing
3589 two different values, which will be stored in *CVAL1 and *CVAL2; if
3590 they are nonzero it means that some operands have already been found.
3591 No variables may be used anywhere else in the expression except in the
3592 comparisons.
3594 If this is true, return 1. Otherwise, return zero. */
3596 static int
3597 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
3599 enum tree_code code = TREE_CODE (arg);
3600 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3602 /* We can handle some of the tcc_expression cases here. */
3603 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3604 tclass = tcc_unary;
3605 else if (tclass == tcc_expression
3606 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3607 || code == COMPOUND_EXPR))
3608 tclass = tcc_binary;
3610 switch (tclass)
3612 case tcc_unary:
3613 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
3615 case tcc_binary:
3616 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3617 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
3619 case tcc_constant:
3620 return 1;
3622 case tcc_expression:
3623 if (code == COND_EXPR)
3624 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3625 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
3626 && twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
3627 return 0;
3629 case tcc_comparison:
3630 /* First see if we can handle the first operand, then the second. For
3631 the second operand, we know *CVAL1 can't be zero. It must be that
3632 one side of the comparison is each of the values; test for the
3633 case where this isn't true by failing if the two operands
3634 are the same. */
3636 if (operand_equal_p (TREE_OPERAND (arg, 0),
3637 TREE_OPERAND (arg, 1), 0))
3638 return 0;
3640 if (*cval1 == 0)
3641 *cval1 = TREE_OPERAND (arg, 0);
3642 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3644 else if (*cval2 == 0)
3645 *cval2 = TREE_OPERAND (arg, 0);
3646 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3648 else
3649 return 0;
3651 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3653 else if (*cval2 == 0)
3654 *cval2 = TREE_OPERAND (arg, 1);
3655 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3657 else
3658 return 0;
3660 return 1;
3662 default:
3663 return 0;
3667 /* ARG is a tree that is known to contain just arithmetic operations and
3668 comparisons. Evaluate the operations in the tree substituting NEW0 for
3669 any occurrence of OLD0 as an operand of a comparison and likewise for
3670 NEW1 and OLD1. */
3672 static tree
3673 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3674 tree old1, tree new1)
3676 tree type = TREE_TYPE (arg);
3677 enum tree_code code = TREE_CODE (arg);
3678 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3680 /* We can handle some of the tcc_expression cases here. */
3681 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3682 tclass = tcc_unary;
3683 else if (tclass == tcc_expression
3684 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3685 tclass = tcc_binary;
3687 switch (tclass)
3689 case tcc_unary:
3690 return fold_build1_loc (loc, code, type,
3691 eval_subst (loc, TREE_OPERAND (arg, 0),
3692 old0, new0, old1, new1));
3694 case tcc_binary:
3695 return fold_build2_loc (loc, code, type,
3696 eval_subst (loc, TREE_OPERAND (arg, 0),
3697 old0, new0, old1, new1),
3698 eval_subst (loc, TREE_OPERAND (arg, 1),
3699 old0, new0, old1, new1));
3701 case tcc_expression:
3702 switch (code)
3704 case SAVE_EXPR:
3705 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3706 old1, new1);
3708 case COMPOUND_EXPR:
3709 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3710 old1, new1);
3712 case COND_EXPR:
3713 return fold_build3_loc (loc, code, type,
3714 eval_subst (loc, TREE_OPERAND (arg, 0),
3715 old0, new0, old1, new1),
3716 eval_subst (loc, TREE_OPERAND (arg, 1),
3717 old0, new0, old1, new1),
3718 eval_subst (loc, TREE_OPERAND (arg, 2),
3719 old0, new0, old1, new1));
3720 default:
3721 break;
3723 /* Fall through - ??? */
3725 case tcc_comparison:
3727 tree arg0 = TREE_OPERAND (arg, 0);
3728 tree arg1 = TREE_OPERAND (arg, 1);
3730 /* We need to check both for exact equality and tree equality. The
3731 former will be true if the operand has a side-effect. In that
3732 case, we know the operand occurred exactly once. */
3734 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3735 arg0 = new0;
3736 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3737 arg0 = new1;
3739 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3740 arg1 = new0;
3741 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3742 arg1 = new1;
3744 return fold_build2_loc (loc, code, type, arg0, arg1);
3747 default:
3748 return arg;
3752 /* Return a tree for the case when the result of an expression is RESULT
3753 converted to TYPE and OMITTED was previously an operand of the expression
3754 but is now not needed (e.g., we folded OMITTED * 0).
3756 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3757 the conversion of RESULT to TYPE. */
3759 tree
3760 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3762 tree t = fold_convert_loc (loc, type, result);
3764 /* If the resulting operand is an empty statement, just return the omitted
3765 statement casted to void. */
3766 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3767 return build1_loc (loc, NOP_EXPR, void_type_node,
3768 fold_ignored_result (omitted));
3770 if (TREE_SIDE_EFFECTS (omitted))
3771 return build2_loc (loc, COMPOUND_EXPR, type,
3772 fold_ignored_result (omitted), t);
3774 return non_lvalue_loc (loc, t);
3777 /* Return a tree for the case when the result of an expression is RESULT
3778 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3779 of the expression but are now not needed.
3781 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3782 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3783 evaluated before OMITTED2. Otherwise, if neither has side effects,
3784 just do the conversion of RESULT to TYPE. */
3786 tree
3787 omit_two_operands_loc (location_t loc, tree type, tree result,
3788 tree omitted1, tree omitted2)
3790 tree t = fold_convert_loc (loc, type, result);
3792 if (TREE_SIDE_EFFECTS (omitted2))
3793 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3794 if (TREE_SIDE_EFFECTS (omitted1))
3795 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3797 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
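In source terms, the omitted operands survive as the left-hand sides of comma expressions, so their side effects still run even though their values are dropped. A standalone demonstration of why that matters (not GCC code):

#include <stdio.h>

static int calls;

static int
f (void)
{
  ++calls;
  return 42;
}

int main (void)
{
  /* Folding f () * 0 must preserve the call, i.e. produce the
     equivalent of (f (), 0) rather than plain 0.  */
  int r = (f (), 0);
  printf ("r = %d, f called %d time(s)\n", r, calls);
  return 0;
}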
3801 /* Return a simplified tree node for the truth-negation of ARG. This
3802 never alters ARG itself. We assume that ARG is an operation that
3803 returns a truth value (0 or 1).
3805 FIXME: one would think we would fold the result, but it causes
3806 problems with the dominator optimizer. */
3808 static tree
3809 fold_truth_not_expr (location_t loc, tree arg)
3811 tree type = TREE_TYPE (arg);
3812 enum tree_code code = TREE_CODE (arg);
3813 location_t loc1, loc2;
3815 /* If this is a comparison, we can simply invert it, except for
3816 floating-point non-equality comparisons, in which case we just
3817 enclose a TRUTH_NOT_EXPR around what we have. */
3819 if (TREE_CODE_CLASS (code) == tcc_comparison)
3821 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3822 if (FLOAT_TYPE_P (op_type)
3823 && flag_trapping_math
3824 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3825 && code != NE_EXPR && code != EQ_EXPR)
3826 return NULL_TREE;
3828 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3829 if (code == ERROR_MARK)
3830 return NULL_TREE;
3832 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3833 TREE_OPERAND (arg, 1));
3834 if (TREE_NO_WARNING (arg))
3835 TREE_NO_WARNING (ret) = 1;
3836 return ret;
3839 switch (code)
3841 case INTEGER_CST:
3842 return constant_boolean_node (integer_zerop (arg), type);
3844 case TRUTH_AND_EXPR:
3845 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3846 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3847 return build2_loc (loc, TRUTH_OR_EXPR, type,
3848 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3849 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3851 case TRUTH_OR_EXPR:
3852 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3853 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3854 return build2_loc (loc, TRUTH_AND_EXPR, type,
3855 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3856 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3858 case TRUTH_XOR_EXPR:
3859 /* Here we can invert either operand. We invert the first operand
3860 unless the second operand is a TRUTH_NOT_EXPR, in which case our
3861 result is the XOR of the first operand with the inside of the
3862 negation of the second operand. */
3864 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3865 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3866 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3867 else
3868 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3869 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3870 TREE_OPERAND (arg, 1));
3872 case TRUTH_ANDIF_EXPR:
3873 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3874 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3875 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3876 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3877 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3879 case TRUTH_ORIF_EXPR:
3880 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3881 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3882 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3883 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3884 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3886 case TRUTH_NOT_EXPR:
3887 return TREE_OPERAND (arg, 0);
3889 case COND_EXPR:
3891 tree arg1 = TREE_OPERAND (arg, 1);
3892 tree arg2 = TREE_OPERAND (arg, 2);
3894 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3895 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3897 /* A COND_EXPR may have a throw as one operand, which
3898 then has void type. Just leave void operands
3899 as they are. */
3900 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3901 VOID_TYPE_P (TREE_TYPE (arg1))
3902 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3903 VOID_TYPE_P (TREE_TYPE (arg2))
3904 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3907 case COMPOUND_EXPR:
3908 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3909 return build2_loc (loc, COMPOUND_EXPR, type,
3910 TREE_OPERAND (arg, 0),
3911 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3913 case NON_LVALUE_EXPR:
3914 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3915 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3917 CASE_CONVERT:
3918 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3919 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3921 /* fall through */
3923 case FLOAT_EXPR:
3924 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3925 return build1_loc (loc, TREE_CODE (arg), type,
3926 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3928 case BIT_AND_EXPR:
3929 if (!integer_onep (TREE_OPERAND (arg, 1)))
3930 return NULL_TREE;
3931 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3933 case SAVE_EXPR:
3934 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3936 case CLEANUP_POINT_EXPR:
3937 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3938 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3939 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3941 default:
3942 return NULL_TREE;
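/* Illustrative sketch, not part of the original sources: the
   TRUTH_AND_EXPR and TRUTH_OR_EXPR cases above are De Morgan's laws.
   For any truth values a and b the two functions below agree.  */

static int
sketch_not_and (int a, int b)
{
  return !(a && b);
}

static int
sketch_not_and_inverted (int a, int b)
{
  return !a || !b;		/* what fold_truth_not_expr builds */
}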
3946 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3947 assume that ARG is an operation that returns a truth value (0 or 1
3948 for scalars, 0 or -1 for vectors). Return the folded expression if
3949 folding is successful. Otherwise, return NULL_TREE. */
3951 static tree
3952 fold_invert_truthvalue (location_t loc, tree arg)
3954 tree type = TREE_TYPE (arg);
3955 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3956 ? BIT_NOT_EXPR
3957 : TRUTH_NOT_EXPR,
3958 type, arg);
3961 /* Return a simplified tree node for the truth-negation of ARG. This
3962 never alters ARG itself. We assume that ARG is an operation that
3963 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3965 tree
3966 invert_truthvalue_loc (location_t loc, tree arg)
3968 if (TREE_CODE (arg) == ERROR_MARK)
3969 return arg;
3971 tree type = TREE_TYPE (arg);
3972 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3973 ? BIT_NOT_EXPR
3974 : TRUTH_NOT_EXPR,
3975 type, arg);
3978 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3979 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3980 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
3981 is the original memory reference used to preserve the alias set of
3982 the access. */
3984 static tree
3985 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3986 HOST_WIDE_INT bitsize, poly_int64 bitpos,
3987 int unsignedp, int reversep)
3989 tree result, bftype;
3991 /* Attempt to preserve the access path if possible. */
3992 if (TREE_CODE (orig_inner) == COMPONENT_REF)
3994 tree ninner = TREE_OPERAND (orig_inner, 0);
3995 machine_mode nmode;
3996 poly_int64 nbitsize, nbitpos;
3997 tree noffset;
3998 int nunsignedp, nreversep, nvolatilep = 0;
3999 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4000 &noffset, &nmode, &nunsignedp,
4001 &nreversep, &nvolatilep);
4002 if (base == inner
4003 && noffset == NULL_TREE
4004 && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4005 && !reversep
4006 && !nreversep
4007 && !nvolatilep)
4009 inner = ninner;
4010 bitpos -= nbitpos;
4014 alias_set_type iset = get_alias_set (orig_inner);
4015 if (iset == 0 && get_alias_set (inner) != iset)
4016 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4017 build_fold_addr_expr (inner),
4018 build_int_cst (ptr_type_node, 0));
4020 if (known_eq (bitpos, 0) && !reversep)
4022 tree size = TYPE_SIZE (TREE_TYPE (inner));
4023 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4024 || POINTER_TYPE_P (TREE_TYPE (inner)))
4025 && tree_fits_shwi_p (size)
4026 && tree_to_shwi (size) == bitsize)
4027 return fold_convert_loc (loc, type, inner);
4030 bftype = type;
4031 if (TYPE_PRECISION (bftype) != bitsize
4032 || TYPE_UNSIGNED (bftype) == !unsignedp)
4033 bftype = build_nonstandard_integer_type (bitsize, 0);
4035 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4036 bitsize_int (bitsize), bitsize_int (bitpos));
4037 REF_REVERSE_STORAGE_ORDER (result) = reversep;
4039 if (bftype != type)
4040 result = fold_convert_loc (loc, type, result);
4042 return result;
4045 /* Optimize a bit-field compare.
4047 There are two cases: First is a compare against a constant and the
4048 second is a comparison of two items where the fields are at the same
4049 bit position relative to the start of a chunk (byte, halfword, word)
4050 large enough to contain it. In these cases we can avoid the shift
4051 implicit in bitfield extractions.
4053 For constants, we emit a compare of the shifted constant with the
4054 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4055 compared. For two fields at the same position, we do the ANDs with the
4056 similar mask and compare the result of the ANDs.
4058 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4059 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4060 are the left and right operands of the comparison, respectively.
4062 If the optimization described above can be done, we return the resulting
4063 tree. Otherwise we return zero. */
4065 static tree
4066 optimize_bit_field_compare (location_t loc, enum tree_code code,
4067 tree compare_type, tree lhs, tree rhs)
4069 poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4070 HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4071 tree type = TREE_TYPE (lhs);
4072 tree unsigned_type;
4073 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4074 machine_mode lmode, rmode;
4075 scalar_int_mode nmode;
4076 int lunsignedp, runsignedp;
4077 int lreversep, rreversep;
4078 int lvolatilep = 0, rvolatilep = 0;
4079 tree linner, rinner = NULL_TREE;
4080 tree mask;
4081 tree offset;
4083 /* Get all the information about the extractions being done. If the bit size
4084 is the same as the size of the underlying object, we aren't doing an
4085 extraction at all and so can do nothing. We also don't want to
4086 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4087 then will no longer be able to replace it. */
4088 linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4089 &lunsignedp, &lreversep, &lvolatilep);
4090 if (linner == lhs
4091 || !known_size_p (plbitsize)
4092 || !plbitsize.is_constant (&lbitsize)
4093 || !plbitpos.is_constant (&lbitpos)
4094 || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4095 || offset != 0
4096 || TREE_CODE (linner) == PLACEHOLDER_EXPR
4097 || lvolatilep)
4098 return 0;
4100 if (const_p)
4101 rreversep = lreversep;
4102 else
4104 /* If this is not a constant, we can only do something if bit positions,
4105 sizes, signedness and storage order are the same. */
4106 rinner
4107 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4108 &runsignedp, &rreversep, &rvolatilep);
4110 if (rinner == rhs
4111 || maybe_ne (lbitpos, rbitpos)
4112 || maybe_ne (lbitsize, rbitsize)
4113 || lunsignedp != runsignedp
4114 || lreversep != rreversep
4115 || offset != 0
4116 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4117 || rvolatilep)
4118 return 0;
4121 /* Honor the C++ memory model and mimic what RTL expansion does. */
4122 poly_uint64 bitstart = 0;
4123 poly_uint64 bitend = 0;
4124 if (TREE_CODE (lhs) == COMPONENT_REF)
4126 get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4127 if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4128 return 0;
4131 /* See if we can find a mode to refer to this field. We should be able to,
4132 but fail if we can't. */
4133 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4134 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4135 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4136 TYPE_ALIGN (TREE_TYPE (rinner))),
4137 BITS_PER_WORD, false, &nmode))
4138 return 0;
4140 /* Set signed and unsigned types of the precision of this mode for the
4141 shifts below. */
4142 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4144 /* Compute the bit position and size for the new reference and our offset
4145 within it. If the new reference is the same size as the original, we
4146 won't optimize anything, so return zero. */
4147 nbitsize = GET_MODE_BITSIZE (nmode);
4148 nbitpos = lbitpos & ~ (nbitsize - 1);
4149 lbitpos -= nbitpos;
4150 if (nbitsize == lbitsize)
4151 return 0;
4153 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4154 lbitpos = nbitsize - lbitsize - lbitpos;
4156 /* Make the mask to be used against the extracted field. */
4157 mask = build_int_cst_type (unsigned_type, -1);
4158 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4159 mask = const_binop (RSHIFT_EXPR, mask,
4160 size_int (nbitsize - lbitsize - lbitpos));
4162 if (! const_p)
4164 if (nbitpos < 0)
4165 return 0;
4167 /* If not comparing with a constant, just rework the comparison
4168 and return. */
4169 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4170 nbitsize, nbitpos, 1, lreversep);
4171 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4172 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4173 nbitsize, nbitpos, 1, rreversep);
4174 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4175 return fold_build2_loc (loc, code, compare_type, t1, t2);
4178 /* Otherwise, we are handling the constant case. See if the constant is too
4179 big for the field. Warn and return a tree for 0 (false) if so. We do
4180 this not only for its own sake, but to avoid having to test for this
4181 error case below. If we didn't, we might generate wrong code.
4183 For unsigned fields, the constant shifted right by the field length should
4184 be all zero. For signed fields, the high-order bits should agree with
4185 the sign bit. */
4187 if (lunsignedp)
4189 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4191 warning (0, "comparison is always %d due to width of bit-field",
4192 code == NE_EXPR);
4193 return constant_boolean_node (code == NE_EXPR, compare_type);
4196 else
4198 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4199 if (tem != 0 && tem != -1)
4201 warning (0, "comparison is always %d due to width of bit-field",
4202 code == NE_EXPR);
4203 return constant_boolean_node (code == NE_EXPR, compare_type);
4207 if (nbitpos < 0)
4208 return 0;
4210 /* Single-bit compares should always be against zero. */
4211 if (lbitsize == 1 && ! integer_zerop (rhs))
4213 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4214 rhs = build_int_cst (type, 0);
4217 /* Make a new bitfield reference, shift the constant over the
4218 appropriate number of bits and mask it with the computed mask
4219 (in case this was a signed field). If we changed it, make a new one. */
4220 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4221 nbitsize, nbitpos, 1, lreversep);
4223 rhs = const_binop (BIT_AND_EXPR,
4224 const_binop (LSHIFT_EXPR,
4225 fold_convert_loc (loc, unsigned_type, rhs),
4226 size_int (lbitpos)),
4227 mask);
4229 lhs = build2_loc (loc, code, compare_type,
4230 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4231 return lhs;
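/* Illustrative sketch, not part of the original sources: the constant case
   above, modeled for a hypothetical 3-bit field occupying bits 4..6 of a
   byte.  Instead of extracting and shifting the field, the constant is
   shifted into place and the containing byte is masked.  */

static int
sketch_bitfield_eq_extracted (unsigned char byte)
{
  return ((byte >> 4) & 7) == 3;	/* shift implicit in extraction */
}

static int
sketch_bitfield_eq_folded (unsigned char byte)
{
  return (byte & (7u << 4)) == (3u << 4);	/* no shift of the operand */
}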
4234 /* Subroutine for fold_truth_andor_1: decode a field reference.
4236 If EXP is a comparison reference, we return the innermost reference.
4238 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4239 set to the starting bit number.
4241 If the innermost field can be completely contained in a mode-sized
4242 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4244 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4245 otherwise it is not changed.
4247 *PUNSIGNEDP is set to the signedness of the field.
4249 *PREVERSEP is set to the storage order of the field.
4251 *PMASK is set to the mask used. This is either contained in a
4252 BIT_AND_EXPR or derived from the width of the field.
4254 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4256 Return 0 if this is not a component reference or is one that we can't
4257 do anything with. */
4259 static tree
4260 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4261 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4262 int *punsignedp, int *preversep, int *pvolatilep,
4263 tree *pmask, tree *pand_mask)
4265 tree exp = *exp_;
4266 tree outer_type = 0;
4267 tree and_mask = 0;
4268 tree mask, inner, offset;
4269 tree unsigned_type;
4270 unsigned int precision;
4272 /* All the optimizations using this function assume integer fields.
4273 There are problems with FP fields since the type_for_size call
4274 below can fail for, e.g., XFmode. */
4275 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4276 return 0;
4278 /* We are interested in the bare arrangement of bits, so strip everything
4279 that doesn't affect the machine mode. However, record the type of the
4280 outermost expression if it may matter below. */
4281 if (CONVERT_EXPR_P (exp)
4282 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4283 outer_type = TREE_TYPE (exp);
4284 STRIP_NOPS (exp);
4286 if (TREE_CODE (exp) == BIT_AND_EXPR)
4288 and_mask = TREE_OPERAND (exp, 1);
4289 exp = TREE_OPERAND (exp, 0);
4290 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4291 if (TREE_CODE (and_mask) != INTEGER_CST)
4292 return 0;
4295 poly_int64 poly_bitsize, poly_bitpos;
4296 inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4297 pmode, punsignedp, preversep, pvolatilep);
4298 if ((inner == exp && and_mask == 0)
4299 || !poly_bitsize.is_constant (pbitsize)
4300 || !poly_bitpos.is_constant (pbitpos)
4301 || *pbitsize < 0
4302 || offset != 0
4303 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4304 /* Reject out-of-bound accesses (PR79731). */
4305 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4306 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4307 *pbitpos + *pbitsize) < 0))
4308 return 0;
4310 *exp_ = exp;
4312 /* If the number of bits in the reference is the same as the bitsize of
4313 the outer type, then the outer type gives the signedness. Otherwise
4314 (in case of a small bitfield) the signedness is unchanged. */
4315 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4316 *punsignedp = TYPE_UNSIGNED (outer_type);
4318 /* Compute the mask to access the bitfield. */
4319 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4320 precision = TYPE_PRECISION (unsigned_type);
4322 mask = build_int_cst_type (unsigned_type, -1);
4324 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4325 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4327 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4328 if (and_mask != 0)
4329 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4330 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4332 *pmask = mask;
4333 *pand_mask = and_mask;
4334 return inner;
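/* Illustrative sketch, not part of the original sources: the LSHIFT/RSHIFT
   pair above builds a mask of BITSIZE low-order ones.  Modeled here for a
   32-bit unsigned int, assuming 0 < bitsize <= 32.  */

static unsigned int
sketch_low_order_mask (unsigned int bitsize)
{
  unsigned int mask = ~0u;
  mask <<= 32 - bitsize;	/* keep bitsize ones, now in the high bits */
  mask >>= 32 - bitsize;	/* move them back down to the low bits */
  return mask;
}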
4337 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4338 bit positions and the type of MASK is signed. */
4340 static int
4341 all_ones_mask_p (const_tree mask, unsigned int size)
4343 tree type = TREE_TYPE (mask);
4344 unsigned int precision = TYPE_PRECISION (type);
4346 /* If this function returns true when the type of the mask is
4347 UNSIGNED, then there will be errors. In particular see
4348 gcc.c-torture/execute/990326-1.c. There does not appear to be
4349 any documentation paper trail as to why this is so. But the
4350 pre-wide-int code worked with that restriction and it has been preserved
4351 here. */
4352 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4353 return false;
4355 return wi::mask (size, false, precision) == wi::to_wide (mask);
4358 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4359 represents the sign bit of EXP's type. If EXP represents a sign
4360 or zero extension, also test VAL against the unextended type.
4361 The return value is the (sub)expression whose sign bit is VAL,
4362 or NULL_TREE otherwise. */
4364 tree
4365 sign_bit_p (tree exp, const_tree val)
4367 int width;
4368 tree t;
4370 /* Tree EXP must have an integral type. */
4371 t = TREE_TYPE (exp);
4372 if (! INTEGRAL_TYPE_P (t))
4373 return NULL_TREE;
4375 /* Tree VAL must be an integer constant. */
4376 if (TREE_CODE (val) != INTEGER_CST
4377 || TREE_OVERFLOW (val))
4378 return NULL_TREE;
4380 width = TYPE_PRECISION (t);
4381 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4382 return exp;
4384 /* Handle extension from a narrower type. */
4385 if (TREE_CODE (exp) == NOP_EXPR
4386 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4387 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4389 return NULL_TREE;
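/* Illustrative sketch, not part of the original sources: for a width-bit
   type, the wi::only_sign_bit_p test above holds exactly when the constant
   equals a single set bit in the most significant position.  Assumes
   0 < width <= 32.  */

static int
sketch_is_sign_bit (unsigned int val, int width)
{
  return val == 1u << (width - 1);
}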
4392 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4393 to be evaluated unconditionally. */
4395 static int
4396 simple_operand_p (const_tree exp)
4398 /* Strip any conversions that don't change the machine mode. */
4399 STRIP_NOPS (exp);
4401 return (CONSTANT_CLASS_P (exp)
4402 || TREE_CODE (exp) == SSA_NAME
4403 || (DECL_P (exp)
4404 && ! TREE_ADDRESSABLE (exp)
4405 && ! TREE_THIS_VOLATILE (exp)
4406 && ! DECL_NONLOCAL (exp)
4407 /* Don't regard global variables as simple. They may be
4408 allocated in ways unknown to the compiler (shared memory,
4409 #pragma weak, etc). */
4410 && ! TREE_PUBLIC (exp)
4411 && ! DECL_EXTERNAL (exp)
4412 /* Weakrefs are not safe to be read, since they can be NULL.
4413 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4414 have DECL_WEAK flag set. */
4415 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4416 /* Loading a static variable is unduly expensive, but global
4417 registers aren't expensive. */
4418 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4421 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4422 to be evaluated unconditionally.
4423 In addition to simple_operand_p, we assume that comparisons, conversions,
4424 and logic-not operations are simple, if their operands are simple, too. */
4426 static bool
4427 simple_operand_p_2 (tree exp)
4429 enum tree_code code;
4431 if (TREE_SIDE_EFFECTS (exp)
4432 || tree_could_trap_p (exp))
4433 return false;
4435 while (CONVERT_EXPR_P (exp))
4436 exp = TREE_OPERAND (exp, 0);
4438 code = TREE_CODE (exp);
4440 if (TREE_CODE_CLASS (code) == tcc_comparison)
4441 return (simple_operand_p (TREE_OPERAND (exp, 0))
4442 && simple_operand_p (TREE_OPERAND (exp, 1)));
4444 if (code == TRUTH_NOT_EXPR)
4445 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4447 return simple_operand_p (exp);
4451 /* The following functions are subroutines to fold_range_test and allow it to
4452 try to change a logical combination of comparisons into a range test.
4454 For example, both
4455 X == 2 || X == 3 || X == 4 || X == 5
4456 and
4457 X >= 2 && X <= 5
4458 are converted to
4459 (unsigned) (X - 2) <= 3
4461 We describe each set of comparisons as being either inside or outside
4462 a range, using a variable named like IN_P, and then describe the
4463 range with a lower and upper bound. If one of the bounds is omitted,
4464 it represents either the highest or lowest value of the type.
4466 In the comments below, we represent a range by two numbers in brackets
4467 preceded by a "+" to designate being inside that range, or a "-" to
4468 designate being outside that range, so the condition can be inverted by
4469 flipping the prefix. An omitted bound is represented by a "-". For
4470 example, "- [-, 10]" means being outside the range starting at the lowest
4471 possible value and ending at 10, in other words, being greater than 10.
4472 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4473 always false.
4475 We set up things so that the missing bounds are handled in a consistent
4476 manner so neither a missing bound nor "true" and "false" need to be
4477 handled using a special case. */
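/* Illustrative sketch, not part of the original sources: the example above
   as standalone C.  Subtracting 2 maps {2,3,4,5} onto {0,1,2,3}; every
   other value of x wraps around to something greater than 3, so the two
   predicates agree for all x.  */

static int
sketch_range_original (unsigned int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
sketch_range_folded (unsigned int x)
{
  return x - 2u <= 3u;
}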
4479 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4480 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4481 and UPPER1_P are nonzero if the respective argument is an upper bound
4482 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4483 must be specified for a comparison. ARG1 will be converted to ARG0's
4484 type if both are specified. */
4486 static tree
4487 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4488 tree arg1, int upper1_p)
4490 tree tem;
4491 int result;
4492 int sgn0, sgn1;
4494 /* If neither arg represents infinity, do the normal operation.
4495 Else, if not a comparison, return infinity. Else handle the special
4496 comparison rules. Note that most of the cases below won't occur, but
4497 are handled for consistency. */
4499 if (arg0 != 0 && arg1 != 0)
4501 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4502 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4503 STRIP_NOPS (tem);
4504 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4507 if (TREE_CODE_CLASS (code) != tcc_comparison)
4508 return 0;
4510 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4511 for neither. In real mathematics, we cannot assume open-ended ranges are
4512 the same. But this is computer arithmetic, where numbers are finite.
4513 We can therefore replace any missing bound with
4514 a value Z greater than any representable number. This permits
4515 us to treat unbounded ranges as equal. */
4516 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4517 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4518 switch (code)
4520 case EQ_EXPR:
4521 result = sgn0 == sgn1;
4522 break;
4523 case NE_EXPR:
4524 result = sgn0 != sgn1;
4525 break;
4526 case LT_EXPR:
4527 result = sgn0 < sgn1;
4528 break;
4529 case LE_EXPR:
4530 result = sgn0 <= sgn1;
4531 break;
4532 case GT_EXPR:
4533 result = sgn0 > sgn1;
4534 break;
4535 case GE_EXPR:
4536 result = sgn0 >= sgn1;
4537 break;
4538 default:
4539 gcc_unreachable ();
4542 return constant_boolean_node (result, type);
4545 /* Helper routine for make_range. Perform one step for it, returning
4546 the new expression if the loop should continue or NULL_TREE if it should
4547 stop. */
4549 tree
4550 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4551 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4552 bool *strict_overflow_p)
4554 tree arg0_type = TREE_TYPE (arg0);
4555 tree n_low, n_high, low = *p_low, high = *p_high;
4556 int in_p = *p_in_p, n_in_p;
4558 switch (code)
4560 case TRUTH_NOT_EXPR:
4561 /* We can only do something if the range is testing for zero. */
4562 if (low == NULL_TREE || high == NULL_TREE
4563 || ! integer_zerop (low) || ! integer_zerop (high))
4564 return NULL_TREE;
4565 *p_in_p = ! in_p;
4566 return arg0;
4568 case EQ_EXPR: case NE_EXPR:
4569 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4570 /* We can only do something if the range is testing for zero
4571 and if the second operand is an integer constant. Note that
4572 saying something is "in" the range we make is done by
4573 complementing IN_P, since IN_P is set for the initial case of
4574 being not equal to zero; "out" is leaving it alone. */
4575 if (low == NULL_TREE || high == NULL_TREE
4576 || ! integer_zerop (low) || ! integer_zerop (high)
4577 || TREE_CODE (arg1) != INTEGER_CST)
4578 return NULL_TREE;
4580 switch (code)
4582 case NE_EXPR: /* - [c, c] */
4583 low = high = arg1;
4584 break;
4585 case EQ_EXPR: /* + [c, c] */
4586 in_p = ! in_p, low = high = arg1;
4587 break;
4588 case GT_EXPR: /* - [-, c] */
4589 low = 0, high = arg1;
4590 break;
4591 case GE_EXPR: /* + [c, -] */
4592 in_p = ! in_p, low = arg1, high = 0;
4593 break;
4594 case LT_EXPR: /* - [c, -] */
4595 low = arg1, high = 0;
4596 break;
4597 case LE_EXPR: /* + [-, c] */
4598 in_p = ! in_p, low = 0, high = arg1;
4599 break;
4600 default:
4601 gcc_unreachable ();
4604 /* If this is an unsigned comparison, we also know that EXP is
4605 greater than or equal to zero. We base the range tests we make
4606 on that fact, so we record it here so we can parse existing
4607 range tests. We test arg0_type since often the return type
4608 of, e.g. EQ_EXPR, is boolean. */
4609 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4611 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4612 in_p, low, high, 1,
4613 build_int_cst (arg0_type, 0),
4614 NULL_TREE))
4615 return NULL_TREE;
4617 in_p = n_in_p, low = n_low, high = n_high;
4619 /* If the high bound is missing, but we have a nonzero low
4620 bound, reverse the range so it goes from zero to the low bound
4621 minus 1. */
4622 if (high == 0 && low && ! integer_zerop (low))
4624 in_p = ! in_p;
4625 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4626 build_int_cst (TREE_TYPE (low), 1), 0);
4627 low = build_int_cst (arg0_type, 0);
4631 *p_low = low;
4632 *p_high = high;
4633 *p_in_p = in_p;
4634 return arg0;
4636 case NEGATE_EXPR:
4637 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4638 low and high are non-NULL, then normalize will DTRT. */
4639 if (!TYPE_UNSIGNED (arg0_type)
4640 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4642 if (low == NULL_TREE)
4643 low = TYPE_MIN_VALUE (arg0_type);
4644 if (high == NULL_TREE)
4645 high = TYPE_MAX_VALUE (arg0_type);
4648 /* (-x) IN [a,b] -> x in [-b, -a] */
4649 n_low = range_binop (MINUS_EXPR, exp_type,
4650 build_int_cst (exp_type, 0),
4651 0, high, 1);
4652 n_high = range_binop (MINUS_EXPR, exp_type,
4653 build_int_cst (exp_type, 0),
4654 0, low, 0);
4655 if (n_high != 0 && TREE_OVERFLOW (n_high))
4656 return NULL_TREE;
4657 goto normalize;
4659 case BIT_NOT_EXPR:
4660 /* ~ X -> -X - 1 */
4661 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4662 build_int_cst (exp_type, 1));
4664 case PLUS_EXPR:
4665 case MINUS_EXPR:
4666 if (TREE_CODE (arg1) != INTEGER_CST)
4667 return NULL_TREE;
4669 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4670 move a constant to the other side. */
4671 if (!TYPE_UNSIGNED (arg0_type)
4672 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4673 return NULL_TREE;
4675 /* If EXP is signed, any overflow in the computation is undefined,
4676 so we don't worry about it so long as our computations on
4677 the bounds don't overflow. For unsigned, overflow is defined
4678 and this is exactly the right thing. */
4679 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4680 arg0_type, low, 0, arg1, 0);
4681 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4682 arg0_type, high, 1, arg1, 0);
4683 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4684 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4685 return NULL_TREE;
4687 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4688 *strict_overflow_p = true;
4690 normalize:
4691 /* Check for an unsigned range which has wrapped around the maximum
4692 value thus making n_high < n_low, and normalize it. */
4693 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4695 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4696 build_int_cst (TREE_TYPE (n_high), 1), 0);
4697 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4698 build_int_cst (TREE_TYPE (n_low), 1), 0);
4700 /* If the range is of the form +/- [ x+1, x ], we won't
4701 be able to normalize it. But then, it represents the
4702 whole range or the empty set, so make it
4703 +/- [ -, - ]. */
4704 if (tree_int_cst_equal (n_low, low)
4705 && tree_int_cst_equal (n_high, high))
4706 low = high = 0;
4707 else
4708 in_p = ! in_p;
4710 else
4711 low = n_low, high = n_high;
4713 *p_low = low;
4714 *p_high = high;
4715 *p_in_p = in_p;
4716 return arg0;
4718 CASE_CONVERT:
4719 case NON_LVALUE_EXPR:
4720 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4721 return NULL_TREE;
4723 if (! INTEGRAL_TYPE_P (arg0_type)
4724 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4725 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4726 return NULL_TREE;
4728 n_low = low, n_high = high;
4730 if (n_low != 0)
4731 n_low = fold_convert_loc (loc, arg0_type, n_low);
4733 if (n_high != 0)
4734 n_high = fold_convert_loc (loc, arg0_type, n_high);
4736 /* If we're converting arg0 from an unsigned type to exp's
4737 signed type, we will be doing the comparison as unsigned.
4738 The tests above have already verified that LOW and HIGH
4739 are both positive.
4741 So we have to ensure that we will handle large unsigned
4742 values the same way that the current signed bounds treat
4743 negative values. */
4745 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4747 tree high_positive;
4748 tree equiv_type;
4749 /* For fixed-point modes, we need to pass the saturating flag
4750 as the 2nd parameter. */
4751 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4752 equiv_type
4753 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4754 TYPE_SATURATING (arg0_type));
4755 else
4756 equiv_type
4757 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4759 /* A range without an upper bound is, naturally, unbounded.
4760 Since convert would have cropped a very large value, use
4761 the max value for the destination type. */
4762 high_positive
4763 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4764 : TYPE_MAX_VALUE (arg0_type);
4766 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4767 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4768 fold_convert_loc (loc, arg0_type,
4769 high_positive),
4770 build_int_cst (arg0_type, 1));
4772 /* If the low bound is specified, "and" the range with the
4773 range for which the original unsigned value will be
4774 positive. */
4775 if (low != 0)
4777 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4778 1, fold_convert_loc (loc, arg0_type,
4779 integer_zero_node),
4780 high_positive))
4781 return NULL_TREE;
4783 in_p = (n_in_p == in_p);
4785 else
4787 /* Otherwise, "or" the range with the range of the input
4788 that will be interpreted as negative. */
4789 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4790 1, fold_convert_loc (loc, arg0_type,
4791 integer_zero_node),
4792 high_positive))
4793 return NULL_TREE;
4795 in_p = (in_p != n_in_p);
4799 *p_low = n_low;
4800 *p_high = n_high;
4801 *p_in_p = in_p;
4802 return arg0;
4804 default:
4805 return NULL_TREE;
4809 /* Given EXP, a logical expression, set the range it is testing into
4810 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4811 actually being tested. *PLOW and *PHIGH will be made of the same
4812 type as the returned expression. If EXP is not a comparison, we
4813 will most likely not be returning a useful value and range. Set
4814 *STRICT_OVERFLOW_P to true if the return value is only valid
4815 because signed overflow is undefined; otherwise, do not change
4816 *STRICT_OVERFLOW_P. */
4818 tree
4819 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4820 bool *strict_overflow_p)
4822 enum tree_code code;
4823 tree arg0, arg1 = NULL_TREE;
4824 tree exp_type, nexp;
4825 int in_p;
4826 tree low, high;
4827 location_t loc = EXPR_LOCATION (exp);
4829 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4830 and see if we can refine the range. Some of the cases below may not
4831 happen, but it doesn't seem worth worrying about this. We "continue"
4832 the outer loop when we've changed something; otherwise we "break"
4833 the switch, which will "break" the while. */
4835 in_p = 0;
4836 low = high = build_int_cst (TREE_TYPE (exp), 0);
4838 while (1)
4840 code = TREE_CODE (exp);
4841 exp_type = TREE_TYPE (exp);
4842 arg0 = NULL_TREE;
4844 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4846 if (TREE_OPERAND_LENGTH (exp) > 0)
4847 arg0 = TREE_OPERAND (exp, 0);
4848 if (TREE_CODE_CLASS (code) == tcc_binary
4849 || TREE_CODE_CLASS (code) == tcc_comparison
4850 || (TREE_CODE_CLASS (code) == tcc_expression
4851 && TREE_OPERAND_LENGTH (exp) > 1))
4852 arg1 = TREE_OPERAND (exp, 1);
4854 if (arg0 == NULL_TREE)
4855 break;
4857 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4858 &high, &in_p, strict_overflow_p);
4859 if (nexp == NULL_TREE)
4860 break;
4861 exp = nexp;
4864 /* If EXP is a constant, we can evaluate whether this is true or false. */
4865 if (TREE_CODE (exp) == INTEGER_CST)
4867 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4868 exp, 0, low, 0))
4869 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4870 exp, 1, high, 1)));
4871 low = high = 0;
4872 exp = 0;
4875 *pin_p = in_p, *plow = low, *phigh = high;
4876 return exp;
4879 /* Returns TRUE if a [LOW, HIGH] range check can be optimized to
4880 a bitwise check, i.e. when
4881 LOW == 0xXX...X00...0
4882 HIGH == 0xXX...X11...1
4883 Return corresponding mask in MASK and stem in VALUE. */
4885 static bool
4886 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
4887 tree *value)
4889 if (TREE_CODE (low) != INTEGER_CST
4890 || TREE_CODE (high) != INTEGER_CST)
4891 return false;
4893 unsigned prec = TYPE_PRECISION (type);
4894 wide_int lo = wi::to_wide (low, prec);
4895 wide_int hi = wi::to_wide (high, prec);
4897 wide_int end_mask = lo ^ hi;
4898 if ((end_mask & (end_mask + 1)) != 0
4899 || (lo & end_mask) != 0)
4900 return false;
4902 wide_int stem_mask = ~end_mask;
4903 wide_int stem = lo & stem_mask;
4904 if (stem != (hi & stem_mask))
4905 return false;
4907 *mask = wide_int_to_tree (type, stem_mask);
4908 *value = wide_int_to_tree (type, stem);
4910 return true;
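/* Illustrative sketch, not part of the original sources: with LOW == 0x50
   and HIGH == 0x5f, end_mask is 0x0f, stem_mask is ~0x0f and the stem is
   0x50, so the range test collapses to a single masked compare.  */

static int
sketch_maskable_range (unsigned int x)
{
  /* Equivalent to x >= 0x50 && x <= 0x5f.  */
  return (x & ~0x0fu) == 0x50;
}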
4913 /* Helper routine for build_range_check and match.pd. Return the type to
4914 perform the check or NULL if it shouldn't be optimized. */
4916 tree
4917 range_check_type (tree etype)
4919 /* First make sure that arithmetic in this type is valid, then make sure
4920 that it wraps around. */
4921 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4922 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4923 TYPE_UNSIGNED (etype));
4925 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4927 tree utype, minv, maxv;
4929 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4930 for the type in question, as we rely on this here. */
4931 utype = unsigned_type_for (etype);
4932 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4933 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4934 build_int_cst (TREE_TYPE (maxv), 1), 1);
4935 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4937 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4938 minv, 1, maxv, 1)))
4939 etype = utype;
4940 else
4941 return NULL_TREE;
4943 return etype;
4946 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4947 type, TYPE, return an expression to test if EXP is in (or out of, depending
4948 on IN_P) the range. Return 0 if the test couldn't be created. */
4950 tree
4951 build_range_check (location_t loc, tree type, tree exp, int in_p,
4952 tree low, tree high)
4954 tree etype = TREE_TYPE (exp), mask, value;
4956 /* Disable this optimization for function pointer expressions
4957 on targets that require function pointer canonicalization. */
4958 if (targetm.have_canonicalize_funcptr_for_compare ()
4959 && POINTER_TYPE_P (etype)
4960 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
4961 return NULL_TREE;
4963 if (! in_p)
4965 value = build_range_check (loc, type, exp, 1, low, high);
4966 if (value != 0)
4967 return invert_truthvalue_loc (loc, value);
4969 return 0;
4972 if (low == 0 && high == 0)
4973 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4975 if (low == 0)
4976 return fold_build2_loc (loc, LE_EXPR, type, exp,
4977 fold_convert_loc (loc, etype, high));
4979 if (high == 0)
4980 return fold_build2_loc (loc, GE_EXPR, type, exp,
4981 fold_convert_loc (loc, etype, low));
4983 if (operand_equal_p (low, high, 0))
4984 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4985 fold_convert_loc (loc, etype, low));
4987 if (TREE_CODE (exp) == BIT_AND_EXPR
4988 && maskable_range_p (low, high, etype, &mask, &value))
4989 return fold_build2_loc (loc, EQ_EXPR, type,
4990 fold_build2_loc (loc, BIT_AND_EXPR, etype,
4991 exp, mask),
4992 value);
4994 if (integer_zerop (low))
4996 if (! TYPE_UNSIGNED (etype))
4998 etype = unsigned_type_for (etype);
4999 high = fold_convert_loc (loc, etype, high);
5000 exp = fold_convert_loc (loc, etype, exp);
5002 return build_range_check (loc, type, exp, 1, 0, high);
5005 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
5006 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5008 int prec = TYPE_PRECISION (etype);
5010 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5012 if (TYPE_UNSIGNED (etype))
5014 tree signed_etype = signed_type_for (etype);
5015 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5016 etype
5017 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5018 else
5019 etype = signed_etype;
5020 exp = fold_convert_loc (loc, etype, exp);
5022 return fold_build2_loc (loc, GT_EXPR, type, exp,
5023 build_int_cst (etype, 0));
5027 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5028 This requires wrap-around arithmetic for the type of the expression. */
5029 etype = range_check_type (etype);
5030 if (etype == NULL_TREE)
5031 return NULL_TREE;
5033 if (POINTER_TYPE_P (etype))
5034 etype = unsigned_type_for (etype);
5036 high = fold_convert_loc (loc, etype, high);
5037 low = fold_convert_loc (loc, etype, low);
5038 exp = fold_convert_loc (loc, etype, exp);
5040 value = const_binop (MINUS_EXPR, high, low);
5042 if (value != 0 && !TREE_OVERFLOW (value))
5043 return build_range_check (loc, type,
5044 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5045 1, build_int_cst (etype, 0), value);
5047 return 0;
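/* Illustrative sketch, not part of the original sources: the signed-type
   special case above, assuming an 8-bit signed char and wrap-around
   conversion (the usual two's-complement target).  Values 1..127 are
   exactly the 8-bit values whose signed interpretation is positive.  */

static int
sketch_range_1_127 (unsigned char c)
{
  return c >= 1 && c <= 127;
}

static int
sketch_range_1_127_folded (unsigned char c)
{
  return (signed char) c > 0;
}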
5050 /* Return the predecessor of VAL in its type, handling the infinite case. */
5052 static tree
5053 range_predecessor (tree val)
5055 tree type = TREE_TYPE (val);
5057 if (INTEGRAL_TYPE_P (type)
5058 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5059 return 0;
5060 else
5061 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5062 build_int_cst (TREE_TYPE (val), 1), 0);
5065 /* Return the successor of VAL in its type, handling the infinite case. */
5067 static tree
5068 range_successor (tree val)
5070 tree type = TREE_TYPE (val);
5072 if (INTEGRAL_TYPE_P (type)
5073 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5074 return 0;
5075 else
5076 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5077 build_int_cst (TREE_TYPE (val), 1), 0);
5080 /* Given two ranges, see if we can merge them into one. Return 1 if we
5081 can, 0 if we can't. Set the output range into the specified parameters. */
5083 bool
5084 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5085 tree high0, int in1_p, tree low1, tree high1)
5087 int no_overlap;
5088 int subset;
5089 int temp;
5090 tree tem;
5091 int in_p;
5092 tree low, high;
5093 int lowequal = ((low0 == 0 && low1 == 0)
5094 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5095 low0, 0, low1, 0)));
5096 int highequal = ((high0 == 0 && high1 == 0)
5097 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5098 high0, 1, high1, 1)));
5100 /* Make range 0 be the range that starts first, or ends last if they
5101 start at the same value. Swap them if that is not already the case. */
5102 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5103 low0, 0, low1, 0))
5104 || (lowequal
5105 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5106 high1, 1, high0, 1))))
5108 temp = in0_p, in0_p = in1_p, in1_p = temp;
5109 tem = low0, low0 = low1, low1 = tem;
5110 tem = high0, high0 = high1, high1 = tem;
5113 /* If the second range is != high1 where high1 is the maximum value of
5114 the type, try merging with the < high1 range first. */
5115 if (low1
5116 && high1
5117 && TREE_CODE (low1) == INTEGER_CST
5118 && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5119 || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5120 && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5121 GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5122 && operand_equal_p (low1, high1, 0))
5124 if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5125 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5126 !in1_p, NULL_TREE, range_predecessor (low1)))
5127 return true;
5128 /* Similarly, for the second range != low1 where low1 is the minimum value
5129 of the type, try merging with the > low1 range first. */
5130 if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5131 && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5132 !in1_p, range_successor (low1), NULL_TREE))
5133 return true;
5136 /* Now flag two cases, whether the ranges are disjoint or whether the
5137 second range is totally subsumed in the first. Note that the tests
5138 below are simplified by the ones above. */
5139 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5140 high0, 1, low1, 0));
5141 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5142 high1, 1, high0, 1));
5144 /* We now have four cases, depending on whether we are including or
5145 excluding the two ranges. */
5146 if (in0_p && in1_p)
5148 /* If they don't overlap, the result is false. If the second range
5149 is a subset it is the result. Otherwise, the range is from the start
5150 of the second to the end of the first. */
5151 if (no_overlap)
5152 in_p = 0, low = high = 0;
5153 else if (subset)
5154 in_p = 1, low = low1, high = high1;
5155 else
5156 in_p = 1, low = low1, high = high0;
5159 else if (in0_p && ! in1_p)
5161 /* If they don't overlap, the result is the first range. If they are
5162 equal, the result is false. If the second range is a subset of the
5163 first, and the ranges begin at the same place, we go from just after
5164 the end of the second range to the end of the first. If the second
5165 range is not a subset of the first, or if it is a subset and both
5166 ranges end at the same place, the range starts at the start of the
5167 first range and ends just before the second range.
5168 Otherwise, we can't describe this as a single range. */
5169 if (no_overlap)
5170 in_p = 1, low = low0, high = high0;
5171 else if (lowequal && highequal)
5172 in_p = 0, low = high = 0;
5173 else if (subset && lowequal)
5175 low = range_successor (high1);
5176 high = high0;
5177 in_p = 1;
5178 if (low == 0)
5180 /* We are in the weird situation where high0 > high1 but
5181 high1 has no successor. Punt. */
5182 return 0;
5185 else if (! subset || highequal)
5187 low = low0;
5188 high = range_predecessor (low1);
5189 in_p = 1;
5190 if (high == 0)
5192 /* low0 < low1 but low1 has no predecessor. Punt. */
5193 return 0;
5196 else
5197 return 0;
5200 else if (! in0_p && in1_p)
5202 /* If they don't overlap, the result is the second range. If the second
5203 is a subset of the first, the result is false. Otherwise,
5204 the range starts just after the first range and ends at the
5205 end of the second. */
5206 if (no_overlap)
5207 in_p = 1, low = low1, high = high1;
5208 else if (subset || highequal)
5209 in_p = 0, low = high = 0;
5210 else
5212 low = range_successor (high0);
5213 high = high1;
5214 in_p = 1;
5215 if (low == 0)
5217 /* high1 > high0 but high0 has no successor. Punt. */
5218 return 0;
5223 else
5225 /* The case where we are excluding both ranges. Here the complex case
5226 is if they don't overlap. In that case, the only time we have a
5227 range is if they are adjacent. If the second is a subset of the
5228 first, the result is the first. Otherwise, the range to exclude
5229 starts at the beginning of the first range and ends at the end of the
5230 second. */
5231 if (no_overlap)
5233 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5234 range_successor (high0),
5235 1, low1, 0)))
5236 in_p = 0, low = low0, high = high1;
5237 else
5239 /* Canonicalize - [min, x] into - [-, x]. */
5240 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5241 switch (TREE_CODE (TREE_TYPE (low0)))
5243 case ENUMERAL_TYPE:
5244 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5245 GET_MODE_BITSIZE
5246 (TYPE_MODE (TREE_TYPE (low0)))))
5247 break;
5248 /* FALLTHROUGH */
5249 case INTEGER_TYPE:
5250 if (tree_int_cst_equal (low0,
5251 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5252 low0 = 0;
5253 break;
5254 case POINTER_TYPE:
5255 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5256 && integer_zerop (low0))
5257 low0 = 0;
5258 break;
5259 default:
5260 break;
5263 /* Canonicalize - [x, max] into - [x, -]. */
5264 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5265 switch (TREE_CODE (TREE_TYPE (high1)))
5267 case ENUMERAL_TYPE:
5268 if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5269 GET_MODE_BITSIZE
5270 (TYPE_MODE (TREE_TYPE (high1)))))
5271 break;
5272 /* FALLTHROUGH */
5273 case INTEGER_TYPE:
5274 if (tree_int_cst_equal (high1,
5275 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5276 high1 = 0;
5277 break;
5278 case POINTER_TYPE:
5279 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5280 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5281 high1, 1,
5282 build_int_cst (TREE_TYPE (high1), 1),
5283 1)))
5284 high1 = 0;
5285 break;
5286 default:
5287 break;
5290 /* The ranges might also be adjacent between the maximum and
5291 minimum values of the given type. For
5292 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5293 return + [x + 1, y - 1]. */
5294 if (low0 == 0 && high1 == 0)
5296 low = range_successor (high0);
5297 high = range_predecessor (low1);
5298 if (low == 0 || high == 0)
5299 return 0;
5301 in_p = 1;
5303 else
5304 return 0;
5307 else if (subset)
5308 in_p = 0, low = low0, high = high0;
5309 else
5310 in_p = 0, low = low0, high = high1;
5313 *pin_p = in_p, *plow = low, *phigh = high;
5314 return 1;
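/* Illustrative sketch, not part of the original sources: merging two "in"
   ranges with partial overlap.  + [2, 10] combined with + [5, 20] is
   + [5, 10], the start of the second to the end of the first, matching
   the in0_p && in1_p case above.  */

static int
sketch_two_ranges (unsigned int x)
{
  return (x >= 2 && x <= 10) && (x >= 5 && x <= 20);
}

static int
sketch_two_ranges_merged (unsigned int x)
{
  return x >= 5 && x <= 10;
}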
5318 /* Subroutine of fold, looking inside expressions of the form
5319 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5320 of the COND_EXPR. This function is also used to optimize
5321 A op B ? C : A, by reversing the comparison first.
5323 Return a folded expression whose code is not a COND_EXPR
5324 anymore, or NULL_TREE if no folding opportunity is found. */
5326 static tree
5327 fold_cond_expr_with_comparison (location_t loc, tree type,
5328 tree arg0, tree arg1, tree arg2)
5330 enum tree_code comp_code = TREE_CODE (arg0);
5331 tree arg00 = TREE_OPERAND (arg0, 0);
5332 tree arg01 = TREE_OPERAND (arg0, 1);
5333 tree arg1_type = TREE_TYPE (arg1);
5334 tree tem;
5336 STRIP_NOPS (arg1);
5337 STRIP_NOPS (arg2);
5339 /* If we have A op 0 ? A : -A, consider applying the following
5340 transformations:
5342 A == 0? A : -A same as -A
5343 A != 0? A : -A same as A
5344 A >= 0? A : -A same as abs (A)
5345 A > 0? A : -A same as abs (A)
5346 A <= 0? A : -A same as -abs (A)
5347 A < 0? A : -A same as -abs (A)
5349 None of these transformations work for modes with signed
5350 zeros. If A is +/-0, the first two transformations will
5351 change the sign of the result (from +0 to -0, or vice
5352 versa). The last four will fix the sign of the result,
5353 even though the original expressions could be positive or
5354 negative, depending on the sign of A.
5356 Note that all these transformations are correct if A is
5357 NaN, since the two alternatives (A and -A) are also NaNs. */
5358 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5359 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5360 ? real_zerop (arg01)
5361 : integer_zerop (arg01))
5362 && ((TREE_CODE (arg2) == NEGATE_EXPR
5363 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5364 /* In the case that A is of the form X-Y, '-A' (arg2) may
5365 have already been folded to Y-X, check for that. */
5366 || (TREE_CODE (arg1) == MINUS_EXPR
5367 && TREE_CODE (arg2) == MINUS_EXPR
5368 && operand_equal_p (TREE_OPERAND (arg1, 0),
5369 TREE_OPERAND (arg2, 1), 0)
5370 && operand_equal_p (TREE_OPERAND (arg1, 1),
5371 TREE_OPERAND (arg2, 0), 0))))
5372 switch (comp_code)
5374 case EQ_EXPR:
5375 case UNEQ_EXPR:
5376 tem = fold_convert_loc (loc, arg1_type, arg1);
5377 return fold_convert_loc (loc, type, negate_expr (tem));
5378 case NE_EXPR:
5379 case LTGT_EXPR:
5380 return fold_convert_loc (loc, type, arg1);
5381 case UNGE_EXPR:
5382 case UNGT_EXPR:
5383 if (flag_trapping_math)
5384 break;
5385 /* Fall through. */
5386 case GE_EXPR:
5387 case GT_EXPR:
5388 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5389 break;
5390 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5391 return fold_convert_loc (loc, type, tem);
5392 case UNLE_EXPR:
5393 case UNLT_EXPR:
5394 if (flag_trapping_math)
5395 break;
5396 /* FALLTHRU */
5397 case LE_EXPR:
5398 case LT_EXPR:
5399 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5400 break;
5401 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5402 return negate_expr (fold_convert_loc (loc, type, tem));
5403 default:
5404 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5405 break;
5408 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5409 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5410 both transformations are correct when A is NaN: A != 0
5411 is then true, and A == 0 is false. */
5413 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5414 && integer_zerop (arg01) && integer_zerop (arg2))
5416 if (comp_code == NE_EXPR)
5417 return fold_convert_loc (loc, type, arg1);
5418 else if (comp_code == EQ_EXPR)
5419 return build_zero_cst (type);
5422 /* Try some transformations of A op B ? A : B.
5424 A == B? A : B same as B
5425 A != B? A : B same as A
5426 A >= B? A : B same as max (A, B)
5427 A > B? A : B same as max (B, A)
5428 A <= B? A : B same as min (A, B)
5429 A < B? A : B same as min (B, A)
5431 As above, these transformations don't work in the presence
5432 of signed zeros. For example, if A and B are zeros of
5433 opposite sign, the first two transformations will change
5434 the sign of the result. In the last four, the original
5435 expressions give different results for (A=+0, B=-0) and
5436 (A=-0, B=+0), but the transformed expressions do not.
5438 The first two transformations are correct if either A or B
5439 is a NaN. In the first transformation, the condition will
5440 be false, and B will indeed be chosen. In the case of the
5441 second transformation, the condition A != B will be true,
5442 and A will be chosen.
5444 The conversions to max() and min() are not correct if B is
5445 a number and A is not. The conditions in the original
5446 expressions will be false, so all four give B. The min()
5447 and max() versions would give a NaN instead. */
5448 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5449 && operand_equal_for_comparison_p (arg01, arg2)
5450 /* Avoid these transformations if the COND_EXPR may be used
5451 as an lvalue in the C++ front-end. PR c++/19199. */
5452 && (in_gimple_form
5453 || VECTOR_TYPE_P (type)
5454 || (! lang_GNU_CXX ()
5455 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5456 || ! maybe_lvalue_p (arg1)
5457 || ! maybe_lvalue_p (arg2)))
5459 tree comp_op0 = arg00;
5460 tree comp_op1 = arg01;
5461 tree comp_type = TREE_TYPE (comp_op0);
5463 switch (comp_code)
5465 case EQ_EXPR:
5466 return fold_convert_loc (loc, type, arg2);
5467 case NE_EXPR:
5468 return fold_convert_loc (loc, type, arg1);
5469 case LE_EXPR:
5470 case LT_EXPR:
5471 case UNLE_EXPR:
5472 case UNLT_EXPR:
5473 /* In C++ a ?: expression can be an lvalue, so put the
5474 operand which will be used if they are equal first
5475 so that we can convert this back to the
5476 corresponding COND_EXPR. */
5477 if (!HONOR_NANS (arg1))
5479 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5480 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5481 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5482 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5483 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5484 comp_op1, comp_op0);
5485 return fold_convert_loc (loc, type, tem);
5487 break;
5488 case GE_EXPR:
5489 case GT_EXPR:
5490 case UNGE_EXPR:
5491 case UNGT_EXPR:
5492 if (!HONOR_NANS (arg1))
5494 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5495 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5496 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5497 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5498 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5499 comp_op1, comp_op0);
5500 return fold_convert_loc (loc, type, tem);
5502 break;
5503 case UNEQ_EXPR:
5504 if (!HONOR_NANS (arg1))
5505 return fold_convert_loc (loc, type, arg2);
5506 break;
5507 case LTGT_EXPR:
5508 if (!HONOR_NANS (arg1))
5509 return fold_convert_loc (loc, type, arg1);
5510 break;
5511 default:
5512 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5513 break;
5517 return NULL_TREE;
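/* Illustrative sketch, not part of the original sources: for integer
   operands, where neither NaNs nor signed zeros exist, the relational
   cases above reduce to plain min/max; "a < b ? a : b" is MIN_EXPR (b, a)
   and computes the minimum of a and b.  */

static int
sketch_cond_as_min (int a, int b)
{
  return a < b ? a : b;
}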
5522 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5523 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5524 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5525 false) >= 2)
5526 #endif
5528 /* EXP is some logical combination of boolean tests. See if we can
5529 merge it into some range test. Return the new tree if so. */
5531 static tree
5532 fold_range_test (location_t loc, enum tree_code code, tree type,
5533 tree op0, tree op1)
5535 int or_op = (code == TRUTH_ORIF_EXPR
5536 || code == TRUTH_OR_EXPR);
5537 int in0_p, in1_p, in_p;
5538 tree low0, low1, low, high0, high1, high;
5539 bool strict_overflow_p = false;
5540 tree tem, lhs, rhs;
5541 const char * const warnmsg = G_("assuming signed overflow does not occur "
5542 "when simplifying range test");
5544 if (!INTEGRAL_TYPE_P (type))
5545 return 0;
5547 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5548 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5550 /* If this is an OR operation, invert both sides; we will invert
5551 again at the end. */
5552 if (or_op)
5553 in0_p = ! in0_p, in1_p = ! in1_p;
5555 /* If both expressions are the same, if we can merge the ranges, and we
5556 can build the range test, return it or it inverted. If one of the
5557 ranges is always true or always false, consider it to be the same
5558 expression as the other. */
5559 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5560 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5561 in1_p, low1, high1)
5562 && (tem = (build_range_check (loc, type,
5563 lhs != 0 ? lhs
5564 : rhs != 0 ? rhs : integer_zero_node,
5565 in_p, low, high))) != 0)
5567 if (strict_overflow_p)
5568 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5569 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5572 /* On machines where branches are expensive, if this is a
5573 short-circuited branch and the underlying object on both sides
5574 is the same, make a non-short-circuit operation. */
5575 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
5576 if (PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT) != -1)
5577 logical_op_non_short_circuit
5578 = PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT);
5579 if (logical_op_non_short_circuit
5580 && !flag_sanitize_coverage
5581 && lhs != 0 && rhs != 0
5582 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5583 && operand_equal_p (lhs, rhs, 0))
5585 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5586 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5587 which cases we can't do this. */
5588 if (simple_operand_p (lhs))
5589 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5590 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5591 type, op0, op1);
5593 else if (!lang_hooks.decls.global_bindings_p ()
5594 && !CONTAINS_PLACEHOLDER_P (lhs))
5596 tree common = save_expr (lhs);
5598 if ((lhs = build_range_check (loc, type, common,
5599 or_op ? ! in0_p : in0_p,
5600 low0, high0)) != 0
5601 && (rhs = build_range_check (loc, type, common,
5602 or_op ? ! in1_p : in1_p,
5603 low1, high1)) != 0)
5605 if (strict_overflow_p)
5606 fold_overflow_warning (warnmsg,
5607 WARN_STRICT_OVERFLOW_COMPARISON);
5608 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5609 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5610 type, lhs, rhs);
5615 return 0;
/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */
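/* Illustrative example: in a 32-bit mode with P == 8, UNSIGNEDP == 0 and
   C == 0x80, the extracted sign bit is 1; shifting it up to bit 31 and
   arithmetically shifting back down yields 0xffffff00, and XORing that
   with C gives 0xffffff80, i.e. C sign-extended from its 8-bit width.  */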
static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = build_int_cst (TREE_TYPE (c),
                        wi::extract_uhwi (wi::to_wide (c), p - 1, 1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
                        fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}

/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B
   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
                                 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
        {
          rhs = newrhs;
          rhs_code = TREE_CODE (rhs);
        }
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
        {
          lhs = newlhs;
          lhs_code = TREE_CODE (lhs);
        }
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (type));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
                            lhs, rhs);
  return NULL_TREE;
}

/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */
static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
                    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
        (a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
  machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  scalar_int_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
                    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
                    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
          && operand_equal_p (lr_arg, rr_arg, 0))
        {
          result = combine_comparisons (loc, code, lcode, rcode,
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
      else if (operand_equal_p (ll_arg, rr_arg, 0)
               && operand_equal_p (lr_arg, rl_arg, 0))
        {
          result = combine_comparisons (loc, code, lcode,
                                        swap_tree_comparison (rcode),
                                        truth_type, ll_arg, lr_arg);
          if (result)
            return result;
        }
    }

  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
          ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
                   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
          && lcode == NE_EXPR && integer_zerop (lr_arg)
          && rcode == NE_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
          && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
        return build2_loc (loc, NE_EXPR, truth_type,
                           build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                                   ll_arg, rl_arg),
                           build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
          && lcode == EQ_EXPR && integer_zerop (lr_arg)
          && rcode == EQ_EXPR && integer_zerop (rr_arg)
          && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
          && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
        return build2_loc (loc, EQ_EXPR, truth_type,
                           build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
                                   ll_arg, rl_arg),
                           build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
  volatilep = 0;
  ll_inner = decode_field_reference (loc, &ll_arg,
                                     &ll_bitsize, &ll_bitpos, &ll_mode,
                                     &ll_unsignedp, &ll_reversep, &volatilep,
                                     &ll_mask, &ll_and_mask);
  lr_inner = decode_field_reference (loc, &lr_arg,
                                     &lr_bitsize, &lr_bitpos, &lr_mode,
                                     &lr_unsignedp, &lr_reversep, &volatilep,
                                     &lr_mask, &lr_and_mask);
  rl_inner = decode_field_reference (loc, &rl_arg,
                                     &rl_bitsize, &rl_bitpos, &rl_mode,
                                     &rl_unsignedp, &rl_reversep, &volatilep,
                                     &rl_mask, &rl_and_mask);
  rr_inner = decode_field_reference (loc, &rr_arg,
                                     &rr_bitsize, &rr_bitpos, &rr_mode,
                                     &rr_unsignedp, &rr_reversep, &volatilep,
                                     &rr_mask, &rr_and_mask);

  /* The inner operation on the lhs of each comparison must be the same
     if we are to be able to do anything.  Then see if we have constants.
     If not, the same must be true for the rhs's.  */
  if (volatilep
      || ll_reversep != rl_reversep
      || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    {
      l_const = lr_arg, r_const = rr_arg;
      lr_reversep = ll_reversep;
    }
  else if (lr_reversep != rr_reversep
           || lr_inner == 0 || rr_inner == 0
           || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
        {
          /* Make the left operand unsigned, since we are only interested
             in the value of one bit.  Otherwise we are doing the wrong
             thing below.  */
          ll_unsignedp = 1;
          l_const = ll_mask;
        }
      else
        return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
        {
          rl_unsignedp = 1;
          r_const = rl_mask;
        }
      else
        return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
                      TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
                      volatilep, &lnmode))
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
                         size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
                         size_int (xrl_bitpos));

  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
                                        fold_build1_loc (loc, BIT_NOT_EXPR,
                                                         lntype, ll_mask))))
        {
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
                                        fold_build1_loc (loc, BIT_NOT_EXPR,
                                                         lntype, rl_mask))))
        {
          warning (0, "comparison is always %d", wanted_code == NE_EXPR);

          return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
        }
    }

  /* If the right sides are not constant, do the same for them.  Also,
     disallow this optimization if a size, signedness or storage order
     mismatch occurs between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
          || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
          || ll_reversep != lr_reversep
          /* Make sure the two fields on the right
             correspond to the left without being swapped.  */
          || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
        return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
                          TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
                          volatilep, &rnmode))
        return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
        {
          xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
          xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
        }

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
                                                            rntype, lr_mask),
                             size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
                                                            rntype, rr_mask),
                             size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
         Do this for both items being compared.  If the operands are the
         same size and the bits being compared are in the same position
         then we can do this by masking both and comparing the masked
         results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize
          && xll_bitpos == xlr_bitpos
          && lnbitpos >= 0
          && rnbitpos >= 0)
        {
          lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
                                    lntype, lnbitsize, lnbitpos,
                                    ll_unsignedp || rl_unsignedp, ll_reversep);
          if (! all_ones_mask_p (ll_mask, lnbitsize))
            lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

          rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
                                    rntype, rnbitsize, rnbitpos,
                                    lr_unsignedp || rr_unsignedp, lr_reversep);
          if (! all_ones_mask_p (lr_mask, rnbitsize))
            rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

          return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
        }

      /* There is still another way we can do something:  If both pairs of
         fields being compared are adjacent, we may be able to make a wider
         field containing them both.

         Note that we still must mask the lhs/rhs expressions.  Furthermore,
         the mask must be shifted to account for the shift done by
         make_bit_field_ref.  */
      if (((ll_bitsize + ll_bitpos == rl_bitpos
            && lr_bitsize + lr_bitpos == rr_bitpos)
           || (ll_bitpos == rl_bitpos + rl_bitsize
               && lr_bitpos == rr_bitpos + rr_bitsize))
          && ll_bitpos >= 0
          && rl_bitpos >= 0
          && lr_bitpos >= 0
          && rr_bitpos >= 0)
        {
          tree type;

          lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
                                    ll_bitsize + rl_bitsize,
                                    MIN (ll_bitpos, rl_bitpos),
                                    ll_unsignedp, ll_reversep);
          rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
                                    lr_bitsize + rr_bitsize,
                                    MIN (lr_bitpos, rr_bitpos),
                                    lr_unsignedp, lr_reversep);

          ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
                                 size_int (MIN (xll_bitpos, xrl_bitpos)));
          lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
                                 size_int (MIN (xlr_bitpos, xrr_bitpos)));

          /* Convert to the smaller type before masking out unwanted bits.  */
          type = lntype;
          if (lntype != rntype)
            {
              if (lnbitsize > rnbitsize)
                {
                  lhs = fold_convert_loc (loc, rntype, lhs);
                  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
                  type = rntype;
                }
              else if (lnbitsize < rnbitsize)
                {
                  rhs = fold_convert_loc (loc, lntype, rhs);
                  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
                  type = lntype;
                }
            }

          if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
            lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

          if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
            rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

          return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
        }

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
                           const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
        {
          warning (0, "%<or%> of unmatched not-equal tests is always 1");
          return constant_boolean_node (true, truth_type);
        }
      else
        {
          warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
          return constant_boolean_node (false, truth_type);
        }
    }

  if (lnbitpos < 0)
    return 0;

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, ll_arg,
                               lntype, lnbitsize, lnbitpos,
                               ll_unsignedp || rl_unsignedp, ll_reversep);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
                     const_binop (BIT_IOR_EXPR, l_const, r_const));
}

/* T is an integer expression that is being multiplied, divided, or taken a
   modulus (CODE says which and what kind of divide or modulus) by a
   constant C.  See if we can eliminate that operation by folding it with
   other operations already in T.  WIDE_TYPE, if non-null, is a type that
   should be used for the computation if wider than our type.

   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
   (X * 2) + (Y * 4).  We must, however, be assured that either the original
   expression would not overflow or that overflow is undefined for the type
   in the language in question.

   If we return a non-null expression, it is an equivalent form of the
   original computation, but need not be in the original type.

   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */

static tree
extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
                bool *strict_overflow_p)
{
  /* To avoid exponential search depth, refuse to allow recursion past
     three levels.  Beyond that (1) it's highly unlikely that we'll find
     something interesting and (2) we've probably processed it before
     when we built the inner expression.  */

  static int depth;
  tree ret;

  if (depth > 3)
    return NULL;

  depth++;
  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
  depth--;

  return ret;
}

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
                  bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  tree ctype = (wide_type != 0
                && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
                    > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
                ? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;
  bool sub_strict_overflow_p;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
         or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
          || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
                                TYPE_SIGN (type)))
        {
          tree tem = const_binop (code, fold_convert (ctype, t),
                                  fold_convert (ctype, c));
          /* If the multiplication overflowed, we lost information on it.
             See PR68142 and PR69845.  */
          if (TREE_OVERFLOW (tem))
            return NULL_TREE;
          return tem;
        }
      break;

    CASE_CONVERT: case NON_LVALUE_EXPR:
      /* If op0 is an expression ...  */
      if ((COMPARISON_CLASS_P (op0)
           || UNARY_CLASS_P (op0)
           || BINARY_CLASS_P (op0)
           || VL_EXP_CLASS_P (op0)
           || EXPRESSION_CLASS_P (op0))
          /* ... and has wrapping overflow, and its type is smaller
             than ctype, then we cannot pass through as widening.  */
          && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
                && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
               && (TYPE_PRECISION (ctype)
                   > TYPE_PRECISION (TREE_TYPE (op0))))
              /* ... or this is a truncation (t is narrower than op0),
                 then we cannot pass through this narrowing.  */
              || (TYPE_PRECISION (type)
                  < TYPE_PRECISION (TREE_TYPE (op0)))
              /* ... or signedness changes for division or modulus,
                 then we cannot pass through this conversion.  */
              || (code != MULT_EXPR
                  && (TYPE_UNSIGNED (ctype)
                      != TYPE_UNSIGNED (TREE_TYPE (op0))))
              /* ... or has undefined overflow while the converted to
                 type has not, we cannot do the operation in the inner type
                 as that would introduce undefined overflow.  */
              || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
                   && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
                  && !TYPE_OVERFLOW_UNDEFINED (type))))
        break;

      /* Pass the constant down and see if we can make a simplification.  If
         we can, replace this expression with the inner simplification for
         possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
          && TREE_CODE (t2) == INTEGER_CST
          && !TREE_OVERFLOW (t2)
          && (t1 = extract_muldiv (op0, t2, code,
                                   code == MULT_EXPR ? ctype : NULL_TREE,
                                   strict_overflow_p)) != 0)
        return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
         must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
        {
          tree cstype = (*signed_type_for) (ctype);
          if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
              != 0)
            {
              t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
              return fold_convert (ctype, t1);
            }
          break;
        }
      /* If the constant is negative, we cannot simplify this.  */
      if (tree_int_cst_sgn (c) == -1)
        break;
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      /* For division and modulus, type can't be unsigned, as e.g.
         (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
         For signed types, even with wrapping overflow, this is fine.  */
      if (code != MULT_EXPR && TYPE_UNSIGNED (type))
        break;
      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
          != 0)
        return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
         this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
        break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      sub_strict_overflow_p = false;
      if ((t1 = extract_muldiv (op0, c, code, wide_type,
                                &sub_strict_overflow_p)) != 0
          && (t2 = extract_muldiv (op1, c, code, wide_type,
                                   &sub_strict_overflow_p)) != 0)
        {
          if (tree_int_cst_sgn (c) < 0)
            tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                              fold_convert (ctype, t2));
        }
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
         or floor division, by a power of two, so we can treat it that
         way unless the multiplier or divisor overflows.  Signed
         left-shift overflow is implementation-defined rather than
         undefined in C90, so do not convert signed left shift into
         multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
          && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
                        wi::to_wide (op1))
          && (t1 = fold_convert (ctype,
                                 const_binop (LSHIFT_EXPR, size_one_node,
                                              op1))) != 0
          && !TREE_OVERFLOW (t1))
        return extract_muldiv (build2 (tcode == LSHIFT_EXPR
                                       ? MULT_EXPR : FLOOR_DIV_EXPR,
                                       ctype,
                                       fold_convert (ctype, op0),
                                       t1),
                               c, code, wide_type, strict_overflow_p);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
         can return a new PLUS or MINUS.  If we can't, the only remaining
         cases where we can do anything are if the second operand is a
         constant.  */
      sub_strict_overflow_p = false;
      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
      if (t1 != 0 && t2 != 0
          && TYPE_OVERFLOW_WRAPS (ctype)
          && (code == MULT_EXPR
              /* If not multiplication, we can only do this if both operands
                 are divisible by c.  */
              || (multiple_of_p (ctype, op0, c)
                  && multiple_of_p (ctype, op1, c))))
        {
          if (sub_strict_overflow_p)
            *strict_overflow_p = true;
          return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                              fold_convert (ctype, t2));
        }

      /* If this was a subtraction, negate OP1 and set it to be an addition.
         This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
        {
          tcode = PLUS_EXPR, op1 = negate_expr (op1);
          /* If OP1 was not easily negatable, the constant may be OP0.  */
          if (TREE_CODE (op0) == INTEGER_CST)
            {
              std::swap (op0, op1);
              std::swap (t1, t2);
            }
        }

      if (TREE_CODE (op1) != INTEGER_CST)
        break;

      /* If either OP1 or C is negative, this optimization is not safe for
         some of the division and remainder types while for others we need
         to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
        {
          if (code == CEIL_DIV_EXPR)
            code = FLOOR_DIV_EXPR;
          else if (code == FLOOR_DIV_EXPR)
            code = CEIL_DIV_EXPR;
          else if (code != MULT_EXPR
                   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
            break;
        }

      /* If it's a multiply or a division/modulus operation of a multiple
         of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
          || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
                                TYPE_SIGN (type)))
        {
          op1 = const_binop (code, fold_convert (ctype, op1),
                             fold_convert (ctype, c));
          /* We allow the constant to overflow with wrapping semantics.  */
          if (op1 == 0
              || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
            break;
        }
      else
        break;

      /* If we have an unsigned type, we cannot widen the operation since it
         will change the result if the original computation overflowed.  */
      if (TYPE_UNSIGNED (ctype) && ctype != type)
        break;

      /* The last case is if we are a multiply.  In that case, we can
         apply the distributive law to commute the multiply and addition
         if the multiplication of the constants doesn't overflow
         and overflow is defined.  With undefined overflow
         op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
      if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
        return fold_build2 (tcode, ctype,
                            fold_build2 (code, ctype,
                                         fold_convert (ctype, op0),
                                         fold_convert (ctype, c)),
                            op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
         (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
           || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
          /* If the multiplication can overflow we cannot optimize this.  */
          && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
          && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
          && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
                                TYPE_SIGN (type)))
        {
          *strict_overflow_p = true;
          return omit_one_operand (type, integer_zero_node, op0);
        }

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
         new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
         do something only if the second operand is a constant.  */
      if (same_p
          && TYPE_OVERFLOW_WRAPS (ctype)
          && (t1 = extract_muldiv (op0, c, code, wide_type,
                                   strict_overflow_p)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
                            fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
               && TYPE_OVERFLOW_WRAPS (ctype)
               && (t1 = extract_muldiv (op1, c, code, wide_type,
                                        strict_overflow_p)) != 0)
        return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                            fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
        return 0;

      /* If these are the same operation types, we can associate them
         assuming no overflow.  */
      if (tcode == code)
        {
          bool overflow_p = false;
          wi::overflow_type overflow_mul;
          signop sign = TYPE_SIGN (ctype);
          unsigned prec = TYPE_PRECISION (ctype);
          wide_int mul = wi::mul (wi::to_wide (op1, prec),
                                  wi::to_wide (c, prec),
                                  sign, &overflow_mul);
          overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
          if (overflow_mul
              && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
            overflow_p = true;
          if (!overflow_p)
            return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                                wide_int_to_tree (ctype, mul));
        }

      /* If these operations "cancel" each other, we have the main
         optimizations of this pass, which occur when either constant is a
         multiple of the other, in which case we replace this with either an
         operation of CODE or TCODE.

         If we have an unsigned type, we cannot do this since it will change
         the result if the original computation overflowed.  */
      if (TYPE_OVERFLOW_UNDEFINED (ctype)
          && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
              || (tcode == MULT_EXPR
                  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
                  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
                  && code != MULT_EXPR)))
        {
          if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
                                 TYPE_SIGN (type)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (ctype))
                *strict_overflow_p = true;
              return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
                                  fold_convert (ctype,
                                                const_binop (TRUNC_DIV_EXPR,
                                                             op1, c)));
            }
          else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
                                      TYPE_SIGN (type)))
            {
              if (TYPE_OVERFLOW_UNDEFINED (ctype))
                *strict_overflow_p = true;
              return fold_build2 (code, ctype, fold_convert (ctype, op0),
                                  fold_convert (ctype,
                                                const_binop (TRUNC_DIV_EXPR,
                                                             c, op1)));
            }
        }
      break;

    default:
      break;
    }

  return 0;
}

/* Return a node which has the indicated constant VALUE (either 0 or
   1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
   and is of the indicated TYPE.  */

tree
constant_boolean_node (bool value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else if (TREE_CODE (type) == VECTOR_TYPE)
    return build_vector_from_val (type,
                                  build_int_cst (TREE_TYPE (type),
                                                 value ? -1 : 0));
  else
    return fold_convert (type, value ? integer_one_node : integer_zero_node);
}

/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
                                     enum tree_code code,
                                     tree type, tree op0, tree op1,
                                     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  enum tree_code cond_code = COND_EXPR;

  /* Do not move possibly trapping operations into the conditional as this
     pessimizes code and causes gimplification issues when applied late.  */
  if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
                              ANY_INTEGRAL_TYPE_P (type)
                              && TYPE_OVERFLOW_TRAPS (type), op1))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR
      || TREE_CODE (cond) == VEC_COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand is a throw expression, then it does not make
         sense to try to perform a logical or arithmetic operation
         involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
        lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
        rhs = false_value;
    }
  else if (!(TREE_CODE (type) != VECTOR_TYPE
             && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }
  else
    /* Detect the case of mixing vector and scalar types - bail out.  */
    return NULL_TREE;

  if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
    cond_code = VEC_COND_EXPR;

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified without recursing
     on at least one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
          || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
          || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
        lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
        lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
        rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
        rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
}

/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */
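/* Concretely: (-0.0) + 0.0 evaluates to +0.0, so X + 0.0 is not X when
   X is -0.0; and when rounding towards -infinity, 0.0 - 0.0 evaluates
   to -0.0, so X - 0.0 is not X when X is +0.0 in that mode.  */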
bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (element_mode (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (element_mode (type)))
    return true;

  /* In a vector or complex, we would need to check the sign of all zeros.  */
  if (TREE_CODE (addend) != REAL_CST)
    return false;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
}

/* Subroutine of match.pd that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  C1 and C2 must be INTEGER_CSTs.  */
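/* For example, for unsigned X with C1 == 3 and C2 == 2, the test
   X/3 == 2 holds exactly when X is in [6, 8], so *LO becomes 6 and
   *HI becomes 8 (prod = 6, tmp = C1 - 1 = 2).  */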
enum tree_code
fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
                  tree *hi, bool *neg_overflow)
{
  tree prod, tmp, type = TREE_TYPE (c1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, c1, c2);  */
  wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
  prod = force_fit_type (type, val, -1, overflow);
  *neg_overflow = false;

  if (sign == UNSIGNED)
    {
      tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
      *lo = prod;

      /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
      val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
      *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (c1) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
      switch (tree_int_cst_sgn (c2))
        {
        case -1:
          *neg_overflow = true;
          *lo = int_const_binop (MINUS_EXPR, prod, tmp);
          *hi = prod;
          break;

        case 0:
          *lo = fold_negate_const (tmp, type);
          *hi = tmp;
          break;

        case 1:
          *hi = int_const_binop (PLUS_EXPR, prod, tmp);
          *lo = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
      switch (tree_int_cst_sgn (c2))
        {
        case -1:
          *hi = int_const_binop (MINUS_EXPR, prod, tmp);
          *lo = prod;
          break;

        case 0:
          *hi = fold_negate_const (tmp, type);
          *lo = tmp;
          break;

        case 1:
          *neg_overflow = true;
          *lo = int_const_binop (PLUS_EXPR, prod, tmp);
          *hi = prod;
          break;

        default:
          gcc_unreachable ();
        }
    }

  if (code != EQ_EXPR && code != NE_EXPR)
    return code;

  if (TREE_OVERFLOW (*lo)
      || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
    *lo = NULL_TREE;
  if (TREE_OVERFLOW (*hi)
      || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
    *hi = NULL_TREE;

  return code;
}

/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL.  TYPE is the desired
   result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
                                     enum tree_code code, tree arg0, tree arg1,
                                     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
         this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
          /* This is only a win if casting to a signed type is cheap,
             i.e. when arg00's type is not a partial mode.  */
          && type_has_mode_precision_p (TREE_TYPE (arg00)))
        {
          tree stype = signed_type_for (TREE_TYPE (arg00));
          return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
                                  result_type,
                                  fold_convert_loc (loc, stype, arg00),
                                  build_int_cst (stype, 0));
        }
    }

  return NULL_TREE;
}

/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
                      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
         test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
                                                 result_type);
      if (tem)
        return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
         convert that into ((A >> C2) & 1), where C2 = log2(C).
         Similarly for (A & C) == 0.  */
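      /* For instance, (A & 8) != 0 becomes ((A >> 3) & 1), and
         (A & 8) == 0 becomes (((A >> 3) ^ 1) & 1), since for EQ_EXPR
         the extracted bit is inverted by the XOR with one below.  */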

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && bitnum < TYPE_PRECISION (type)
          && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
                        TYPE_PRECISION (type) - bitnum))
        {
          bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
                      && !flag_syntax_only) ? 0 : 1;

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
        inner = build2 (RSHIFT_EXPR, intermediate_type,
                        inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
        inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}

/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1)
{
  if (CONSTANT_CLASS_P (arg1))
    return 0;
  if (CONSTANT_CLASS_P (arg0))
    return 1;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}

/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integer before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}

/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
                          tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
         the purpose of this canonicalization.  */
      if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
          && negate_expr_p (arg1)
          && code == PLUS_EXPR)
        {
          arg11 = negate_expr (arg1);
          code = MINUS_EXPR;
        }
      else
        arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
        return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  /* Prefer factoring a common non-constant.  */
  if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
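  /* For instance, i * 12 + j * 4 can become (i * 3 + j) * 4, trading
     one of the multiplications for a cheaper one by the common
     power-of-two factor 4.  */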
  else if (tree_fits_shwi_p (arg01)
           && tree_fits_shwi_p (arg11))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = tree_to_shwi (arg01);
      int11 = tree_to_shwi (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
        {
          tmp = int01, int01 = int11, int11 = tmp;
          alt0 = arg00, arg00 = arg10, arg10 = alt0;
          maybe_same = arg01;
          swap = true;
        }
      else
        maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
          /* The remainder should not be a constant, otherwise we
             end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
             increased the number of multiplications necessary.  */
          && TREE_CODE (arg10) != INTEGER_CST)
        {
          alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
                                  build_int_cst (TREE_TYPE (arg00),
                                                 int01 / int11));
          alt1 = arg10;
          same = maybe_same;
          if (swap)
            maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
        }
    }

  if (!same)
    return NULL_TREE;

  if (! ANY_INTEGRAL_TYPE_P (type)
      || TYPE_OVERFLOW_WRAPS (type)
      /* We are neither factoring zero nor minus one.  */
      || TREE_CODE (same) == INTEGER_CST)
    return fold_build2_loc (loc, MULT_EXPR, type,
                            fold_build2_loc (loc, code, type,
                                             fold_convert_loc (loc, type, alt0),
                                             fold_convert_loc (loc, type, alt1)),
                            fold_convert_loc (loc, type, same));

  /* Same may be zero and thus the operation 'code' may overflow.  Likewise
     same may be minus one and thus the multiplication may overflow.  Perform
     the sum operation in an unsigned type.  */
  tree utype = unsigned_type_for (type);
  tree tem = fold_build2_loc (loc, code, utype,
                              fold_convert_loc (loc, utype, alt0),
                              fold_convert_loc (loc, utype, alt1));
  /* If the sum evaluated to a constant that is not -INF the multiplication
     cannot overflow.  */
  if (TREE_CODE (tem) == INTEGER_CST
      && (wi::to_wide (tem)
          != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
    return fold_build2_loc (loc, MULT_EXPR, type,
                            fold_convert (type, tem), same);

  /* Do not resort to unsigned multiplication because
     we lose the no-overflow property of the expression.  */
  return NULL_TREE;
}

/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */
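/* As an illustration, encoding the 32-bit constant 0x01020304 produces
   the bytes { 0x04, 0x03, 0x02, 0x01 } on a typical little-endian
   target and { 0x01, 0x02, 0x03, 0x04 } on a big-endian one.  */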
static int
native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if ((off == -1 && total_bytes > len) || off >= total_bytes)
    return 0;
  if (off == -1)
    off = 0;

  if (ptr == NULL)
    /* Dry run.  */
    return MIN (len, total_bytes - off);

  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
         number of bytes.  */
      value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);

      if (total_bytes > UNITS_PER_WORD)
        {
          word = byte / UNITS_PER_WORD;
          if (WORDS_BIG_ENDIAN)
            word = (words - 1) - word;
          offset = word * UNITS_PER_WORD;
          if (BYTES_BIG_ENDIAN)
            offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
          else
            offset += byte % UNITS_PER_WORD;
        }
      else
        offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      if (offset >= off && offset - off < len)
        ptr[offset - off] = value;
    }
  return MIN (len, total_bytes - off);
}

7231 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7232 specified by EXPR into the buffer PTR of length LEN bytes.
7233 Return the number of bytes placed in the buffer, or zero
7234 upon failure. */
7236 static int
7237 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7239 tree type = TREE_TYPE (expr);
7240 scalar_mode mode = SCALAR_TYPE_MODE (type);
7241 int total_bytes = GET_MODE_SIZE (mode);
7242 FIXED_VALUE_TYPE value;
7243 tree i_value, i_type;
7245 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7246 return 0;
7248 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7250 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7251 return 0;
7253 value = TREE_FIXED_CST (expr);
7254 i_value = double_int_to_tree (i_type, value.data);
7256 return native_encode_int (i_value, ptr, len, off);
7260 /* Subroutine of native_encode_expr. Encode the REAL_CST
7261 specified by EXPR into the buffer PTR of length LEN bytes.
7262 Return the number of bytes placed in the buffer, or zero
7263 upon failure. */
7265 static int
7266 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7268 tree type = TREE_TYPE (expr);
7269 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7270 int byte, offset, word, words, bitpos;
7271 unsigned char value;
7273 /* There are always 32 bits in each long, no matter the size of
7274 the host's long. We handle floating point representations with
7275 up to 192 bits. */
7276 long tmp[6];
7278 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7279 return 0;
7280 if (off == -1)
7281 off = 0;
7283 if (ptr == NULL)
7284 /* Dry run. */
7285 return MIN (len, total_bytes - off);
7287 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7289 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7291 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7292 bitpos += BITS_PER_UNIT)
7294 byte = (bitpos / BITS_PER_UNIT) & 3;
7295 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7297 if (UNITS_PER_WORD < 4)
7299 word = byte / UNITS_PER_WORD;
7300 if (WORDS_BIG_ENDIAN)
7301 word = (words - 1) - word;
7302 offset = word * UNITS_PER_WORD;
7303 if (BYTES_BIG_ENDIAN)
7304 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7305 else
7306 offset += byte % UNITS_PER_WORD;
7308 else
7310 offset = byte;
7311 if (BYTES_BIG_ENDIAN)
7313 /* Reverse bytes within each long, or within the entire float
7314 if it's smaller than a long (for HFmode). */
7315 offset = MIN (3, total_bytes - 1) - offset;
7316 gcc_assert (offset >= 0);
7319 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7320 if (offset >= off
7321 && offset - off < len)
7322 ptr[offset - off] = value;
7324 return MIN (len, total_bytes - off);
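/* Editorial sketch: the byte extraction at the heart of the loop
   above.  real_to_target fills TMP with the float image in 32-bit
   groups, one group per 'long'; this returns byte BITPOS/8 of that
   image before any endian reshuffling.  */
static unsigned char
float_image_byte_sketch (const long *tmp, int bitpos)
{
  return (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
}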
7327 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7328 specified by EXPR into the buffer PTR of length LEN bytes.
7329 Return the number of bytes placed in the buffer, or zero
7330 upon failure. */
7332 static int
7333 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7335 int rsize, isize;
7336 tree part;
7338 part = TREE_REALPART (expr);
7339 rsize = native_encode_expr (part, ptr, len, off);
7340 if (off == -1 && rsize == 0)
7341 return 0;
7342 part = TREE_IMAGPART (expr);
7343 if (off != -1)
7344 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7345 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7346 len - rsize, off);
7347 if (off == -1 && isize != rsize)
7348 return 0;
7349 return rsize + isize;
7353 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7354 specified by EXPR into the buffer PTR of length LEN bytes.
7355 Return the number of bytes placed in the buffer, or zero
7356 upon failure. */
7358 static int
7359 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7361 unsigned HOST_WIDE_INT i, count;
7362 int size, offset;
7363 tree itype, elem;
7365 offset = 0;
7366 if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7367 return 0;
7368 itype = TREE_TYPE (TREE_TYPE (expr));
7369 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7370 for (i = 0; i < count; i++)
7372 if (off >= size)
7374 off -= size;
7375 continue;
7377 elem = VECTOR_CST_ELT (expr, i);
7378 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7379 len - offset, off);
7380 if ((off == -1 && res != size) || res == 0)
7381 return 0;
7382 offset += res;
7383 if (offset >= len)
7384 return (off == -1 && i < count - 1) ? 0 : offset;
7385 if (off != -1)
7386 off = 0;
7388 return offset;
7392 /* Subroutine of native_encode_expr. Encode the STRING_CST
7393 specified by EXPR into the buffer PTR of length LEN bytes.
7394 Return the number of bytes placed in the buffer, or zero
7395 upon failure. */
7397 static int
7398 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7400 tree type = TREE_TYPE (expr);
7402 /* Wide-char strings are encoded in target byte-order so native
7403 encoding them is trivial. */
7404 if (BITS_PER_UNIT != CHAR_BIT
7405 || TREE_CODE (type) != ARRAY_TYPE
7406 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7407 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7408 return 0;
7410 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7411 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7412 return 0;
7413 if (off == -1)
7414 off = 0;
7415 if (ptr == NULL)
7416 /* Dry run. */;
7417 else if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7419 int written = 0;
7420 if (off < TREE_STRING_LENGTH (expr))
7422 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7423 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7425 memset (ptr + written, 0,
7426 MIN (total_bytes - written, len - written));
7428 else
7429 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7430 return MIN (total_bytes - off, len);
7434 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7435 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7436 buffer PTR of length LEN bytes. If PTR is NULL, don't actually store
7437 anything, just do a dry run. If OFF is not -1 then start
7438 the encoding at byte offset OFF and encode at most LEN bytes.
7439 Return the number of bytes placed in the buffer, or zero upon failure. */
7441 int
7442 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7444 /* We don't support starting at negative offset and -1 is special. */
7445 if (off < -1)
7446 return 0;
7448 switch (TREE_CODE (expr))
7450 case INTEGER_CST:
7451 return native_encode_int (expr, ptr, len, off);
7453 case REAL_CST:
7454 return native_encode_real (expr, ptr, len, off);
7456 case FIXED_CST:
7457 return native_encode_fixed (expr, ptr, len, off);
7459 case COMPLEX_CST:
7460 return native_encode_complex (expr, ptr, len, off);
7462 case VECTOR_CST:
7463 return native_encode_vector (expr, ptr, len, off);
7465 case STRING_CST:
7466 return native_encode_string (expr, ptr, len, off);
7468 default:
7469 return 0;
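/* Hypothetical round-trip sketch: encode a constant to target bytes
   and reinterpret them as NEW_TYPE, the same sequence used by
   fold_view_convert_expr further below.  */
static tree
reinterpret_cst_sketch (tree new_type, tree cst)
{
  unsigned char buf[64];
  int len = native_encode_expr (cst, buf, sizeof (buf), -1);
  return len ? native_interpret_expr (new_type, buf, len) : NULL_TREE;
}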
7474 /* Subroutine of native_interpret_expr. Interpret the contents of
7475 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7476 If the buffer cannot be interpreted, return NULL_TREE. */
7478 static tree
7479 native_interpret_int (tree type, const unsigned char *ptr, int len)
7481 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7483 if (total_bytes > len
7484 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7485 return NULL_TREE;
7487 wide_int result = wi::from_buffer (ptr, total_bytes);
7489 return wide_int_to_tree (type, result);
7493 /* Subroutine of native_interpret_expr. Interpret the contents of
7494 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7495 If the buffer cannot be interpreted, return NULL_TREE. */
7497 static tree
7498 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7500 scalar_mode mode = SCALAR_TYPE_MODE (type);
7501 int total_bytes = GET_MODE_SIZE (mode);
7502 double_int result;
7503 FIXED_VALUE_TYPE fixed_value;
7505 if (total_bytes > len
7506 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7507 return NULL_TREE;
7509 result = double_int::from_buffer (ptr, total_bytes);
7510 fixed_value = fixed_from_double_int (result, mode);
7512 return build_fixed (type, fixed_value);
7516 /* Subroutine of native_interpret_expr. Interpret the contents of
7517 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7518 If the buffer cannot be interpreted, return NULL_TREE. */
7520 static tree
7521 native_interpret_real (tree type, const unsigned char *ptr, int len)
7523 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
7524 int total_bytes = GET_MODE_SIZE (mode);
7525 unsigned char value;
7526 /* There are always 32 bits in each long, no matter the size of
7527 the host's long. We handle floating point representations with
7528 up to 192 bits. */
7529 REAL_VALUE_TYPE r;
7530 long tmp[6];
7532 if (total_bytes > len || total_bytes > 24)
7533 return NULL_TREE;
7534 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7536 memset (tmp, 0, sizeof (tmp));
7537 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7538 bitpos += BITS_PER_UNIT)
7540 /* Both OFFSET and BYTE index within a long;
7541 bitpos indexes the whole float. */
7542 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7543 if (UNITS_PER_WORD < 4)
7545 int word = byte / UNITS_PER_WORD;
7546 if (WORDS_BIG_ENDIAN)
7547 word = (words - 1) - word;
7548 offset = word * UNITS_PER_WORD;
7549 if (BYTES_BIG_ENDIAN)
7550 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7551 else
7552 offset += byte % UNITS_PER_WORD;
7554 else
7556 offset = byte;
7557 if (BYTES_BIG_ENDIAN)
7559 /* Reverse bytes within each long, or within the entire float
7560 if it's smaller than a long (for HFmode). */
7561 offset = MIN (3, total_bytes - 1) - offset;
7562 gcc_assert (offset >= 0);
7565 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7567 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7570 real_from_target (&r, tmp, mode);
7571 return build_real (type, r);
7575 /* Subroutine of native_interpret_expr. Interpret the contents of
7576 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7577 If the buffer cannot be interpreted, return NULL_TREE. */
7579 static tree
7580 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7582 tree etype, rpart, ipart;
7583 int size;
7585 etype = TREE_TYPE (type);
7586 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7587 if (size * 2 > len)
7588 return NULL_TREE;
7589 rpart = native_interpret_expr (etype, ptr, size);
7590 if (!rpart)
7591 return NULL_TREE;
7592 ipart = native_interpret_expr (etype, ptr+size, size);
7593 if (!ipart)
7594 return NULL_TREE;
7595 return build_complex (type, rpart, ipart);
7599 /* Subroutine of native_interpret_expr. Interpret the contents of
7600 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7601 If the buffer cannot be interpreted, return NULL_TREE. */
7603 static tree
7604 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
7606 tree etype, elem;
7607 unsigned int i, size;
7608 unsigned HOST_WIDE_INT count;
7610 etype = TREE_TYPE (type);
7611 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7612 if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
7613 || size * count > len)
7614 return NULL_TREE;
7616 tree_vector_builder elements (type, count, 1);
7617 for (i = 0; i < count; ++i)
7619 elem = native_interpret_expr (etype, ptr+(i*size), size);
7620 if (!elem)
7621 return NULL_TREE;
7622 elements.quick_push (elem);
7624 return elements.build ();
7628 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7629 the buffer PTR of length LEN as a constant of type TYPE. For
7630 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7631 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7632 return NULL_TREE. */
7634 tree
7635 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7637 switch (TREE_CODE (type))
7639 case INTEGER_TYPE:
7640 case ENUMERAL_TYPE:
7641 case BOOLEAN_TYPE:
7642 case POINTER_TYPE:
7643 case REFERENCE_TYPE:
7644 return native_interpret_int (type, ptr, len);
7646 case REAL_TYPE:
7647 return native_interpret_real (type, ptr, len);
7649 case FIXED_POINT_TYPE:
7650 return native_interpret_fixed (type, ptr, len);
7652 case COMPLEX_TYPE:
7653 return native_interpret_complex (type, ptr, len);
7655 case VECTOR_TYPE:
7656 return native_interpret_vector (type, ptr, len);
7658 default:
7659 return NULL_TREE;
7663 /* Returns true if we can interpret the contents of a native encoding
7664 as TYPE. */
7666 static bool
7667 can_native_interpret_type_p (tree type)
7669 switch (TREE_CODE (type))
7671 case INTEGER_TYPE:
7672 case ENUMERAL_TYPE:
7673 case BOOLEAN_TYPE:
7674 case POINTER_TYPE:
7675 case REFERENCE_TYPE:
7676 case FIXED_POINT_TYPE:
7677 case REAL_TYPE:
7678 case COMPLEX_TYPE:
7679 case VECTOR_TYPE:
7680 return true;
7681 default:
7682 return false;
7687 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7688 TYPE at compile-time. If we're unable to perform the conversion
7689 return NULL_TREE. */
7691 static tree
7692 fold_view_convert_expr (tree type, tree expr)
7694 /* We support up to 512-bit values (for V8DFmode). */
7695 unsigned char buffer[64];
7696 int len;
7698 /* Check that the host and target are sane. */
7699 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7700 return NULL_TREE;
7702 len = native_encode_expr (expr, buffer, sizeof (buffer));
7703 if (len == 0)
7704 return NULL_TREE;
7706 return native_interpret_expr (type, buffer, len);
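/* Hypothetical usage sketch: constant-fold a type pun.  For a
   REAL_CST of a 32-bit float type and a 32-bit INT_TYPE this yields
   the IEEE bit pattern as an INTEGER_CST, or NULL_TREE on failure.  */
static tree
float_bits_sketch (location_t loc, tree int_type, tree float_cst)
{
  return fold_build1_loc (loc, VIEW_CONVERT_EXPR, int_type, float_cst);
}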
7709 /* Build an expression for the address of T. Folds away INDIRECT_REF
7710 to avoid confusing the gimplify process. */
7712 tree
7713 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7715 /* The size of the object is not relevant when talking about its address. */
7716 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7717 t = TREE_OPERAND (t, 0);
7719 if (TREE_CODE (t) == INDIRECT_REF)
7721 t = TREE_OPERAND (t, 0);
7723 if (TREE_TYPE (t) != ptrtype)
7724 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7726 else if (TREE_CODE (t) == MEM_REF
7727 && integer_zerop (TREE_OPERAND (t, 1)))
7728 return TREE_OPERAND (t, 0);
7729 else if (TREE_CODE (t) == MEM_REF
7730 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7731 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7732 TREE_OPERAND (t, 0),
7733 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7734 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7736 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7738 if (TREE_TYPE (t) != ptrtype)
7739 t = fold_convert_loc (loc, ptrtype, t);
7741 else
7742 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7744 return t;
7747 /* Build an expression for the address of T. */
7749 tree
7750 build_fold_addr_expr_loc (location_t loc, tree t)
7752 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7754 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7757 /* Fold a unary expression of code CODE and type TYPE with operand
7758 OP0. Return the folded expression if folding is successful.
7759 Otherwise, return NULL_TREE. */
7761 tree
7762 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7764 tree tem;
7765 tree arg0;
7766 enum tree_code_class kind = TREE_CODE_CLASS (code);
7768 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7769 && TREE_CODE_LENGTH (code) == 1);
7771 arg0 = op0;
7772 if (arg0)
7774 if (CONVERT_EXPR_CODE_P (code)
7775 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7777 /* Don't use STRIP_NOPS, because signedness of argument type
7778 matters. */
7779 STRIP_SIGN_NOPS (arg0);
7781 else
7783 /* Strip any conversions that don't change the mode. This
7784 is safe for every expression, except for a comparison
7785 expression because its signedness is derived from its
7786 operands.
7788 Note that this is done as an internal manipulation within
7789 the constant folder, in order to find the simplest
7790 representation of the arguments so that their form can be
7791 studied. In any cases, the appropriate type conversions
7792 should be put back in the tree that will get out of the
7793 constant folder. */
7794 STRIP_NOPS (arg0);
7797 if (CONSTANT_CLASS_P (arg0))
7799 tree tem = const_unop (code, type, arg0);
7800 if (tem)
7802 if (TREE_TYPE (tem) != type)
7803 tem = fold_convert_loc (loc, type, tem);
7804 return tem;
7809 tem = generic_simplify (loc, code, type, op0);
7810 if (tem)
7811 return tem;
7813 if (TREE_CODE_CLASS (code) == tcc_unary)
7815 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7816 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7817 fold_build1_loc (loc, code, type,
7818 fold_convert_loc (loc, TREE_TYPE (op0),
7819 TREE_OPERAND (arg0, 1))));
7820 else if (TREE_CODE (arg0) == COND_EXPR)
7822 tree arg01 = TREE_OPERAND (arg0, 1);
7823 tree arg02 = TREE_OPERAND (arg0, 2);
7824 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7825 arg01 = fold_build1_loc (loc, code, type,
7826 fold_convert_loc (loc,
7827 TREE_TYPE (op0), arg01));
7828 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7829 arg02 = fold_build1_loc (loc, code, type,
7830 fold_convert_loc (loc,
7831 TREE_TYPE (op0), arg02));
7832 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7833 arg01, arg02);
7835 /* If this was a conversion, and all we did was to move into
7836 inside the COND_EXPR, bring it back out. But leave it if
7837 it is a conversion from integer to integer and the
7838 result precision is no wider than a word since such a
7839 conversion is cheap and may be optimized away by combine,
7840 while it couldn't if it were outside the COND_EXPR. Then return
7841 so we don't get into an infinite recursion loop taking the
7842 conversion out and then back in. */
7844 if ((CONVERT_EXPR_CODE_P (code)
7845 || code == NON_LVALUE_EXPR)
7846 && TREE_CODE (tem) == COND_EXPR
7847 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7848 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7849 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7850 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7851 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7852 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7853 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7854 && (INTEGRAL_TYPE_P
7855 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7856 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7857 || flag_syntax_only))
7858 tem = build1_loc (loc, code, type,
7859 build3 (COND_EXPR,
7860 TREE_TYPE (TREE_OPERAND
7861 (TREE_OPERAND (tem, 1), 0)),
7862 TREE_OPERAND (tem, 0),
7863 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7864 TREE_OPERAND (TREE_OPERAND (tem, 2),
7865 0)));
7866 return tem;
7870 switch (code)
7872 case NON_LVALUE_EXPR:
7873 if (!maybe_lvalue_p (op0))
7874 return fold_convert_loc (loc, type, op0);
7875 return NULL_TREE;
7877 CASE_CONVERT:
7878 case FLOAT_EXPR:
7879 case FIX_TRUNC_EXPR:
7880 if (COMPARISON_CLASS_P (op0))
7882 /* If we have (type) (a CMP b) and type is an integral type, return
7883 new expression involving the new type. Canonicalize
7884 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7885 non-integral type.
7886 Do not fold the result as that would not simplify further, also
7887 folding again results in recursions. */
7888 if (TREE_CODE (type) == BOOLEAN_TYPE)
7889 return build2_loc (loc, TREE_CODE (op0), type,
7890 TREE_OPERAND (op0, 0),
7891 TREE_OPERAND (op0, 1));
7892 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7893 && TREE_CODE (type) != VECTOR_TYPE)
7894 return build3_loc (loc, COND_EXPR, type, op0,
7895 constant_boolean_node (true, type),
7896 constant_boolean_node (false, type));
7899 /* Handle (T *)&A.B.C for A being of type T and B and C
7900 living at offset zero. This occurs frequently in
7901 C++ upcasting and then accessing the base. */
7902 if (TREE_CODE (op0) == ADDR_EXPR
7903 && POINTER_TYPE_P (type)
7904 && handled_component_p (TREE_OPERAND (op0, 0)))
7906 poly_int64 bitsize, bitpos;
7907 tree offset;
7908 machine_mode mode;
7909 int unsignedp, reversep, volatilep;
7910 tree base
7911 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7912 &offset, &mode, &unsignedp, &reversep,
7913 &volatilep);
7914 /* If the reference was to a (constant) zero offset, we can use
7915 the address of the base if it has the same base type
7916 as the result type and the pointer type is unqualified. */
7917 if (!offset
7918 && known_eq (bitpos, 0)
7919 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7920 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7921 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7922 return fold_convert_loc (loc, type,
7923 build_fold_addr_expr_loc (loc, base));
7926 if (TREE_CODE (op0) == MODIFY_EXPR
7927 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7928 /* Detect assigning a bitfield. */
7929 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7930 && DECL_BIT_FIELD
7931 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7933 /* Don't leave an assignment inside a conversion
7934 unless assigning a bitfield. */
7935 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7936 /* First do the assignment, then return converted constant. */
7937 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7938 TREE_NO_WARNING (tem) = 1;
7939 TREE_USED (tem) = 1;
7940 return tem;
7943 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7944 constants (if x has signed type, the sign bit cannot be set
7945 in c). This folds extension into the BIT_AND_EXPR.
7946 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7947 very likely don't have maximal range for their precision and this
7948 transformation effectively doesn't preserve non-maximal ranges. */
7949 if (TREE_CODE (type) == INTEGER_TYPE
7950 && TREE_CODE (op0) == BIT_AND_EXPR
7951 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7953 tree and_expr = op0;
7954 tree and0 = TREE_OPERAND (and_expr, 0);
7955 tree and1 = TREE_OPERAND (and_expr, 1);
7956 int change = 0;
7958 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7959 || (TYPE_PRECISION (type)
7960 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7961 change = 1;
7962 else if (TYPE_PRECISION (TREE_TYPE (and1))
7963 <= HOST_BITS_PER_WIDE_INT
7964 && tree_fits_uhwi_p (and1))
7966 unsigned HOST_WIDE_INT cst;
7968 cst = tree_to_uhwi (and1);
7969 cst &= HOST_WIDE_INT_M1U
7970 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7971 change = (cst == 0);
7972 if (change
7973 && !flag_syntax_only
7974 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
7975 == ZERO_EXTEND))
7977 tree uns = unsigned_type_for (TREE_TYPE (and0));
7978 and0 = fold_convert_loc (loc, uns, and0);
7979 and1 = fold_convert_loc (loc, uns, and1);
7982 if (change)
7984 tem = force_fit_type (type, wi::to_widest (and1), 0,
7985 TREE_OVERFLOW (and1));
7986 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7987 fold_convert_loc (loc, type, and0), tem);
7991 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7992 cast (T1)X will fold away. We assume that this happens when X itself
7993 is a cast. */
7994 if (POINTER_TYPE_P (type)
7995 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7996 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7998 tree arg00 = TREE_OPERAND (arg0, 0);
7999 tree arg01 = TREE_OPERAND (arg0, 1);
8001 return fold_build_pointer_plus_loc
8002 (loc, fold_convert_loc (loc, type, arg00), arg01);
8005 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8006 of the same precision, and X is an integer type not narrower than
8007 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8008 if (INTEGRAL_TYPE_P (type)
8009 && TREE_CODE (op0) == BIT_NOT_EXPR
8010 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8011 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8012 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8014 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8015 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8016 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8017 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8018 fold_convert_loc (loc, type, tem));
8021 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8022 type of X and Y (integer types only). */
8023 if (INTEGRAL_TYPE_P (type)
8024 && TREE_CODE (op0) == MULT_EXPR
8025 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8026 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8028 /* Be careful not to introduce new overflows. */
8029 tree mult_type;
8030 if (TYPE_OVERFLOW_WRAPS (type))
8031 mult_type = type;
8032 else
8033 mult_type = unsigned_type_for (type);
8035 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8037 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8038 fold_convert_loc (loc, mult_type,
8039 TREE_OPERAND (op0, 0)),
8040 fold_convert_loc (loc, mult_type,
8041 TREE_OPERAND (op0, 1)));
8042 return fold_convert_loc (loc, type, tem);
8046 return NULL_TREE;
8048 case VIEW_CONVERT_EXPR:
8049 if (TREE_CODE (op0) == MEM_REF)
8051 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
8052 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
8053 tem = fold_build2_loc (loc, MEM_REF, type,
8054 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8055 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
8056 return tem;
8059 return NULL_TREE;
8061 case NEGATE_EXPR:
8062 tem = fold_negate_expr (loc, arg0);
8063 if (tem)
8064 return fold_convert_loc (loc, type, tem);
8065 return NULL_TREE;
8067 case ABS_EXPR:
8068 /* Convert fabs((double)float) into (double)fabsf(float). */
8069 if (TREE_CODE (arg0) == NOP_EXPR
8070 && TREE_CODE (type) == REAL_TYPE)
8072 tree targ0 = strip_float_extensions (arg0);
8073 if (targ0 != arg0)
8074 return fold_convert_loc (loc, type,
8075 fold_build1_loc (loc, ABS_EXPR,
8076 TREE_TYPE (targ0),
8077 targ0));
8079 return NULL_TREE;
8081 case BIT_NOT_EXPR:
8082 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8083 if (TREE_CODE (arg0) == BIT_XOR_EXPR
8084 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8085 fold_convert_loc (loc, type,
8086 TREE_OPERAND (arg0, 0)))))
8087 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8088 fold_convert_loc (loc, type,
8089 TREE_OPERAND (arg0, 1)));
8090 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8091 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8092 fold_convert_loc (loc, type,
8093 TREE_OPERAND (arg0, 1)))))
8094 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8095 fold_convert_loc (loc, type,
8096 TREE_OPERAND (arg0, 0)), tem);
8098 return NULL_TREE;
8100 case TRUTH_NOT_EXPR:
8101 /* Note that the operand of this must be an int
8102 and its values must be 0 or 1.
8103 ("true" is a fixed value perhaps depending on the language,
8104 but we don't handle values other than 1 correctly yet.) */
8105 tem = fold_truth_not_expr (loc, arg0);
8106 if (!tem)
8107 return NULL_TREE;
8108 return fold_convert_loc (loc, type, tem);
8110 case INDIRECT_REF:
8111 /* Fold *&X to X if X is an lvalue. */
8112 if (TREE_CODE (op0) == ADDR_EXPR)
8114 tree op00 = TREE_OPERAND (op0, 0);
8115 if ((VAR_P (op00)
8116 || TREE_CODE (op00) == PARM_DECL
8117 || TREE_CODE (op00) == RESULT_DECL)
8118 && !TREE_READONLY (op00))
8119 return op00;
8121 return NULL_TREE;
8123 default:
8124 return NULL_TREE;
8125 } /* switch (code) */
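/* Editorial sketch of the (T)(X & C) narrowing handled in the
   CASE_CONVERT arm above, in plain C: when C fits the narrow type
   with its sign bit clear, the cast commutes with the mask.  */
static unsigned char
narrow_and_sketch (unsigned int x)
{
  /* (unsigned char) (x & 0x7f) == ((unsigned char) x) & 0x7f.  */
  return (unsigned char) (x & 0x7f);
}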
8129 /* If the operation was a conversion do _not_ mark a resulting constant
8130 with TREE_OVERFLOW if the original constant was not. These conversions
8131 have implementation defined behavior and retaining the TREE_OVERFLOW
8132 flag here would confuse later passes such as VRP. */
8133 tree
8134 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8135 tree type, tree op0)
8137 tree res = fold_unary_loc (loc, code, type, op0);
8138 if (res
8139 && TREE_CODE (res) == INTEGER_CST
8140 && TREE_CODE (op0) == INTEGER_CST
8141 && CONVERT_EXPR_CODE_P (code))
8142 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8144 return res;
8147 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8148 operands OP0 and OP1. LOC is the location of the resulting expression.
8149 ARG0 and ARG1 are the NOP-stripped forms of OP0 and OP1.
8150 Return the folded expression if folding is successful. Otherwise,
8151 return NULL_TREE. */
8152 static tree
8153 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8154 tree arg0, tree arg1, tree op0, tree op1)
8156 tree tem;
8158 /* We only do these simplifications if we are optimizing. */
8159 if (!optimize)
8160 return NULL_TREE;
8162 /* Check for things like (A || B) && (A || C). We can convert this
8163 to A || (B && C). Note that either operator can be any of the four
8164 truth and/or operations and the transformation will still be
8165 valid. Also note that we only care about order for the
8166 ANDIF and ORIF operators. If B contains side effects, this
8167 might change the truth-value of A. */
8168 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8169 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8170 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8171 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8172 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8173 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8175 tree a00 = TREE_OPERAND (arg0, 0);
8176 tree a01 = TREE_OPERAND (arg0, 1);
8177 tree a10 = TREE_OPERAND (arg1, 0);
8178 tree a11 = TREE_OPERAND (arg1, 1);
8179 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8180 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8181 && (code == TRUTH_AND_EXPR
8182 || code == TRUTH_OR_EXPR));
8184 if (operand_equal_p (a00, a10, 0))
8185 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8186 fold_build2_loc (loc, code, type, a01, a11));
8187 else if (commutative && operand_equal_p (a00, a11, 0))
8188 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8189 fold_build2_loc (loc, code, type, a01, a10));
8190 else if (commutative && operand_equal_p (a01, a10, 0))
8191 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8192 fold_build2_loc (loc, code, type, a00, a11));
8194 /* This case is tricky because we must either have commutative
8195 operators or else A10 must not have side-effects. */
8197 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8198 && operand_equal_p (a01, a11, 0))
8199 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8200 fold_build2_loc (loc, code, type, a00, a10),
8201 a01);
8204 /* See if we can build a range comparison. */
8205 if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
8206 return tem;
8208 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8209 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8211 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8212 if (tem)
8213 return fold_build2_loc (loc, code, type, tem, arg1);
8216 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8217 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8219 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8220 if (tem)
8221 return fold_build2_loc (loc, code, type, arg0, tem);
8224 /* Check for the possibility of merging component references. If our
8225 lhs is another similar operation, try to merge its rhs with our
8226 rhs. Then try to merge our lhs and rhs. */
8227 if (TREE_CODE (arg0) == code
8228 && (tem = fold_truth_andor_1 (loc, code, type,
8229 TREE_OPERAND (arg0, 1), arg1)) != 0)
8230 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8232 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8233 return tem;
8235 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
8236 if (PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT) != -1)
8237 logical_op_non_short_circuit
8238 = PARAM_VALUE (PARAM_LOGICAL_OP_NON_SHORT_CIRCUIT);
8239 if (logical_op_non_short_circuit
8240 && !flag_sanitize_coverage
8241 && (code == TRUTH_AND_EXPR
8242 || code == TRUTH_ANDIF_EXPR
8243 || code == TRUTH_OR_EXPR
8244 || code == TRUTH_ORIF_EXPR))
8246 enum tree_code ncode, icode;
8248 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8249 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8250 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8252 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8253 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8254 We don't want to pack more than two leaves to a non-IF AND/OR
8255 expression.
8256 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8257 equal to IF-CODE, then we don't want to add right-hand operand.
8258 If the inner right-hand side of left-hand operand has
8259 side-effects, or isn't simple, then we can't add to it,
8260 as otherwise we might destroy the if-sequence. */
8261 if (TREE_CODE (arg0) == icode
8262 && simple_operand_p_2 (arg1)
8263 /* Needed for sequence points to handle trapping
8264 and side effects. */
8265 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8267 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8268 arg1);
8269 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8270 tem);
8272 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8273 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8274 else if (TREE_CODE (arg1) == icode
8275 && simple_operand_p_2 (arg0)
8276 /* Needed for sequence points to handle trapping
8277 and side effects. */
8278 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8280 tem = fold_build2_loc (loc, ncode, type,
8281 arg0, TREE_OPERAND (arg1, 0));
8282 return fold_build2_loc (loc, icode, type, tem,
8283 TREE_OPERAND (arg1, 1));
8285 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8286 into (A OR B).
8287 For sequence point consistency, we need to check for trapping,
8288 and side-effects. */
8289 else if (code == icode && simple_operand_p_2 (arg0)
8290 && simple_operand_p_2 (arg1))
8291 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8294 return NULL_TREE;
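/* Editorial sketch: the (A || B) && (A || C) -> A || (B && C)
   distribution performed above, valid when B and C are free of
   side effects.  */
static int
distribute_sketch (int a, int b, int c)
{
  return a || (b && c);	/* same truth table as (a || b) && (a || c) */
}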
8297 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8298 by changing CODE to reduce the magnitude of constants involved in
8299 ARG0 of the comparison.
8300 Returns a canonicalized comparison tree if a simplification was
8301 possible, otherwise returns NULL_TREE.
8302 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8303 valid if signed overflow is undefined. */
8305 static tree
8306 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8307 tree arg0, tree arg1,
8308 bool *strict_overflow_p)
8310 enum tree_code code0 = TREE_CODE (arg0);
8311 tree t, cst0 = NULL_TREE;
8312 int sgn0;
8314 /* Match A +- CST code arg1. We can change this only if overflow
8315 is undefined. */
8316 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8317 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8318 /* In principle pointers also have undefined overflow behavior,
8319 but that causes problems elsewhere. */
8320 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8321 && (code0 == MINUS_EXPR
8322 || code0 == PLUS_EXPR)
8323 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8324 return NULL_TREE;
8326 /* Identify the constant in arg0 and its sign. */
8327 cst0 = TREE_OPERAND (arg0, 1);
8328 sgn0 = tree_int_cst_sgn (cst0);
8330 /* Overflowed constants and zero will cause problems. */
8331 if (integer_zerop (cst0)
8332 || TREE_OVERFLOW (cst0))
8333 return NULL_TREE;
8335 /* See if we can reduce the magnitude of the constant in
8336 arg0 by changing the comparison code. */
8337 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8338 if (code == LT_EXPR
8339 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8340 code = LE_EXPR;
8341 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8342 else if (code == GT_EXPR
8343 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8344 code = GE_EXPR;
8345 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8346 else if (code == LE_EXPR
8347 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8348 code = LT_EXPR;
8349 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8350 else if (code == GE_EXPR
8351 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8352 code = GT_EXPR;
8353 else
8354 return NULL_TREE;
8355 *strict_overflow_p = true;
8357 /* Now build the constant reduced in magnitude. But not if that
8358 would produce one outside of its type's range. */
8359 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8360 && ((sgn0 == 1
8361 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8362 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8363 || (sgn0 == -1
8364 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8365 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8366 return NULL_TREE;
8368 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8369 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8370 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8371 t = fold_convert (TREE_TYPE (arg1), t);
8373 return fold_build2_loc (loc, code, type, t, arg1);
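/* Editorial sketch: the magnitude reduction above on a concrete
   case.  With undefined signed overflow, A - 10 < B may be rewritten
   as A - 9 <= B, moving the constant one step toward zero.  */
static int
canon_cmp_sketch (int a, int b)
{
  return a - 9 <= b;	/* == (a - 10 < b) absent overflow */
}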
8376 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8377 overflow further. Try to decrease the magnitude of constants involved
8378 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8379 and put sole constants at the second argument position.
8380 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8382 static tree
8383 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8384 tree arg0, tree arg1)
8386 tree t;
8387 bool strict_overflow_p;
8388 const char * const warnmsg = G_("assuming signed overflow does not occur "
8389 "when reducing constant in comparison");
8391 /* Try canonicalization by simplifying arg0. */
8392 strict_overflow_p = false;
8393 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8394 &strict_overflow_p);
8395 if (t)
8397 if (strict_overflow_p)
8398 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8399 return t;
8402 /* Try canonicalization by simplifying arg1 using the swapped
8403 comparison. */
8404 code = swap_tree_comparison (code);
8405 strict_overflow_p = false;
8406 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8407 &strict_overflow_p);
8408 if (t && strict_overflow_p)
8409 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8410 return t;
8413 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8414 space. This is used to avoid issuing overflow warnings for
8415 expressions like &p->x which cannot wrap. */
8417 static bool
8418 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
8420 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8421 return true;
8423 if (maybe_lt (bitpos, 0))
8424 return true;
8426 poly_wide_int wi_offset;
8427 int precision = TYPE_PRECISION (TREE_TYPE (base));
8428 if (offset == NULL_TREE)
8429 wi_offset = wi::zero (precision);
8430 else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
8431 return true;
8432 else
8433 wi_offset = wi::to_poly_wide (offset);
8435 wi::overflow_type overflow;
8436 poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
8437 precision);
8438 poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8439 if (overflow)
8440 return true;
8442 poly_uint64 total_hwi, size;
8443 if (!total.to_uhwi (&total_hwi)
8444 || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
8445 &size)
8446 || known_eq (size, 0U))
8447 return true;
8449 if (known_le (total_hwi, size))
8450 return false;
8452 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8453 array. */
8454 if (TREE_CODE (base) == ADDR_EXPR
8455 && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
8456 &size)
8457 && maybe_ne (size, 0U)
8458 && known_le (total_hwi, size))
8459 return false;
8461 return true;
8464 /* Return a positive integer when the symbol DECL is known to have
8465 a nonzero address, zero when it's known not to (e.g., it's a weak
8466 symbol), and a negative integer when the symbol is not yet in the
8467 symbol table and so whether or not its address is zero is unknown.
8468 For function-local objects, always return a positive integer. */
8469 static int
8470 maybe_nonzero_address (tree decl)
8472 if (DECL_P (decl) && decl_in_symtab_p (decl))
8473 if (struct symtab_node *symbol = symtab_node::get_create (decl))
8474 return symbol->nonzero_address ();
8476 /* Function-local objects are never NULL. */
8477 if (DECL_P (decl)
8478 && (DECL_CONTEXT (decl)
8479 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8480 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8481 return 1;
8483 return -1;
8486 /* Subroutine of fold_binary. This routine performs all of the
8487 transformations that are common to the equality/inequality
8488 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8489 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8490 fold_binary should call fold_binary instead. Fold a comparison with
8491 tree code CODE and type TYPE with operands OP0 and OP1. Return
8492 the folded comparison or NULL_TREE. */
8494 static tree
8495 fold_comparison (location_t loc, enum tree_code code, tree type,
8496 tree op0, tree op1)
8498 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8499 tree arg0, arg1, tem;
8501 arg0 = op0;
8502 arg1 = op1;
8504 STRIP_SIGN_NOPS (arg0);
8505 STRIP_SIGN_NOPS (arg1);
8507 /* For comparisons of pointers we can decompose it to a compile time
8508 comparison of the base objects and the offsets into the object.
8509 This requires at least one operand being an ADDR_EXPR or a
8510 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8511 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8512 && (TREE_CODE (arg0) == ADDR_EXPR
8513 || TREE_CODE (arg1) == ADDR_EXPR
8514 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8515 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8517 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8518 poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
8519 machine_mode mode;
8520 int volatilep, reversep, unsignedp;
8521 bool indirect_base0 = false, indirect_base1 = false;
8523 /* Get base and offset for the access. Strip ADDR_EXPR for
8524 get_inner_reference, but put it back by stripping INDIRECT_REF
8525 off the base object if possible. indirect_baseN will be true
8526 if baseN is not an address but refers to the object itself. */
8527 base0 = arg0;
8528 if (TREE_CODE (arg0) == ADDR_EXPR)
8530 base0
8531 = get_inner_reference (TREE_OPERAND (arg0, 0),
8532 &bitsize, &bitpos0, &offset0, &mode,
8533 &unsignedp, &reversep, &volatilep);
8534 if (TREE_CODE (base0) == INDIRECT_REF)
8535 base0 = TREE_OPERAND (base0, 0);
8536 else
8537 indirect_base0 = true;
8539 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8541 base0 = TREE_OPERAND (arg0, 0);
8542 STRIP_SIGN_NOPS (base0);
8543 if (TREE_CODE (base0) == ADDR_EXPR)
8545 base0
8546 = get_inner_reference (TREE_OPERAND (base0, 0),
8547 &bitsize, &bitpos0, &offset0, &mode,
8548 &unsignedp, &reversep, &volatilep);
8549 if (TREE_CODE (base0) == INDIRECT_REF)
8550 base0 = TREE_OPERAND (base0, 0);
8551 else
8552 indirect_base0 = true;
8554 if (offset0 == NULL_TREE || integer_zerop (offset0))
8555 offset0 = TREE_OPERAND (arg0, 1);
8556 else
8557 offset0 = size_binop (PLUS_EXPR, offset0,
8558 TREE_OPERAND (arg0, 1));
8559 if (poly_int_tree_p (offset0))
8561 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
8562 TYPE_PRECISION (sizetype));
8563 tem <<= LOG2_BITS_PER_UNIT;
8564 tem += bitpos0;
8565 if (tem.to_shwi (&bitpos0))
8566 offset0 = NULL_TREE;
8570 base1 = arg1;
8571 if (TREE_CODE (arg1) == ADDR_EXPR)
8573 base1
8574 = get_inner_reference (TREE_OPERAND (arg1, 0),
8575 &bitsize, &bitpos1, &offset1, &mode,
8576 &unsignedp, &reversep, &volatilep);
8577 if (TREE_CODE (base1) == INDIRECT_REF)
8578 base1 = TREE_OPERAND (base1, 0);
8579 else
8580 indirect_base1 = true;
8582 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8584 base1 = TREE_OPERAND (arg1, 0);
8585 STRIP_SIGN_NOPS (base1);
8586 if (TREE_CODE (base1) == ADDR_EXPR)
8588 base1
8589 = get_inner_reference (TREE_OPERAND (base1, 0),
8590 &bitsize, &bitpos1, &offset1, &mode,
8591 &unsignedp, &reversep, &volatilep);
8592 if (TREE_CODE (base1) == INDIRECT_REF)
8593 base1 = TREE_OPERAND (base1, 0);
8594 else
8595 indirect_base1 = true;
8597 if (offset1 == NULL_TREE || integer_zerop (offset1))
8598 offset1 = TREE_OPERAND (arg1, 1);
8599 else
8600 offset1 = size_binop (PLUS_EXPR, offset1,
8601 TREE_OPERAND (arg1, 1));
8602 if (poly_int_tree_p (offset1))
8604 poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
8605 TYPE_PRECISION (sizetype));
8606 tem <<= LOG2_BITS_PER_UNIT;
8607 tem += bitpos1;
8608 if (tem.to_shwi (&bitpos1))
8609 offset1 = NULL_TREE;
8613 /* If we have equivalent bases we might be able to simplify. */
8614 if (indirect_base0 == indirect_base1
8615 && operand_equal_p (base0, base1,
8616 indirect_base0 ? OEP_ADDRESS_OF : 0))
8618 /* We can fold this expression to a constant if the non-constant
8619 offset parts are equal. */
8620 if ((offset0 == offset1
8621 || (offset0 && offset1
8622 && operand_equal_p (offset0, offset1, 0)))
8623 && (equality_code
8624 || (indirect_base0
8625 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8626 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8628 if (!equality_code
8629 && maybe_ne (bitpos0, bitpos1)
8630 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8631 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8632 fold_overflow_warning (("assuming pointer wraparound does not "
8633 "occur when comparing P +- C1 with "
8634 "P +- C2"),
8635 WARN_STRICT_OVERFLOW_CONDITIONAL);
8637 switch (code)
8639 case EQ_EXPR:
8640 if (known_eq (bitpos0, bitpos1))
8641 return constant_boolean_node (true, type);
8642 if (known_ne (bitpos0, bitpos1))
8643 return constant_boolean_node (false, type);
8644 break;
8645 case NE_EXPR:
8646 if (known_ne (bitpos0, bitpos1))
8647 return constant_boolean_node (true, type);
8648 if (known_eq (bitpos0, bitpos1))
8649 return constant_boolean_node (false, type);
8650 break;
8651 case LT_EXPR:
8652 if (known_lt (bitpos0, bitpos1))
8653 return constant_boolean_node (true, type);
8654 if (known_ge (bitpos0, bitpos1))
8655 return constant_boolean_node (false, type);
8656 break;
8657 case LE_EXPR:
8658 if (known_le (bitpos0, bitpos1))
8659 return constant_boolean_node (true, type);
8660 if (known_gt (bitpos0, bitpos1))
8661 return constant_boolean_node (false, type);
8662 break;
8663 case GE_EXPR:
8664 if (known_ge (bitpos0, bitpos1))
8665 return constant_boolean_node (true, type);
8666 if (known_lt (bitpos0, bitpos1))
8667 return constant_boolean_node (false, type);
8668 break;
8669 case GT_EXPR:
8670 if (known_gt (bitpos0, bitpos1))
8671 return constant_boolean_node (true, type);
8672 if (known_le (bitpos0, bitpos1))
8673 return constant_boolean_node (false, type);
8674 break;
8675 default:;
8678 /* We can simplify the comparison to a comparison of the variable
8679 offset parts if the constant offset parts are equal.
8680 Be careful to use signed sizetype here because otherwise we
8681 mess with array offsets in the wrong way. This is possible
8682 because pointer arithmetic is restricted to remain within an
8683 object and overflow on pointer differences is undefined as of
8684 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8685 else if (known_eq (bitpos0, bitpos1)
8686 && (equality_code
8687 || (indirect_base0
8688 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8689 || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8691 /* By converting to signed sizetype we cover middle-end pointer
8692 arithmetic which operates on unsigned pointer types of size
8693 type size and ARRAY_REF offsets which are properly sign or
8694 zero extended from their type in case it is narrower than
8695 sizetype. */
8696 if (offset0 == NULL_TREE)
8697 offset0 = build_int_cst (ssizetype, 0);
8698 else
8699 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8700 if (offset1 == NULL_TREE)
8701 offset1 = build_int_cst (ssizetype, 0);
8702 else
8703 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8705 if (!equality_code
8706 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8707 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8708 fold_overflow_warning (("assuming pointer wraparound does not "
8709 "occur when comparing P +- C1 with "
8710 "P +- C2"),
8711 WARN_STRICT_OVERFLOW_COMPARISON);
8713 return fold_build2_loc (loc, code, type, offset0, offset1);
8716 /* For equal offsets we can simplify to a comparison of the
8717 base addresses. */
8718 else if (known_eq (bitpos0, bitpos1)
8719 && (indirect_base0
8720 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8721 && (indirect_base1
8722 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8723 && ((offset0 == offset1)
8724 || (offset0 && offset1
8725 && operand_equal_p (offset0, offset1, 0))))
8727 if (indirect_base0)
8728 base0 = build_fold_addr_expr_loc (loc, base0);
8729 if (indirect_base1)
8730 base1 = build_fold_addr_expr_loc (loc, base1);
8731 return fold_build2_loc (loc, code, type, base0, base1);
8733 /* Comparison between an ordinary (non-weak) symbol and a null
8734 pointer can be eliminated since such symbols must have a
8735 non-null address. In C, relational expressions between pointers
8736 to objects and null pointers are undefined. The results
8737 below follow the C++ rules with the additional property that
8738 every object pointer compares greater than a null pointer.
8739 */
8740 else if (((DECL_P (base0)
8741 && maybe_nonzero_address (base0) > 0
8742 /* Avoid folding references to struct members at offset 0 to
8743 prevent tests like '&ptr->firstmember == 0' from getting
8744 eliminated. When ptr is null, although the -> expression
8745 is strictly speaking invalid, GCC retains it as a matter
8746 of QoI. See PR c/44555. */
8747 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
8748 || CONSTANT_CLASS_P (base0))
8749 && indirect_base0
8750 /* The caller guarantees that when one of the arguments is
8751 constant (i.e., null in this case) it is second. */
8752 && integer_zerop (arg1))
8754 switch (code)
8756 case EQ_EXPR:
8757 case LE_EXPR:
8758 case LT_EXPR:
8759 return constant_boolean_node (false, type);
8760 case GE_EXPR:
8761 case GT_EXPR:
8762 case NE_EXPR:
8763 return constant_boolean_node (true, type);
8764 default:
8765 gcc_unreachable ();
8770 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8771 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8772 the resulting offset is smaller in absolute value than the
8773 original one and has the same sign. */
8774 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8775 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8776 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8777 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8778 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8779 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8780 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8781 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8783 tree const1 = TREE_OPERAND (arg0, 1);
8784 tree const2 = TREE_OPERAND (arg1, 1);
8785 tree variable1 = TREE_OPERAND (arg0, 0);
8786 tree variable2 = TREE_OPERAND (arg1, 0);
8787 tree cst;
8788 const char * const warnmsg = G_("assuming signed overflow does not "
8789 "occur when combining constants around "
8790 "a comparison");
8792 /* Put the constant on the side where it doesn't overflow and is
8793 of lower absolute value and of the same sign as before. */
8794 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8795 ? MINUS_EXPR : PLUS_EXPR,
8796 const2, const1);
8797 if (!TREE_OVERFLOW (cst)
8798 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8799 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8801 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8802 return fold_build2_loc (loc, code, type,
8803 variable1,
8804 fold_build2_loc (loc, TREE_CODE (arg1),
8805 TREE_TYPE (arg1),
8806 variable2, cst));
8809 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8810 ? MINUS_EXPR : PLUS_EXPR,
8811 const1, const2);
8812 if (!TREE_OVERFLOW (cst)
8813 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8814 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8816 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8817 return fold_build2_loc (loc, code, type,
8818 fold_build2_loc (loc, TREE_CODE (arg0),
8819 TREE_TYPE (arg0),
8820 variable1, cst),
8821 variable2);
8825 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8826 if (tem)
8827 return tem;
8829 /* If we are comparing an expression that just has comparisons
8830 of two integer values, arithmetic expressions of those comparisons,
8831 and constants, we can simplify it. There are only three cases
8832 to check: the two values can either be equal, the first can be
8833 greater, or the second can be greater. Fold the expression for
8834 those three values. Since each value must be 0 or 1, we have
8835 eight possibilities, each of which corresponds to the constant 0
8836 or 1 or one of the six possible comparisons.
8838 This handles common cases like (a > b) == 0 but also handles
8839 expressions like ((x > y) - (y > x)) > 0, which supposedly
8840 occur in macroized code. */
8842 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8844 tree cval1 = 0, cval2 = 0;
8846 if (twoval_comparison_p (arg0, &cval1, &cval2)
8847 /* Don't handle degenerate cases here; they should already
8848 have been handled anyway. */
8849 && cval1 != 0 && cval2 != 0
8850 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8851 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8852 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8853 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8854 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8855 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8856 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8858 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8859 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8861 /* We can't just pass T to eval_subst in case cval1 or cval2
8862 was the same as ARG1. */
8864 tree high_result
8865 = fold_build2_loc (loc, code, type,
8866 eval_subst (loc, arg0, cval1, maxval,
8867 cval2, minval),
8868 arg1);
8869 tree equal_result
8870 = fold_build2_loc (loc, code, type,
8871 eval_subst (loc, arg0, cval1, maxval,
8872 cval2, maxval),
8873 arg1);
8874 tree low_result
8875 = fold_build2_loc (loc, code, type,
8876 eval_subst (loc, arg0, cval1, minval,
8877 cval2, maxval),
8878 arg1);
8880 /* All three of these results should be 0 or 1. Confirm they are.
8881 Then use those values to select the proper code to use. */
8883 if (TREE_CODE (high_result) == INTEGER_CST
8884 && TREE_CODE (equal_result) == INTEGER_CST
8885 && TREE_CODE (low_result) == INTEGER_CST)
8887 /* Make a 3-bit mask with the high-order bit being the
8888 value for `>', the next for '=', and the low for '<'. */
8889 switch ((integer_onep (high_result) * 4)
8890 + (integer_onep (equal_result) * 2)
8891 + integer_onep (low_result))
8893 case 0:
8894 /* Always false. */
8895 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8896 case 1:
8897 code = LT_EXPR;
8898 break;
8899 case 2:
8900 code = EQ_EXPR;
8901 break;
8902 case 3:
8903 code = LE_EXPR;
8904 break;
8905 case 4:
8906 code = GT_EXPR;
8907 break;
8908 case 5:
8909 code = NE_EXPR;
8910 break;
8911 case 6:
8912 code = GE_EXPR;
8913 break;
8914 case 7:
8915 /* Always true. */
8916 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8919 return fold_build2_loc (loc, code, type, cval1, cval2);
8924 return NULL_TREE;
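/* Editorial sketch (hypothetical table, not in the original): the
   3-bit selector computed above, with bit 2 holding the result for
   '>', bit 1 for '==' and bit 0 for '<'.  Index 5 (binary 101) is
   true for '>' and '<' but not '==', hence NE_EXPR.  */
static const enum tree_code selector_to_code_sketch[8] = {
  ERROR_MARK /* always false */, LT_EXPR, EQ_EXPR, LE_EXPR,
  GT_EXPR, NE_EXPR, GE_EXPR, ERROR_MARK /* always true */
};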
8928 /* Subroutine of fold_binary. Optimize complex multiplications of the
8929 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8930 argument EXPR represents the expression "z" of type TYPE. */
8932 static tree
8933 fold_mult_zconjz (location_t loc, tree type, tree expr)
8935 tree itype = TREE_TYPE (type);
8936 tree rpart, ipart, tem;
8938 if (TREE_CODE (expr) == COMPLEX_EXPR)
8940 rpart = TREE_OPERAND (expr, 0);
8941 ipart = TREE_OPERAND (expr, 1);
8943 else if (TREE_CODE (expr) == COMPLEX_CST)
8945 rpart = TREE_REALPART (expr);
8946 ipart = TREE_IMAGPART (expr);
8948 else
8950 expr = save_expr (expr);
8951 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8952 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8955 rpart = save_expr (rpart);
8956 ipart = save_expr (ipart);
8957 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8958 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8959 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8960 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8961 build_zero_cst (itype));
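/* Editorial sketch: the identity used above, z * conj(z) equals
   re*re + im*im with a zero imaginary part.  Plain C99 version.  */
#include <complex.h>	/* for creal, cimag (sketch only) */
static double complex
zconjz_sketch (double complex z)
{
  double re = creal (z), im = cimag (z);
  return re * re + im * im;	/* imaginary part is zero */
}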
8965 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8966 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
8967 true if successful. */
8969 static bool
8970 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
8972 unsigned HOST_WIDE_INT i, nunits;
8974 if (TREE_CODE (arg) == VECTOR_CST
8975 && VECTOR_CST_NELTS (arg).is_constant (&nunits))
8977 for (i = 0; i < nunits; ++i)
8978 elts[i] = VECTOR_CST_ELT (arg, i);
8980 else if (TREE_CODE (arg) == CONSTRUCTOR)
8982 constructor_elt *elt;
8984 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8985 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8986 return false;
8987 else
8988 elts[i] = elt->value;
8990 else
8991 return false;
8992 for (; i < nelts; i++)
8993 elts[i]
8994 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8995 return true;
8998 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8999 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9000 NULL_TREE otherwise. */
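/* Selector indices in [0, NELTS) pick elements of ARG0 and indices in [NELTS, 2*NELTS) pick elements of ARG1, so the two input vectors are first concatenated into a single element array. */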
9002 static tree
9003 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
9005 unsigned int i;
9006 unsigned HOST_WIDE_INT nelts;
9007 bool need_ctor = false;
9009 if (!sel.length ().is_constant (&nelts))
9010 return NULL_TREE;
9011 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
9012 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
9013 && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
9014 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9015 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9016 return NULL_TREE;
9018 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
9019 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
9020 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
9021 return NULL_TREE;
9023 tree_vector_builder out_elts (type, nelts, 1);
9024 for (i = 0; i < nelts; i++)
9026 HOST_WIDE_INT index;
9027 if (!sel[i].is_constant (&index))
9028 return NULL_TREE;
9029 if (!CONSTANT_CLASS_P (in_elts[index]))
9030 need_ctor = true;
9031 out_elts.quick_push (unshare_expr (in_elts[index]));
9034 if (need_ctor)
9036 vec<constructor_elt, va_gc> *v;
9037 vec_alloc (v, nelts);
9038 for (i = 0; i < nelts; i++)
9039 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
9040 return build_constructor (type, v);
9042 else
9043 return out_elts.build ();
9046 /* Try to fold a pointer difference of type TYPE between two address expressions of
9047 array references AREF0 and AREF1 using location LOC. Return a
9048 simplified expression for the difference or NULL_TREE. */
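/* For example, &a[i] - &a[j] folds to (i - j) * sizeof (a[0]), plus whatever offset the (possibly nested) bases contribute. */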
9050 static tree
9051 fold_addr_of_array_ref_difference (location_t loc, tree type,
9052 tree aref0, tree aref1,
9053 bool use_pointer_diff)
9055 tree base0 = TREE_OPERAND (aref0, 0);
9056 tree base1 = TREE_OPERAND (aref1, 0);
9057 tree base_offset = build_int_cst (type, 0);
9059 /* If the bases are array references as well, recurse. If the bases
9060 are pointer indirections compute the difference of the pointers.
9061 If the bases are equal, we are set. */
9062 if ((TREE_CODE (base0) == ARRAY_REF
9063 && TREE_CODE (base1) == ARRAY_REF
9064 && (base_offset
9065 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
9066 use_pointer_diff)))
9067 || (INDIRECT_REF_P (base0)
9068 && INDIRECT_REF_P (base1)
9069 && (base_offset
9070 = use_pointer_diff
9071 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
9072 TREE_OPERAND (base0, 0),
9073 TREE_OPERAND (base1, 0))
9074 : fold_binary_loc (loc, MINUS_EXPR, type,
9075 fold_convert (type,
9076 TREE_OPERAND (base0, 0)),
9077 fold_convert (type,
9078 TREE_OPERAND (base1, 0)))))
9079 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
9081 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9082 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9083 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9084 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
9085 return fold_build2_loc (loc, PLUS_EXPR, type,
9086 base_offset,
9087 fold_build2_loc (loc, MULT_EXPR, type,
9088 diff, esz));
9090 return NULL_TREE;
9093 /* If the real or vector real constant CST of type TYPE has an exact
9094 inverse, return it, else return NULL. */
9096 tree
9097 exact_inverse (tree type, tree cst)
9099 REAL_VALUE_TYPE r;
9100 tree unit_type;
9101 machine_mode mode;
9103 switch (TREE_CODE (cst))
9105 case REAL_CST:
9106 r = TREE_REAL_CST (cst);
9108 if (exact_real_inverse (TYPE_MODE (type), &r))
9109 return build_real (type, r);
9111 return NULL_TREE;
9113 case VECTOR_CST:
9115 unit_type = TREE_TYPE (type);
9116 mode = TYPE_MODE (unit_type);
9118 tree_vector_builder elts;
9119 if (!elts.new_unary_operation (type, cst, false))
9120 return NULL_TREE;
9121 unsigned int count = elts.encoded_nelts ();
9122 for (unsigned int i = 0; i < count; ++i)
9124 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9125 if (!exact_real_inverse (mode, &r))
9126 return NULL_TREE;
9127 elts.quick_push (build_real (unit_type, r));
9130 return elts.build ();
9133 default:
9134 return NULL_TREE;
9138 /* Mask out the tz least significant bits of X of type TYPE where
9139 tz is the number of trailing zeroes in Y. */
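/* E.g. with tz == 3 (Y ends in three zero bits), the low three bits of X are cleared.  This is used below for (X * CST1) & CST2, where the product X * CST1 is known to have at least tz trailing zero bits. */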
9140 static wide_int
9141 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9143 int tz = wi::ctz (y);
9144 if (tz > 0)
9145 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9146 return x;
9149 /* Return true when T is an address and is known to be nonzero.
9150 For floating point we further ensure that T is not denormal.
9151 Similar logic is present in nonzero_address_p in rtlanal.c.
9153 If the return value is based on the assumption that signed overflow
9154 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9155 change *STRICT_OVERFLOW_P. */
9157 static bool
9158 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9160 tree type = TREE_TYPE (t);
9161 enum tree_code code;
9163 /* Doing something useful for floating point would need more work. */
9164 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9165 return false;
9167 code = TREE_CODE (t);
9168 switch (TREE_CODE_CLASS (code))
9170 case tcc_unary:
9171 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9172 strict_overflow_p);
9173 case tcc_binary:
9174 case tcc_comparison:
9175 return tree_binary_nonzero_warnv_p (code, type,
9176 TREE_OPERAND (t, 0),
9177 TREE_OPERAND (t, 1),
9178 strict_overflow_p);
9179 case tcc_constant:
9180 case tcc_declaration:
9181 case tcc_reference:
9182 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9184 default:
9185 break;
9188 switch (code)
9190 case TRUTH_NOT_EXPR:
9191 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9192 strict_overflow_p);
9194 case TRUTH_AND_EXPR:
9195 case TRUTH_OR_EXPR:
9196 case TRUTH_XOR_EXPR:
9197 return tree_binary_nonzero_warnv_p (code, type,
9198 TREE_OPERAND (t, 0),
9199 TREE_OPERAND (t, 1),
9200 strict_overflow_p);
9202 case COND_EXPR:
9203 case CONSTRUCTOR:
9204 case OBJ_TYPE_REF:
9205 case ASSERT_EXPR:
9206 case ADDR_EXPR:
9207 case WITH_SIZE_EXPR:
9208 case SSA_NAME:
9209 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9211 case COMPOUND_EXPR:
9212 case MODIFY_EXPR:
9213 case BIND_EXPR:
9214 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9215 strict_overflow_p);
9217 case SAVE_EXPR:
9218 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9219 strict_overflow_p);
9221 case CALL_EXPR:
9223 tree fndecl = get_callee_fndecl (t);
9224 if (!fndecl) return false;
9225 if (flag_delete_null_pointer_checks && !flag_check_new
9226 && DECL_IS_OPERATOR_NEW (fndecl)
9227 && !TREE_NOTHROW (fndecl))
9228 return true;
9229 if (flag_delete_null_pointer_checks
9230 && lookup_attribute ("returns_nonnull",
9231 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9232 return true;
9233 return alloca_call_p (t);
9236 default:
9237 break;
9239 return false;
9242 /* Return true when T is an address and is known to be nonzero.
9243 Handle warnings about undefined signed overflow. */
9245 bool
9246 tree_expr_nonzero_p (tree t)
9248 bool ret, strict_overflow_p;
9250 strict_overflow_p = false;
9251 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9252 if (strict_overflow_p)
9253 fold_overflow_warning (("assuming signed overflow does not occur when "
9254 "determining that expression is always "
9255 "non-zero"),
9256 WARN_STRICT_OVERFLOW_MISC);
9257 return ret;
9260 /* Return true if T is known not to be equal to an integer W. */
9262 bool
9263 expr_not_equal_to (tree t, const wide_int &w)
9265 wide_int min, max, nz;
9266 value_range_kind rtype;
9267 switch (TREE_CODE (t))
9269 case INTEGER_CST:
9270 return wi::to_wide (t) != w;
9272 case SSA_NAME:
9273 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9274 return false;
9275 rtype = get_range_info (t, &min, &max);
9276 if (rtype == VR_RANGE)
9278 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9279 return true;
9280 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9281 return true;
9283 else if (rtype == VR_ANTI_RANGE
9284 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9285 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9286 return true;
9287 /* If T has some known zero bits and W has any of those bits set,
9288 then T is known not to be equal to W. */
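/* E.g. if T is known to be even (lowest bit zero), it cannot be equal to an odd W. */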
9289 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9290 TYPE_PRECISION (TREE_TYPE (t))), 0))
9291 return true;
9292 return false;
9294 default:
9295 return false;
9299 /* Fold a binary expression of code CODE and type TYPE with operands
9300 OP0 and OP1. LOC is the location of the resulting expression.
9301 Return the folded expression if folding is successful. Otherwise,
9302 return NULL_TREE. */
9304 tree
9305 fold_binary_loc (location_t loc, enum tree_code code, tree type,
9306 tree op0, tree op1)
9308 enum tree_code_class kind = TREE_CODE_CLASS (code);
9309 tree arg0, arg1, tem;
9310 tree t1 = NULL_TREE;
9311 bool strict_overflow_p;
9312 unsigned int prec;
9314 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9315 && TREE_CODE_LENGTH (code) == 2
9316 && op0 != NULL_TREE
9317 && op1 != NULL_TREE);
9319 arg0 = op0;
9320 arg1 = op1;
9322 /* Strip any conversions that don't change the mode. This is
9323 safe for every expression, except for a comparison expression
9324 because its signedness is derived from its operands. So, in
9325 the latter case, only strip conversions that don't change the
9326 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9327 preserved.
9329 Note that this is done as an internal manipulation within the
9330 constant folder, in order to find the simplest representation
9331 of the arguments so that their form can be studied. In any
9332 case, the appropriate type conversions should be put back in
9333 the tree that will get out of the constant folder. */
9335 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9337 STRIP_SIGN_NOPS (arg0);
9338 STRIP_SIGN_NOPS (arg1);
9340 else
9342 STRIP_NOPS (arg0);
9343 STRIP_NOPS (arg1);
9346 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9347 constant but we can't do arithmetic on them. */
9348 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9350 tem = const_binop (code, type, arg0, arg1);
9351 if (tem != NULL_TREE)
9353 if (TREE_TYPE (tem) != type)
9354 tem = fold_convert_loc (loc, type, tem);
9355 return tem;
9359 /* If this is a commutative operation, and ARG0 is a constant, move it
9360 to ARG1 to reduce the number of tests below. */
9361 if (commutative_tree_code (code)
9362 && tree_swap_operands_p (arg0, arg1))
9363 return fold_build2_loc (loc, code, type, op1, op0);
9365 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9366 to ARG1 to reduce the number of tests below. */
9367 if (kind == tcc_comparison
9368 && tree_swap_operands_p (arg0, arg1))
9369 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9371 tem = generic_simplify (loc, code, type, op0, op1);
9372 if (tem)
9373 return tem;
9375 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9377 First check for cases where an arithmetic operation is applied to a
9378 compound, conditional, or comparison operation. Push the arithmetic
9379 operation inside the compound or conditional to see if any folding
9380 can then be done. Convert comparison to conditional for this purpose.
9381 This also optimizes non-constant cases that used to be done in
9382 expand_expr.
9384 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9385 one of the operands is a comparison and the other is a comparison, a
9386 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9387 code below would make the expression more complex. Change it to a
9388 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9389 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
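/* For instance, with boolean operands, (a < b) & (c < d) becomes (a < b) && (c < d), and (a < b) != (c < d) becomes (a < b) ^ (c < d). */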
9391 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9392 || code == EQ_EXPR || code == NE_EXPR)
9393 && !VECTOR_TYPE_P (TREE_TYPE (arg0))
9394 && ((truth_value_p (TREE_CODE (arg0))
9395 && (truth_value_p (TREE_CODE (arg1))
9396 || (TREE_CODE (arg1) == BIT_AND_EXPR
9397 && integer_onep (TREE_OPERAND (arg1, 1)))))
9398 || (truth_value_p (TREE_CODE (arg1))
9399 && (truth_value_p (TREE_CODE (arg0))
9400 || (TREE_CODE (arg0) == BIT_AND_EXPR
9401 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9403 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9404 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9405 : TRUTH_XOR_EXPR,
9406 boolean_type_node,
9407 fold_convert_loc (loc, boolean_type_node, arg0),
9408 fold_convert_loc (loc, boolean_type_node, arg1));
9410 if (code == EQ_EXPR)
9411 tem = invert_truthvalue_loc (loc, tem);
9413 return fold_convert_loc (loc, type, tem);
9416 if (TREE_CODE_CLASS (code) == tcc_binary
9417 || TREE_CODE_CLASS (code) == tcc_comparison)
9419 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9421 tem = fold_build2_loc (loc, code, type,
9422 fold_convert_loc (loc, TREE_TYPE (op0),
9423 TREE_OPERAND (arg0, 1)), op1);
9424 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9425 tem);
9427 if (TREE_CODE (arg1) == COMPOUND_EXPR)
9429 tem = fold_build2_loc (loc, code, type, op0,
9430 fold_convert_loc (loc, TREE_TYPE (op1),
9431 TREE_OPERAND (arg1, 1)));
9432 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9433 tem);
9436 if (TREE_CODE (arg0) == COND_EXPR
9437 || TREE_CODE (arg0) == VEC_COND_EXPR
9438 || COMPARISON_CLASS_P (arg0))
9440 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9441 arg0, arg1,
9442 /*cond_first_p=*/1);
9443 if (tem != NULL_TREE)
9444 return tem;
9447 if (TREE_CODE (arg1) == COND_EXPR
9448 || TREE_CODE (arg1) == VEC_COND_EXPR
9449 || COMPARISON_CLASS_P (arg1))
9451 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9452 arg1, arg0,
9453 /*cond_first_p=*/0);
9454 if (tem != NULL_TREE)
9455 return tem;
9459 switch (code)
9461 case MEM_REF:
9462 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9463 if (TREE_CODE (arg0) == ADDR_EXPR
9464 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9466 tree iref = TREE_OPERAND (arg0, 0);
9467 return fold_build2 (MEM_REF, type,
9468 TREE_OPERAND (iref, 0),
9469 int_const_binop (PLUS_EXPR, arg1,
9470 TREE_OPERAND (iref, 1)));
9473 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9474 if (TREE_CODE (arg0) == ADDR_EXPR
9475 && handled_component_p (TREE_OPERAND (arg0, 0)))
9477 tree base;
9478 poly_int64 coffset;
9479 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9480 &coffset);
9481 if (!base)
9482 return NULL_TREE;
9483 return fold_build2 (MEM_REF, type,
9484 build_fold_addr_expr (base),
9485 int_const_binop (PLUS_EXPR, arg1,
9486 size_int (coffset)));
9489 return NULL_TREE;
9491 case POINTER_PLUS_EXPR:
9492 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9493 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9494 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9495 return fold_convert_loc (loc, type,
9496 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9497 fold_convert_loc (loc, sizetype,
9498 arg1),
9499 fold_convert_loc (loc, sizetype,
9500 arg0)));
9502 return NULL_TREE;
9504 case PLUS_EXPR:
9505 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9507 /* X + (X / CST) * -CST is X % CST. */
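/* This follows from the truncating-division identity X == (X / CST) * CST + X % CST; the sum test below verifies that the two constants really are negations of each other. */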
9508 if (TREE_CODE (arg1) == MULT_EXPR
9509 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9510 && operand_equal_p (arg0,
9511 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9513 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9514 tree cst1 = TREE_OPERAND (arg1, 1);
9515 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9516 cst1, cst0);
9517 if (sum && integer_zerop (sum))
9518 return fold_convert_loc (loc, type,
9519 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9520 TREE_TYPE (arg0), arg0,
9521 cst0));
9525 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9526 one. Make sure the type is not saturating and has the signedness of
9527 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9528 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9529 if ((TREE_CODE (arg0) == MULT_EXPR
9530 || TREE_CODE (arg1) == MULT_EXPR)
9531 && !TYPE_SATURATING (type)
9532 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9533 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9534 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9536 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9537 if (tem)
9538 return tem;
9541 if (! FLOAT_TYPE_P (type))
9543 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9544 (plus (plus (mult) (mult)) (foo)) so that we can
9545 take advantage of the factoring cases below. */
9546 if (ANY_INTEGRAL_TYPE_P (type)
9547 && TYPE_OVERFLOW_WRAPS (type)
9548 && (((TREE_CODE (arg0) == PLUS_EXPR
9549 || TREE_CODE (arg0) == MINUS_EXPR)
9550 && TREE_CODE (arg1) == MULT_EXPR)
9551 || ((TREE_CODE (arg1) == PLUS_EXPR
9552 || TREE_CODE (arg1) == MINUS_EXPR)
9553 && TREE_CODE (arg0) == MULT_EXPR)))
9555 tree parg0, parg1, parg, marg;
9556 enum tree_code pcode;
9558 if (TREE_CODE (arg1) == MULT_EXPR)
9559 parg = arg0, marg = arg1;
9560 else
9561 parg = arg1, marg = arg0;
9562 pcode = TREE_CODE (parg);
9563 parg0 = TREE_OPERAND (parg, 0);
9564 parg1 = TREE_OPERAND (parg, 1);
9565 STRIP_NOPS (parg0);
9566 STRIP_NOPS (parg1);
9568 if (TREE_CODE (parg0) == MULT_EXPR
9569 && TREE_CODE (parg1) != MULT_EXPR)
9570 return fold_build2_loc (loc, pcode, type,
9571 fold_build2_loc (loc, PLUS_EXPR, type,
9572 fold_convert_loc (loc, type,
9573 parg0),
9574 fold_convert_loc (loc, type,
9575 marg)),
9576 fold_convert_loc (loc, type, parg1));
9577 if (TREE_CODE (parg0) != MULT_EXPR
9578 && TREE_CODE (parg1) == MULT_EXPR)
9579 return
9580 fold_build2_loc (loc, PLUS_EXPR, type,
9581 fold_convert_loc (loc, type, parg0),
9582 fold_build2_loc (loc, pcode, type,
9583 fold_convert_loc (loc, type, marg),
9584 fold_convert_loc (loc, type,
9585 parg1)));
9588 else
9590 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9591 to __complex__ ( x, y ). This is not the same for SNaNs or
9592 if signed zeros are involved. */
9593 if (!HONOR_SNANS (element_mode (arg0))
9594 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9595 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9597 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9598 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9599 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9600 bool arg0rz = false, arg0iz = false;
9601 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9602 || (arg0i && (arg0iz = real_zerop (arg0i))))
9604 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9605 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9606 if (arg0rz && arg1i && real_zerop (arg1i))
9608 tree rp = arg1r ? arg1r
9609 : build1 (REALPART_EXPR, rtype, arg1);
9610 tree ip = arg0i ? arg0i
9611 : build1 (IMAGPART_EXPR, rtype, arg0);
9612 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9614 else if (arg0iz && arg1r && real_zerop (arg1r))
9616 tree rp = arg0r ? arg0r
9617 : build1 (REALPART_EXPR, rtype, arg0);
9618 tree ip = arg1i ? arg1i
9619 : build1 (IMAGPART_EXPR, rtype, arg1);
9620 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9625 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9626 We associate floats only if the user has specified
9627 -fassociative-math. */
9628 if (flag_associative_math
9629 && TREE_CODE (arg1) == PLUS_EXPR
9630 && TREE_CODE (arg0) != MULT_EXPR)
9632 tree tree10 = TREE_OPERAND (arg1, 0);
9633 tree tree11 = TREE_OPERAND (arg1, 1);
9634 if (TREE_CODE (tree11) == MULT_EXPR
9635 && TREE_CODE (tree10) == MULT_EXPR)
9637 tree tree0;
9638 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9639 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9642 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9643 We associate floats only if the user has specified
9644 -fassociative-math. */
9645 if (flag_associative_math
9646 && TREE_CODE (arg0) == PLUS_EXPR
9647 && TREE_CODE (arg1) != MULT_EXPR)
9649 tree tree00 = TREE_OPERAND (arg0, 0);
9650 tree tree01 = TREE_OPERAND (arg0, 1);
9651 if (TREE_CODE (tree01) == MULT_EXPR
9652 && TREE_CODE (tree00) == MULT_EXPR)
9654 tree tree0;
9655 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9656 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9661 bit_rotate:
9662 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9663 is a rotate of A by C1 bits. */
9664 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9665 is a rotate of A by B bits.
9666 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
9667 though in this case CODE must be | and not + or ^, otherwise
9668 it doesn't return A when B is 0. */
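/* Concretely, for a 32-bit unsigned A, (A << 3) + (A >> 29) and (A << B) | (A >> (-B & 31)) are both left rotations of A (by 3 and by B, respectively). */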
9670 enum tree_code code0, code1;
9671 tree rtype;
9672 code0 = TREE_CODE (arg0);
9673 code1 = TREE_CODE (arg1);
9674 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9675 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9676 && operand_equal_p (TREE_OPERAND (arg0, 0),
9677 TREE_OPERAND (arg1, 0), 0)
9678 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9679 TYPE_UNSIGNED (rtype))
9680 /* Only create rotates in complete modes. Other cases are not
9681 expanded properly. */
9682 && (element_precision (rtype)
9683 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9685 tree tree01, tree11;
9686 tree orig_tree01, orig_tree11;
9687 enum tree_code code01, code11;
9689 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
9690 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
9691 STRIP_NOPS (tree01);
9692 STRIP_NOPS (tree11);
9693 code01 = TREE_CODE (tree01);
9694 code11 = TREE_CODE (tree11);
9695 if (code11 != MINUS_EXPR
9696 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
9698 std::swap (code0, code1);
9699 std::swap (code01, code11);
9700 std::swap (tree01, tree11);
9701 std::swap (orig_tree01, orig_tree11);
9703 if (code01 == INTEGER_CST
9704 && code11 == INTEGER_CST
9705 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9706 == element_precision (rtype)))
9708 tem = build2_loc (loc, LROTATE_EXPR,
9709 rtype, TREE_OPERAND (arg0, 0),
9710 code0 == LSHIFT_EXPR
9711 ? orig_tree01 : orig_tree11);
9712 return fold_convert_loc (loc, type, tem);
9714 else if (code11 == MINUS_EXPR)
9716 tree tree110, tree111;
9717 tree110 = TREE_OPERAND (tree11, 0);
9718 tree111 = TREE_OPERAND (tree11, 1);
9719 STRIP_NOPS (tree110);
9720 STRIP_NOPS (tree111);
9721 if (TREE_CODE (tree110) == INTEGER_CST
9722 && compare_tree_int (tree110,
9723 element_precision (rtype)) == 0
9724 && operand_equal_p (tree01, tree111, 0))
9726 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9727 ? LROTATE_EXPR : RROTATE_EXPR),
9728 rtype, TREE_OPERAND (arg0, 0),
9729 orig_tree01);
9730 return fold_convert_loc (loc, type, tem);
9733 else if (code == BIT_IOR_EXPR
9734 && code11 == BIT_AND_EXPR
9735 && pow2p_hwi (element_precision (rtype)))
9737 tree tree110, tree111;
9738 tree110 = TREE_OPERAND (tree11, 0);
9739 tree111 = TREE_OPERAND (tree11, 1);
9740 STRIP_NOPS (tree110);
9741 STRIP_NOPS (tree111);
9742 if (TREE_CODE (tree110) == NEGATE_EXPR
9743 && TREE_CODE (tree111) == INTEGER_CST
9744 && compare_tree_int (tree111,
9745 element_precision (rtype) - 1) == 0
9746 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
9748 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9749 ? LROTATE_EXPR : RROTATE_EXPR),
9750 rtype, TREE_OPERAND (arg0, 0),
9751 orig_tree01);
9752 return fold_convert_loc (loc, type, tem);
9758 associate:
9759 /* In most languages, we can't associate operations on floats through
9760 parentheses. Rather than remember where the parentheses were, we
9761 don't associate floats at all, unless the user has specified
9762 -fassociative-math.
9763 And, we need to make sure type is not saturating. */
9765 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9766 && !TYPE_SATURATING (type))
9768 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
9769 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
9770 tree atype = type;
9771 bool ok = true;
9773 /* Split both trees into variables, constants, and literals. Then
9774 associate each group together, the constants with literals,
9775 then the result with variables. This increases the chances of
9776 literals being recombined later and of generating relocatable
9777 expressions for the sum of a constant and literal. */
9778 var0 = split_tree (arg0, type, code,
9779 &minus_var0, &con0, &minus_con0,
9780 &lit0, &minus_lit0, 0);
9781 var1 = split_tree (arg1, type, code,
9782 &minus_var1, &con1, &minus_con1,
9783 &lit1, &minus_lit1, code == MINUS_EXPR);
9785 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9786 if (code == MINUS_EXPR)
9787 code = PLUS_EXPR;
9789 /* With undefined overflow prefer doing association in a type
9790 which wraps on overflow, if that is one of the operand types. */
9791 if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
9792 && !TYPE_OVERFLOW_WRAPS (type))
9794 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9795 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9796 atype = TREE_TYPE (arg0);
9797 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9798 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9799 atype = TREE_TYPE (arg1);
9800 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9803 /* With undefined overflow we can only associate constants with one
9804 variable, and constants whose association doesn't overflow. */
9805 if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
9806 && !TYPE_OVERFLOW_WRAPS (atype))
9808 if ((var0 && var1) || (minus_var0 && minus_var1))
9810 /* ??? If split_tree would handle NEGATE_EXPR we could
9811 simply reject these cases and the allowed cases would
9812 be the var0/minus_var1 ones. */
9813 tree tmp0 = var0 ? var0 : minus_var0;
9814 tree tmp1 = var1 ? var1 : minus_var1;
9815 bool one_neg = false;
9817 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9819 tmp0 = TREE_OPERAND (tmp0, 0);
9820 one_neg = !one_neg;
9822 if (CONVERT_EXPR_P (tmp0)
9823 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9824 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9825 <= TYPE_PRECISION (atype)))
9826 tmp0 = TREE_OPERAND (tmp0, 0);
9827 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9829 tmp1 = TREE_OPERAND (tmp1, 0);
9830 one_neg = !one_neg;
9832 if (CONVERT_EXPR_P (tmp1)
9833 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9834 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9835 <= TYPE_PRECISION (atype)))
9836 tmp1 = TREE_OPERAND (tmp1, 0);
9837 /* The only case we can still associate with two variables
9838 is if they cancel out. */
9839 if (!one_neg
9840 || !operand_equal_p (tmp0, tmp1, 0))
9841 ok = false;
9843 else if ((var0 && minus_var1
9844 && ! operand_equal_p (var0, minus_var1, 0))
9845 || (minus_var0 && var1
9846 && ! operand_equal_p (minus_var0, var1, 0)))
9847 ok = false;
9850 /* Only do something if we found more than two objects. Otherwise,
9851 nothing has changed and we risk infinite recursion. */
9852 if (ok
9853 && ((var0 != 0) + (var1 != 0)
9854 + (minus_var0 != 0) + (minus_var1 != 0)
9855 + (con0 != 0) + (con1 != 0)
9856 + (minus_con0 != 0) + (minus_con1 != 0)
9857 + (lit0 != 0) + (lit1 != 0)
9858 + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
9860 var0 = associate_trees (loc, var0, var1, code, atype);
9861 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
9862 code, atype);
9863 con0 = associate_trees (loc, con0, con1, code, atype);
9864 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
9865 code, atype);
9866 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9867 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9868 code, atype);
9870 if (minus_var0 && var0)
9872 var0 = associate_trees (loc, var0, minus_var0,
9873 MINUS_EXPR, atype);
9874 minus_var0 = 0;
9876 if (minus_con0 && con0)
9878 con0 = associate_trees (loc, con0, minus_con0,
9879 MINUS_EXPR, atype);
9880 minus_con0 = 0;
9883 /* Preserve the MINUS_EXPR if the negative part of the literal is
9884 greater than the positive part. Otherwise, the multiplicative
9885 folding code (i.e. extract_muldiv) may be fooled in case
9886 unsigned constants are subtracted, like in the following
9887 example: ((X*2 + 4) - 8U)/2. */
9888 if (minus_lit0 && lit0)
9890 if (TREE_CODE (lit0) == INTEGER_CST
9891 && TREE_CODE (minus_lit0) == INTEGER_CST
9892 && tree_int_cst_lt (lit0, minus_lit0)
9893 /* But avoid ending up with only negated parts. */
9894 && (var0 || con0))
9896 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9897 MINUS_EXPR, atype);
9898 lit0 = 0;
9900 else
9902 lit0 = associate_trees (loc, lit0, minus_lit0,
9903 MINUS_EXPR, atype);
9904 minus_lit0 = 0;
9908 /* Don't introduce overflows through reassociation. */
9909 if ((lit0 && TREE_OVERFLOW_P (lit0))
9910 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
9911 return NULL_TREE;
9913 /* Eliminate lit0 and minus_lit0 by folding them into con0 and minus_con0. */
9914 con0 = associate_trees (loc, con0, lit0, code, atype);
9915 lit0 = 0;
9916 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
9917 code, atype);
9918 minus_lit0 = 0;
9920 /* Eliminate minus_con0. */
9921 if (minus_con0)
9923 if (con0)
9924 con0 = associate_trees (loc, con0, minus_con0,
9925 MINUS_EXPR, atype);
9926 else if (var0)
9927 var0 = associate_trees (loc, var0, minus_con0,
9928 MINUS_EXPR, atype);
9929 else
9930 gcc_unreachable ();
9931 minus_con0 = 0;
9934 /* Eliminate minus_var0. */
9935 if (minus_var0)
9937 if (con0)
9938 con0 = associate_trees (loc, con0, minus_var0,
9939 MINUS_EXPR, atype);
9940 else
9941 gcc_unreachable ();
9942 minus_var0 = 0;
9945 return
9946 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9947 code, atype));
9951 return NULL_TREE;
9953 case POINTER_DIFF_EXPR:
9954 case MINUS_EXPR:
9955 /* Fold &a[i] - &a[j] to i-j. */
9956 if (TREE_CODE (arg0) == ADDR_EXPR
9957 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9958 && TREE_CODE (arg1) == ADDR_EXPR
9959 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9961 tree tem = fold_addr_of_array_ref_difference (loc, type,
9962 TREE_OPERAND (arg0, 0),
9963 TREE_OPERAND (arg1, 0),
9964 code
9965 == POINTER_DIFF_EXPR);
9966 if (tem)
9967 return tem;
9970 /* Further transformations are not for pointers. */
9971 if (code == POINTER_DIFF_EXPR)
9972 return NULL_TREE;
9974 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9975 if (TREE_CODE (arg0) == NEGATE_EXPR
9976 && negate_expr_p (op1)
9977 /* If arg0 is e.g. unsigned int and type is int, then this could
9978 introduce UB, because if A is INT_MIN at runtime, the original
9979 expression can be well defined while the latter is not.
9980 See PR83269. */
9981 && !(ANY_INTEGRAL_TYPE_P (type)
9982 && TYPE_OVERFLOW_UNDEFINED (type)
9983 && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9984 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9985 return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
9986 fold_convert_loc (loc, type,
9987 TREE_OPERAND (arg0, 0)));
9989 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9990 __complex__ ( x, -y ). This is not the same for SNaNs or if
9991 signed zeros are involved. */
9992 if (!HONOR_SNANS (element_mode (arg0))
9993 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9994 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9996 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9997 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9998 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9999 bool arg0rz = false, arg0iz = false;
10000 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10001 || (arg0i && (arg0iz = real_zerop (arg0i))))
10003 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10004 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10005 if (arg0rz && arg1i && real_zerop (arg1i))
10007 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10008 arg1r ? arg1r
10009 : build1 (REALPART_EXPR, rtype, arg1));
10010 tree ip = arg0i ? arg0i
10011 : build1 (IMAGPART_EXPR, rtype, arg0);
10012 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10014 else if (arg0iz && arg1r && real_zerop (arg1r))
10016 tree rp = arg0r ? arg0r
10017 : build1 (REALPART_EXPR, rtype, arg0);
10018 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10019 arg1i ? arg1i
10020 : build1 (IMAGPART_EXPR, rtype, arg1));
10021 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10026 /* A - B -> A + (-B) if B is easily negatable. */
10027 if (negate_expr_p (op1)
10028 && ! TYPE_OVERFLOW_SANITIZED (type)
10029 && ((FLOAT_TYPE_P (type)
10030 /* Avoid this transformation if B is a positive REAL_CST. */
10031 && (TREE_CODE (op1) != REAL_CST
10032 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
10033 || INTEGRAL_TYPE_P (type)))
10034 return fold_build2_loc (loc, PLUS_EXPR, type,
10035 fold_convert_loc (loc, type, arg0),
10036 negate_expr (op1));
10038 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10039 one. Make sure the type is not saturating and has the signedness of
10040 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10041 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10042 if ((TREE_CODE (arg0) == MULT_EXPR
10043 || TREE_CODE (arg1) == MULT_EXPR)
10044 && !TYPE_SATURATING (type)
10045 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10046 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10047 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10049 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10050 if (tem)
10051 return tem;
10054 goto associate;
10056 case MULT_EXPR:
10057 if (! FLOAT_TYPE_P (type))
10059 /* Transform x * -C into -x * C if x is easily negatable. */
10060 if (TREE_CODE (op1) == INTEGER_CST
10061 && tree_int_cst_sgn (op1) == -1
10062 && negate_expr_p (op0)
10063 && negate_expr_p (op1)
10064 && (tem = negate_expr (op1)) != op1
10065 && ! TREE_OVERFLOW (tem))
10066 return fold_build2_loc (loc, MULT_EXPR, type,
10067 fold_convert_loc (loc, type,
10068 negate_expr (op0)), tem);
10070 strict_overflow_p = false;
10071 if (TREE_CODE (arg1) == INTEGER_CST
10072 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10073 &strict_overflow_p)) != 0)
10075 if (strict_overflow_p)
10076 fold_overflow_warning (("assuming signed overflow does not "
10077 "occur when simplifying "
10078 "multiplication"),
10079 WARN_STRICT_OVERFLOW_MISC);
10080 return fold_convert_loc (loc, type, tem);
10083 /* Optimize z * conj(z) for integer complex numbers. */
10084 if (TREE_CODE (arg0) == CONJ_EXPR
10085 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10086 return fold_mult_zconjz (loc, type, arg1);
10087 if (TREE_CODE (arg1) == CONJ_EXPR
10088 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10089 return fold_mult_zconjz (loc, type, arg0);
10091 else
10093 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10094 This is not the same for NaNs or if signed zeros are
10095 involved. */
10096 if (!HONOR_NANS (arg0)
10097 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10098 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10099 && TREE_CODE (arg1) == COMPLEX_CST
10100 && real_zerop (TREE_REALPART (arg1)))
10102 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10103 if (real_onep (TREE_IMAGPART (arg1)))
10104 return
10105 fold_build2_loc (loc, COMPLEX_EXPR, type,
10106 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10107 rtype, arg0)),
10108 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10109 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10110 return
10111 fold_build2_loc (loc, COMPLEX_EXPR, type,
10112 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10113 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10114 rtype, arg0)));
10117 /* Optimize z * conj(z) for floating point complex numbers.
10118 Guarded by flag_unsafe_math_optimizations as non-finite
10119 imaginary components don't produce scalar results. */
10120 if (flag_unsafe_math_optimizations
10121 && TREE_CODE (arg0) == CONJ_EXPR
10122 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10123 return fold_mult_zconjz (loc, type, arg1);
10124 if (flag_unsafe_math_optimizations
10125 && TREE_CODE (arg1) == CONJ_EXPR
10126 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10127 return fold_mult_zconjz (loc, type, arg0);
10129 goto associate;
10131 case BIT_IOR_EXPR:
10132 /* Canonicalize (X & C1) | C2. */
10133 if (TREE_CODE (arg0) == BIT_AND_EXPR
10134 && TREE_CODE (arg1) == INTEGER_CST
10135 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10137 int width = TYPE_PRECISION (type), w;
10138 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
10139 wide_int c2 = wi::to_wide (arg1);
10141 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10142 if ((c1 & c2) == c1)
10143 return omit_one_operand_loc (loc, type, arg1,
10144 TREE_OPERAND (arg0, 0));
10146 wide_int msk = wi::mask (width, false,
10147 TYPE_PRECISION (TREE_TYPE (arg1)));
10149 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10150 if (wi::bit_and_not (msk, c1 | c2) == 0)
10152 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10153 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10156 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10157 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10158 mode which allows further optimizations. */
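/* For example, (X & 7) | 4 becomes (X & 3) | 4: bits of C1 already set in C2 are redundant.  The loop below instead widens C3 back to a whole mask such as 0xff when (C1 | C2) covers it, since such masks tend to enable further optimizations. */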
10159 c1 &= msk;
10160 c2 &= msk;
10161 wide_int c3 = wi::bit_and_not (c1, c2);
10162 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10164 wide_int mask = wi::mask (w, false,
10165 TYPE_PRECISION (type));
10166 if (((c1 | c2) & mask) == mask
10167 && wi::bit_and_not (c1, mask) == 0)
10169 c3 = mask;
10170 break;
10174 if (c3 != c1)
10176 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10177 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
10178 wide_int_to_tree (type, c3));
10179 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10183 /* See if this can be simplified into a rotate first. If that
10184 is unsuccessful continue in the association code. */
10185 goto bit_rotate;
10187 case BIT_XOR_EXPR:
10188 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10189 if (TREE_CODE (arg0) == BIT_AND_EXPR
10190 && INTEGRAL_TYPE_P (type)
10191 && integer_onep (TREE_OPERAND (arg0, 1))
10192 && integer_onep (arg1))
10193 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10194 build_zero_cst (TREE_TYPE (arg0)));
10196 /* See if this can be simplified into a rotate first. If that
10197 is unsuccessful continue in the association code. */
10198 goto bit_rotate;
10200 case BIT_AND_EXPR:
10201 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10202 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10203 && INTEGRAL_TYPE_P (type)
10204 && integer_onep (TREE_OPERAND (arg0, 1))
10205 && integer_onep (arg1))
10207 tree tem2;
10208 tem = TREE_OPERAND (arg0, 0);
10209 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10210 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10211 tem, tem2);
10212 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10213 build_zero_cst (TREE_TYPE (tem)));
10215 /* Fold ~X & 1 as (X & 1) == 0. */
10216 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10217 && INTEGRAL_TYPE_P (type)
10218 && integer_onep (arg1))
10220 tree tem2;
10221 tem = TREE_OPERAND (arg0, 0);
10222 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10223 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10224 tem, tem2);
10225 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10226 build_zero_cst (TREE_TYPE (tem)));
10228 /* Fold !X & 1 as X == 0. */
10229 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10230 && integer_onep (arg1))
10232 tem = TREE_OPERAND (arg0, 0);
10233 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10234 build_zero_cst (TREE_TYPE (tem)));
10237 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10238 multiple of 1 << CST. */
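/* -(1 << CST) clears exactly the CST least significant bits; if X * Y is a multiple of 1 << CST, those bits are already zero, so the AND changes nothing. */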
10239 if (TREE_CODE (arg1) == INTEGER_CST)
10241 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10242 wide_int ncst1 = -cst1;
10243 if ((cst1 & ncst1) == ncst1
10244 && multiple_of_p (type, arg0,
10245 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10246 return fold_convert_loc (loc, type, arg0);
10249 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10250 bits from CST2. */
10251 if (TREE_CODE (arg1) == INTEGER_CST
10252 && TREE_CODE (arg0) == MULT_EXPR
10253 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10255 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
10256 wide_int masked
10257 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
10259 if (masked == 0)
10260 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10261 arg0, arg1);
10262 else if (masked != warg1)
10264 /* Avoid the transform if arg1 is a mask of some
10265 mode which allows further optimizations. */
10266 int pop = wi::popcount (warg1);
10267 if (!(pop >= BITS_PER_UNIT
10268 && pow2p_hwi (pop)
10269 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10270 return fold_build2_loc (loc, code, type, op0,
10271 wide_int_to_tree (type, masked));
10275 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10276 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10277 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10279 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10281 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
10282 if (mask == -1)
10283 return
10284 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10287 goto associate;
10289 case RDIV_EXPR:
10290 /* Don't touch a floating-point divide by zero unless the mode
10291 of the constant can represent infinity. */
10292 if (TREE_CODE (arg1) == REAL_CST
10293 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10294 && real_zerop (arg1))
10295 return NULL_TREE;
10297 /* (-A) / (-B) -> A / B */
10298 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10299 return fold_build2_loc (loc, RDIV_EXPR, type,
10300 TREE_OPERAND (arg0, 0),
10301 negate_expr (arg1));
10302 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10303 return fold_build2_loc (loc, RDIV_EXPR, type,
10304 negate_expr (arg0),
10305 TREE_OPERAND (arg1, 0));
10306 return NULL_TREE;
10308 case TRUNC_DIV_EXPR:
10309 /* Fall through */
10311 case FLOOR_DIV_EXPR:
10312 /* Simplify A / (B << N) where A and B are positive and B is
10313 a power of 2, to A >> (N + log2(B)). */
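/* Since B == 1 << log2(B), we have B << N == 1 << (N + log2(B)), and dividing a non-negative value by a power of two is a right shift by its exponent. */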
10314 strict_overflow_p = false;
10315 if (TREE_CODE (arg1) == LSHIFT_EXPR
10316 && (TYPE_UNSIGNED (type)
10317 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10319 tree sval = TREE_OPERAND (arg1, 0);
10320 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10322 tree sh_cnt = TREE_OPERAND (arg1, 1);
10323 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10324 wi::exact_log2 (wi::to_wide (sval)));
10326 if (strict_overflow_p)
10327 fold_overflow_warning (("assuming signed overflow does not "
10328 "occur when simplifying A / (B << N)"),
10329 WARN_STRICT_OVERFLOW_MISC);
10331 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10332 sh_cnt, pow2);
10333 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10334 fold_convert_loc (loc, type, arg0), sh_cnt);
10338 /* Fall through */
10340 case ROUND_DIV_EXPR:
10341 case CEIL_DIV_EXPR:
10342 case EXACT_DIV_EXPR:
10343 if (integer_zerop (arg1))
10344 return NULL_TREE;
10346 /* Convert -A / -B to A / B when the type is signed and overflow is
10347 undefined. */
10348 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10349 && TREE_CODE (op0) == NEGATE_EXPR
10350 && negate_expr_p (op1))
10352 if (INTEGRAL_TYPE_P (type))
10353 fold_overflow_warning (("assuming signed overflow does not occur "
10354 "when distributing negation across "
10355 "division"),
10356 WARN_STRICT_OVERFLOW_MISC);
10357 return fold_build2_loc (loc, code, type,
10358 fold_convert_loc (loc, type,
10359 TREE_OPERAND (arg0, 0)),
10360 negate_expr (op1));
10362 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10363 && TREE_CODE (arg1) == NEGATE_EXPR
10364 && negate_expr_p (op0))
10366 if (INTEGRAL_TYPE_P (type))
10367 fold_overflow_warning (("assuming signed overflow does not occur "
10368 "when distributing negation across "
10369 "division"),
10370 WARN_STRICT_OVERFLOW_MISC);
10371 return fold_build2_loc (loc, code, type,
10372 negate_expr (op0),
10373 fold_convert_loc (loc, type,
10374 TREE_OPERAND (arg1, 0)));
10377 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10378 operation, EXACT_DIV_EXPR.
10380 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10381 At one time others generated faster code; it's not clear if they do
10382 after the last round of changes to the DIV code in expmed.c. */
10383 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10384 && multiple_of_p (type, arg0, arg1))
10385 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10386 fold_convert (type, arg0),
10387 fold_convert (type, arg1));
10389 strict_overflow_p = false;
10390 if (TREE_CODE (arg1) == INTEGER_CST
10391 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10392 &strict_overflow_p)) != 0)
10394 if (strict_overflow_p)
10395 fold_overflow_warning (("assuming signed overflow does not occur "
10396 "when simplifying division"),
10397 WARN_STRICT_OVERFLOW_MISC);
10398 return fold_convert_loc (loc, type, tem);
10401 return NULL_TREE;
10403 case CEIL_MOD_EXPR:
10404 case FLOOR_MOD_EXPR:
10405 case ROUND_MOD_EXPR:
10406 case TRUNC_MOD_EXPR:
10407 strict_overflow_p = false;
10408 if (TREE_CODE (arg1) == INTEGER_CST
10409 && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10410 &strict_overflow_p)) != 0)
10412 if (strict_overflow_p)
10413 fold_overflow_warning (("assuming signed overflow does not occur "
10414 "when simplifying modulus"),
10415 WARN_STRICT_OVERFLOW_MISC);
10416 return fold_convert_loc (loc, type, tem);
10419 return NULL_TREE;
10421 case LROTATE_EXPR:
10422 case RROTATE_EXPR:
10423 case RSHIFT_EXPR:
10424 case LSHIFT_EXPR:
10425 /* Since a negative shift count is not well-defined,
10426 don't try to compute it in the compiler. */
10427 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10428 return NULL_TREE;
10430 prec = element_precision (type);
10432 /* If we have a rotate of a bit operation with the rotate count and
10433 the second operand of the bit operation both constant,
10434 permute the two operations. */
10435 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10436 && (TREE_CODE (arg0) == BIT_AND_EXPR
10437 || TREE_CODE (arg0) == BIT_IOR_EXPR
10438 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10439 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10441 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10442 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10443 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10444 fold_build2_loc (loc, code, type,
10445 arg00, arg1),
10446 fold_build2_loc (loc, code, type,
10447 arg01, arg1));
10450 /* Two consecutive rotates adding up to some integer
10451 multiple of the precision of the type can be ignored. */
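/* E.g. in a 32-bit type, rotating right by 5 and then by 27 rotates by 32 bits in total, which is the identity. */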
10452 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10453 && TREE_CODE (arg0) == RROTATE_EXPR
10454 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10455 && wi::umod_trunc (wi::to_wide (arg1)
10456 + wi::to_wide (TREE_OPERAND (arg0, 1)),
10457 prec) == 0)
10458 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10460 return NULL_TREE;
10462 case MIN_EXPR:
10463 case MAX_EXPR:
10464 goto associate;
10466 case TRUTH_ANDIF_EXPR:
10467 /* Note that the operands of this must be ints
10468 and their values must be 0 or 1.
10469 ("true" is a fixed value perhaps depending on the language.) */
10470 /* If first arg is constant zero, return it. */
10471 if (integer_zerop (arg0))
10472 return fold_convert_loc (loc, type, arg0);
10473 /* FALLTHRU */
10474 case TRUTH_AND_EXPR:
10475 /* If either arg is constant true, drop it. */
10476 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10477 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10478 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10479 /* Preserve sequence points. */
10480 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10481 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10482 /* If second arg is constant zero, result is zero, but first arg
10483 must be evaluated. */
10484 if (integer_zerop (arg1))
10485 return omit_one_operand_loc (loc, type, arg1, arg0);
10486 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10487 case will be handled here. */
10488 if (integer_zerop (arg0))
10489 return omit_one_operand_loc (loc, type, arg0, arg1);
10491 /* !X && X is always false. */
10492 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10493 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10494 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10495 /* X && !X is always false. */
10496 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10497 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10498 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10500 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10501 means A >= Y && A != MAX, but in this case we know that
10502 A < X <= MAX. */
10504 if (!TREE_SIDE_EFFECTS (arg0)
10505 && !TREE_SIDE_EFFECTS (arg1))
10507 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10508 if (tem && !operand_equal_p (tem, arg0, 0))
10509 return fold_build2_loc (loc, code, type, tem, arg1);
10511 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10512 if (tem && !operand_equal_p (tem, arg1, 0))
10513 return fold_build2_loc (loc, code, type, arg0, tem);
10516 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10517 != NULL_TREE)
10518 return tem;
10520 return NULL_TREE;
10522 case TRUTH_ORIF_EXPR:
10523 /* Note that the operands of this must be ints
10524 and their values must be 0 or true.
10525 ("true" is a fixed value perhaps depending on the language.) */
10526 /* If first arg is constant true, return it. */
10527 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10528 return fold_convert_loc (loc, type, arg0);
10529 /* FALLTHRU */
10530 case TRUTH_OR_EXPR:
10531 /* If either arg is constant zero, drop it. */
10532 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10533 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10534 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10535 /* Preserve sequence points. */
10536 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10537 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10538 /* If second arg is constant true, result is true, but we must
10539 evaluate first arg. */
10540 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10541 return omit_one_operand_loc (loc, type, arg1, arg0);
10542 /* Likewise for first arg, but note this only occurs here for
10543 TRUTH_OR_EXPR. */
10544 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10545 return omit_one_operand_loc (loc, type, arg0, arg1);
10547 /* !X || X is always true. */
10548 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10549 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10550 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10551 /* X || !X is always true. */
10552 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10553 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10554 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10556 /* (X && !Y) || (!X && Y) is X ^ Y */
10557 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10558 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10560 tree a0, a1, l0, l1, n0, n1;
10562 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10563 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10565 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10566 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10568 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10569 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10571 if ((operand_equal_p (n0, a0, 0)
10572 && operand_equal_p (n1, a1, 0))
10573 || (operand_equal_p (n0, a1, 0)
10574 && operand_equal_p (n1, a0, 0)))
10575 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10578 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10579 != NULL_TREE)
10580 return tem;
10582 return NULL_TREE;
10584 case TRUTH_XOR_EXPR:
10585 /* If the second arg is constant zero, drop it. */
10586 if (integer_zerop (arg1))
10587 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10588 /* If the second arg is constant true, this is a logical inversion. */
10589 if (integer_onep (arg1))
10591 tem = invert_truthvalue_loc (loc, arg0);
10592 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10594 /* Identical arguments cancel to zero. */
10595 if (operand_equal_p (arg0, arg1, 0))
10596 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10598 /* !X ^ X is always true. */
10599 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10600 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10601 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10603 /* X ^ !X is always true. */
10604 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10605 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10606 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10608 return NULL_TREE;
10610 case EQ_EXPR:
10611 case NE_EXPR:
10612 STRIP_NOPS (arg0);
10613 STRIP_NOPS (arg1);
10615 tem = fold_comparison (loc, code, type, op0, op1);
10616 if (tem != NULL_TREE)
10617 return tem;
10619 /* bool_var != 1 becomes !bool_var. */
10620 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10621 && code == NE_EXPR)
10622 return fold_convert_loc (loc, type,
10623 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10624 TREE_TYPE (arg0), arg0));
10626 /* bool_var == 0 becomes !bool_var. */
10627 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10628 && code == EQ_EXPR)
10629 return fold_convert_loc (loc, type,
10630 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10631 TREE_TYPE (arg0), arg0));
10633 /* !exp != 0 becomes !exp */
10634 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10635 && code == NE_EXPR)
10636 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10638 /* If this is an EQ or NE comparison with zero and ARG0 is
10639 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10640 two operations, but the latter can be done in one less insn
10641 on machines that have only two-operand insns or on which a
10642 constant cannot be the first operand. */
10643 if (TREE_CODE (arg0) == BIT_AND_EXPR
10644 && integer_zerop (arg1))
10646 tree arg00 = TREE_OPERAND (arg0, 0);
10647 tree arg01 = TREE_OPERAND (arg0, 1);
10648 if (TREE_CODE (arg00) == LSHIFT_EXPR
10649 && integer_onep (TREE_OPERAND (arg00, 0)))
10651 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10652 arg01, TREE_OPERAND (arg00, 1));
10653 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10654 build_int_cst (TREE_TYPE (arg0), 1));
10655 return fold_build2_loc (loc, code, type,
10656 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10657 arg1);
10659 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10660 && integer_onep (TREE_OPERAND (arg01, 0)))
10662 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10663 arg00, TREE_OPERAND (arg01, 1));
10664 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10665 build_int_cst (TREE_TYPE (arg0), 1));
10666 return fold_build2_loc (loc, code, type,
10667 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10668 arg1);
10672 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10673 C1 is a valid shift constant, and C2 is a power of two, i.e.
10674 a single bit. */
10675 if (TREE_CODE (arg0) == BIT_AND_EXPR
10676 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10677 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10678 == INTEGER_CST
10679 && integer_pow2p (TREE_OPERAND (arg0, 1))
10680 && integer_zerop (arg1))
10682 tree itype = TREE_TYPE (arg0);
10683 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10684 prec = TYPE_PRECISION (itype);
10686 /* Check for a valid shift count. */
10687 if (wi::ltu_p (wi::to_wide (arg001), prec))
10689 tree arg01 = TREE_OPERAND (arg0, 1);
10690 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10691 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10692 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10693 can be rewritten as (X & (C2 << C1)) != 0. */
10694 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10696 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10697 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10698 return fold_build2_loc (loc, code, type, tem,
10699 fold_convert_loc (loc, itype, arg1));
10701 /* Otherwise, for signed (arithmetic) shifts,
10702 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10703 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10704 else if (!TYPE_UNSIGNED (itype))
10705 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10706 arg000, build_int_cst (itype, 0));
10707 /* Otherwise, for unsigned (logical) shifts,
10708 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10709 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10710 else
10711 return omit_one_operand_loc (loc, type,
10712 code == EQ_EXPR ? integer_one_node
10713 : integer_zero_node,
10714 arg000);
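/* For illustration, with a hypothetical 32-bit X: if X is unsigned,
   ((X >> 3) & 4) != 0 becomes (X & 32) != 0, since 4 << 3 still fits
   in 32 bits.  With C2 == 4 and C1 == 30 the shifted mask would need
   bit 32, so instead ((X >> 30) & 4) != 0 becomes X < 0 for signed X
   (the arithmetic shift replicates the sign bit) and constant false
   for unsigned X (the logical shift brings in zeros).  */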
10718 /* If this is a comparison of a field, we may be able to simplify it. */
10719 if ((TREE_CODE (arg0) == COMPONENT_REF
10720 || TREE_CODE (arg0) == BIT_FIELD_REF)
10721 /* Handle the constant case even without -O
10722 to make sure the warnings are given. */
10723 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10725 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10726 if (t1)
10727 return t1;
10730 /* Optimize comparisons of strlen vs zero to a compare of the
10731 first character of the string vs zero. To wit,
10732 strlen(ptr) == 0 => *ptr == 0
10733 strlen(ptr) != 0 => *ptr != 0
10734 Other cases should reduce to one of these two (or a constant)
10735 due to the return value of strlen being unsigned. */
10736 if (TREE_CODE (arg0) == CALL_EXPR
10737 && integer_zerop (arg1))
10739 tree fndecl = get_callee_fndecl (arg0);
10741 if (fndecl
10742 && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
10743 && call_expr_nargs (arg0) == 1
10744 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10746 tree iref = build_fold_indirect_ref_loc (loc,
10747 CALL_EXPR_ARG (arg0, 0));
10748 return fold_build2_loc (loc, code, type, iref,
10749 build_int_cst (TREE_TYPE (iref), 0));
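/* For illustration: strlen (s) != 0 becomes *s != 0, which avoids
   walking the whole string just to test for emptiness.  */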
10753 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10754 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10755 if (TREE_CODE (arg0) == RSHIFT_EXPR
10756 && integer_zerop (arg1)
10757 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10759 tree arg00 = TREE_OPERAND (arg0, 0);
10760 tree arg01 = TREE_OPERAND (arg0, 1);
10761 tree itype = TREE_TYPE (arg00);
10762 if (wi::to_wide (arg01) == element_precision (itype) - 1)
10764 if (TYPE_UNSIGNED (itype))
10766 itype = signed_type_for (itype);
10767 arg00 = fold_convert_loc (loc, itype, arg00);
10769 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10770 type, arg00, build_zero_cst (itype));
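/* For illustration, with a hypothetical 32-bit unsigned X:
   (X >> 31) != 0 becomes (int) X < 0, testing the sign bit
   directly.  */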
10774 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10775 (X & C) == 0 when C is a single bit. */
10776 if (TREE_CODE (arg0) == BIT_AND_EXPR
10777 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10778 && integer_zerop (arg1)
10779 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10781 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10782 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10783 TREE_OPERAND (arg0, 1));
10784 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10785 type, tem,
10786 fold_convert_loc (loc, TREE_TYPE (arg0),
10787 arg1));
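/* For illustration: (~X & 8) == 0 becomes (X & 8) != 0; bit 3 of X
   must be set for its complement to have that bit clear.  */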
10790 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10791 constant C is a power of two, i.e. a single bit. */
10792 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10793 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10794 && integer_zerop (arg1)
10795 && integer_pow2p (TREE_OPERAND (arg0, 1))
10796 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10797 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10799 tree arg00 = TREE_OPERAND (arg0, 0);
10800 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10801 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10804 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10805 when C is a power of two, i.e. a single bit. */
10806 if (TREE_CODE (arg0) == BIT_AND_EXPR
10807 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10808 && integer_zerop (arg1)
10809 && integer_pow2p (TREE_OPERAND (arg0, 1))
10810 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10811 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10813 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10814 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10815 arg000, TREE_OPERAND (arg0, 1));
10816 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10817 tem, build_int_cst (TREE_TYPE (tem), 0));
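/* For illustration, with C == 16 in either form above:
   ((X & 16) ^ 16) == 0 and ((X ^ 16) & 16) == 0 both become
   (X & 16) != 0, since XOR with the single bit merely inverts
   the tested condition.  */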
10820 if (integer_zerop (arg1)
10821 && tree_expr_nonzero_p (arg0))
10823 tree res = constant_boolean_node (code == NE_EXPR, type);
10824 return omit_one_operand_loc (loc, type, res, arg0);
10827 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
10828 if (TREE_CODE (arg0) == BIT_AND_EXPR
10829 && TREE_CODE (arg1) == BIT_AND_EXPR)
10831 tree arg00 = TREE_OPERAND (arg0, 0);
10832 tree arg01 = TREE_OPERAND (arg0, 1);
10833 tree arg10 = TREE_OPERAND (arg1, 0);
10834 tree arg11 = TREE_OPERAND (arg1, 1);
10835 tree itype = TREE_TYPE (arg0);
10837 if (operand_equal_p (arg01, arg11, 0))
10839 tem = fold_convert_loc (loc, itype, arg10);
10840 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10841 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10842 return fold_build2_loc (loc, code, type, tem,
10843 build_zero_cst (itype));
10845 if (operand_equal_p (arg01, arg10, 0))
10847 tem = fold_convert_loc (loc, itype, arg11);
10848 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10849 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10850 return fold_build2_loc (loc, code, type, tem,
10851 build_zero_cst (itype));
10853 if (operand_equal_p (arg00, arg11, 0))
10855 tem = fold_convert_loc (loc, itype, arg10);
10856 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10857 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10858 return fold_build2_loc (loc, code, type, tem,
10859 build_zero_cst (itype));
10861 if (operand_equal_p (arg00, arg10, 0))
10863 tem = fold_convert_loc (loc, itype, arg11);
10864 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10865 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10866 return fold_build2_loc (loc, code, type, tem,
10867 build_zero_cst (itype));
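/* For illustration: (X & 7) == (Y & 7) becomes ((X ^ Y) & 7) == 0;
   the masked values agree exactly when X and Y do not differ in any
   of the mask bits.  */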
10871 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10872 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10874 tree arg00 = TREE_OPERAND (arg0, 0);
10875 tree arg01 = TREE_OPERAND (arg0, 1);
10876 tree arg10 = TREE_OPERAND (arg1, 0);
10877 tree arg11 = TREE_OPERAND (arg1, 1);
10878 tree itype = TREE_TYPE (arg0);
10880 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10881 operand_equal_p guarantees no side-effects so we don't need
10882 to use omit_one_operand on Z. */
10883 if (operand_equal_p (arg01, arg11, 0))
10884 return fold_build2_loc (loc, code, type, arg00,
10885 fold_convert_loc (loc, TREE_TYPE (arg00),
10886 arg10));
10887 if (operand_equal_p (arg01, arg10, 0))
10888 return fold_build2_loc (loc, code, type, arg00,
10889 fold_convert_loc (loc, TREE_TYPE (arg00),
10890 arg11));
10891 if (operand_equal_p (arg00, arg11, 0))
10892 return fold_build2_loc (loc, code, type, arg01,
10893 fold_convert_loc (loc, TREE_TYPE (arg01),
10894 arg10));
10895 if (operand_equal_p (arg00, arg10, 0))
10896 return fold_build2_loc (loc, code, type, arg01,
10897 fold_convert_loc (loc, TREE_TYPE (arg01),
10898 arg11));
10900 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10901 if (TREE_CODE (arg01) == INTEGER_CST
10902 && TREE_CODE (arg11) == INTEGER_CST)
10904 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10905 fold_convert_loc (loc, itype, arg11));
10906 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10907 return fold_build2_loc (loc, code, type, tem,
10908 fold_convert_loc (loc, itype, arg10));
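/* For illustration: (X ^ 5) == (Y ^ 3) becomes (X ^ 6) == Y,
   since 5 ^ 3 == 6 and XOR cancels on both sides.  */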
10912 /* Attempt to simplify equality/inequality comparisons of complex
10913 values. Only lower the comparison if the result is known or
10914 can be simplified to a single scalar comparison. */
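/* For instance, with hypothetical scalars x and y: x + 0i == y + 0i
   lowers to the single scalar comparison x == y, because the
   imaginary parts fold to the constant true.  */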
10915 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10916 || TREE_CODE (arg0) == COMPLEX_CST)
10917 && (TREE_CODE (arg1) == COMPLEX_EXPR
10918 || TREE_CODE (arg1) == COMPLEX_CST))
10920 tree real0, imag0, real1, imag1;
10921 tree rcond, icond;
10923 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10925 real0 = TREE_OPERAND (arg0, 0);
10926 imag0 = TREE_OPERAND (arg0, 1);
10928 else
10930 real0 = TREE_REALPART (arg0);
10931 imag0 = TREE_IMAGPART (arg0);
10934 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10936 real1 = TREE_OPERAND (arg1, 0);
10937 imag1 = TREE_OPERAND (arg1, 1);
10939 else
10941 real1 = TREE_REALPART (arg1);
10942 imag1 = TREE_IMAGPART (arg1);
10945 rcond = fold_binary_loc (loc, code, type, real0, real1);
10946 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10948 if (integer_zerop (rcond))
10950 if (code == EQ_EXPR)
10951 return omit_two_operands_loc (loc, type, boolean_false_node,
10952 imag0, imag1);
10953 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10955 else
10957 if (code == NE_EXPR)
10958 return omit_two_operands_loc (loc, type, boolean_true_node,
10959 imag0, imag1);
10960 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10964 icond = fold_binary_loc (loc, code, type, imag0, imag1);
10965 if (icond && TREE_CODE (icond) == INTEGER_CST)
10967 if (integer_zerop (icond))
10969 if (code == EQ_EXPR)
10970 return omit_two_operands_loc (loc, type, boolean_false_node,
10971 real0, real1);
10972 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10974 else
10976 if (code == NE_EXPR)
10977 return omit_two_operands_loc (loc, type, boolean_true_node,
10978 real0, real1);
10979 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
10984 return NULL_TREE;
10986 case LT_EXPR:
10987 case GT_EXPR:
10988 case LE_EXPR:
10989 case GE_EXPR:
10990 tem = fold_comparison (loc, code, type, op0, op1);
10991 if (tem != NULL_TREE)
10992 return tem;
10994 /* Transform comparisons of the form X +- C CMP X. */
10995 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10996 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10997 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10998 && !HONOR_SNANS (arg0))
11000 tree arg01 = TREE_OPERAND (arg0, 1);
11001 enum tree_code code0 = TREE_CODE (arg0);
11002 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11004 /* (X - c) > X becomes false. */
11005 if (code == GT_EXPR
11006 && ((code0 == MINUS_EXPR && is_positive >= 0)
11007 || (code0 == PLUS_EXPR && is_positive <= 0)))
11008 return constant_boolean_node (0, type);
11010 /* Likewise (X + c) < X becomes false. */
11011 if (code == LT_EXPR
11012 && ((code0 == PLUS_EXPR && is_positive >= 0)
11013 || (code0 == MINUS_EXPR && is_positive <= 0)))
11014 return constant_boolean_node (0, type);
11016 /* Convert (X - c) <= X to true. */
11017 if (!HONOR_NANS (arg1)
11018 && code == LE_EXPR
11019 && ((code0 == MINUS_EXPR && is_positive >= 0)
11020 || (code0 == PLUS_EXPR && is_positive <= 0)))
11021 return constant_boolean_node (1, type);
11023 /* Convert (X + c) >= X to true. */
11024 if (!HONOR_NANS (arg1)
11025 && code == GE_EXPR
11026 && ((code0 == PLUS_EXPR && is_positive >= 0)
11027 || (code0 == MINUS_EXPR && is_positive <= 0)))
11028 return constant_boolean_node (1, type);
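/* For illustration, with a hypothetical double X: (X - 1.0) > X
   folds to false even when NaNs are honored, because a NaN operand
   makes > false anyway; (X - 1.0) <= X folds to true only when NaNs
   need not be honored, since for a NaN the comparison would be
   false.  */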
11031 /* If we are comparing an ABS_EXPR with a constant, we can
11032 convert all the cases into explicit comparisons, but they may
11033 well not be faster than doing the ABS and one comparison.
11034 But ABS (X) <= C is a range comparison, which becomes a subtraction
11035 and a comparison, and is probably faster. */
11036 if (code == LE_EXPR
11037 && TREE_CODE (arg1) == INTEGER_CST
11038 && TREE_CODE (arg0) == ABS_EXPR
11039 && ! TREE_SIDE_EFFECTS (arg0)
11040 && (tem = negate_expr (arg1)) != 0
11041 && TREE_CODE (tem) == INTEGER_CST
11042 && !TREE_OVERFLOW (tem))
11043 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11044 build2 (GE_EXPR, type,
11045 TREE_OPERAND (arg0, 0), tem),
11046 build2 (LE_EXPR, type,
11047 TREE_OPERAND (arg0, 0), arg1));
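/* For illustration: abs (X) <= 7 becomes X >= -7 && X <= 7, a range
   check instead of computing the absolute value.  */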
11049 /* Convert ABS_EXPR<x> >= 0 to true. */
11050 strict_overflow_p = false;
11051 if (code == GE_EXPR
11052 && (integer_zerop (arg1)
11053 || (! HONOR_NANS (arg0)
11054 && real_zerop (arg1)))
11055 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11057 if (strict_overflow_p)
11058 fold_overflow_warning (("assuming signed overflow does not occur "
11059 "when simplifying comparison of "
11060 "absolute value and zero"),
11061 WARN_STRICT_OVERFLOW_CONDITIONAL);
11062 return omit_one_operand_loc (loc, type,
11063 constant_boolean_node (true, type),
11064 arg0);
11067 /* Convert ABS_EXPR<x> < 0 to false. */
11068 strict_overflow_p = false;
11069 if (code == LT_EXPR
11070 && (integer_zerop (arg1) || real_zerop (arg1))
11071 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11073 if (strict_overflow_p)
11074 fold_overflow_warning (("assuming signed overflow does not occur "
11075 "when simplifying comparison of "
11076 "absolute value and zero"),
11077 WARN_STRICT_OVERFLOW_CONDITIONAL);
11078 return omit_one_operand_loc (loc, type,
11079 constant_boolean_node (false, type),
11080 arg0);
11083 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11084 and similarly for >= into !=. */
11085 if ((code == LT_EXPR || code == GE_EXPR)
11086 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11087 && TREE_CODE (arg1) == LSHIFT_EXPR
11088 && integer_onep (TREE_OPERAND (arg1, 0)))
11089 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11090 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11091 TREE_OPERAND (arg1, 1)),
11092 build_zero_cst (TREE_TYPE (arg0)));
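/* For illustration, with a hypothetical unsigned X: X < (1 << Y)
   becomes (X >> Y) == 0 and X >= (1 << Y) becomes (X >> Y) != 0;
   X lies below the single set bit exactly when no higher bits
   survive the shift.  */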
11094 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
11095 otherwise Y might be >= # of bits in X's type and thus e.g.
11096 (unsigned char) (1 << Y) for Y == 15 might be 0.
11097 If the cast is widening, then 1 << Y should have unsigned type,
11098 otherwise if Y is the number of bits in the signed shift type minus 1,
11099 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
11100 Y == 31 might be 0xffffffff80000000. */
11101 if ((code == LT_EXPR || code == GE_EXPR)
11102 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11103 && CONVERT_EXPR_P (arg1)
11104 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11105 && (element_precision (TREE_TYPE (arg1))
11106 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11107 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11108 || (element_precision (TREE_TYPE (arg1))
11109 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11110 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11112 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11113 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11114 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11115 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11116 build_zero_cst (TREE_TYPE (arg0)));
11119 return NULL_TREE;
11121 case UNORDERED_EXPR:
11122 case ORDERED_EXPR:
11123 case UNLT_EXPR:
11124 case UNLE_EXPR:
11125 case UNGT_EXPR:
11126 case UNGE_EXPR:
11127 case UNEQ_EXPR:
11128 case LTGT_EXPR:
11129 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11131 tree targ0 = strip_float_extensions (arg0);
11132 tree targ1 = strip_float_extensions (arg1);
11133 tree newtype = TREE_TYPE (targ0);
11135 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11136 newtype = TREE_TYPE (targ1);
11138 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11139 return fold_build2_loc (loc, code, type,
11140 fold_convert_loc (loc, newtype, targ0),
11141 fold_convert_loc (loc, newtype, targ1));
11144 return NULL_TREE;
11146 case COMPOUND_EXPR:
11147 /* When pedantic, a compound expression can be neither an lvalue
11148 nor an integer constant expression. */
11149 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11150 return NULL_TREE;
11151 /* Don't let (0, 0) be a null pointer constant. */
11152 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11153 : fold_convert_loc (loc, type, arg1);
11154 return pedantic_non_lvalue_loc (loc, tem);
11156 case ASSERT_EXPR:
11157 /* An ASSERT_EXPR should never be passed to fold_binary. */
11158 gcc_unreachable ();
11160 default:
11161 return NULL_TREE;
11162 } /* switch (code) */
11165 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11166 ((A & N) + B) & M -> (A + B) & M
11167 Similarly if (N & M) == 0,
11168 ((A | N) + B) & M -> (A + B) & M
11169 and for - instead of + (or unary - instead of +)
11170 and/or ^ instead of |.
11171 If B is constant and (B & M) == 0, fold into A & M.
11173 This function is a helper for match.pd patterns. If any optimization
11174 is possible, return the (non-NULL) type in which the simplified
11175 operation should be performed; otherwise return NULL_TREE.
11177 ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
11178 then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
11179 Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
11180 +/-. */
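/* For illustration, with hypothetical unsigned operands and M == 0xff
   (so cst == 8): ((A & 0x1ff) + B) & 0xff simplifies to (A + B) & 0xff
   because (0x1ff & 0xff) == 0xff, and ((A | 0x100) + B) & 0xff
   simplifies the same way because (0x100 & 0xff) == 0.  */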
11181 tree
11182 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
11183 tree arg00, enum tree_code code00, tree arg000, tree arg001,
11184 tree arg01, enum tree_code code01, tree arg010, tree arg011,
11185 tree *pmop)
11187 gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
11188 gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
11189 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11190 if (~cst1 == 0
11191 || (cst1 & (cst1 + 1)) != 0
11192 || !INTEGRAL_TYPE_P (type)
11193 || (!TYPE_OVERFLOW_WRAPS (type)
11194 && TREE_CODE (type) != INTEGER_TYPE)
11195 || (wi::max_value (type) & cst1) != cst1)
11196 return NULL_TREE;
11198 enum tree_code codes[2] = { code00, code01 };
11199 tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
11200 int which = 0;
11201 wide_int cst0;
11203 /* Now we know that arg0 is (C + D) or (C - D) or -C and
11204 arg1 (M) is == (1LL << cst) - 1.
11205 Store C into PMOP[0] and D into PMOP[1]. */
11206 pmop[0] = arg00;
11207 pmop[1] = arg01;
11208 which = code != NEGATE_EXPR;
11210 for (; which >= 0; which--)
11211 switch (codes[which])
11213 case BIT_AND_EXPR:
11214 case BIT_IOR_EXPR:
11215 case BIT_XOR_EXPR:
11216 gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
11217 cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
11218 if (codes[which] == BIT_AND_EXPR)
11220 if (cst0 != cst1)
11221 break;
11223 else if (cst0 != 0)
11224 break;
11225 /* If C or D is of the form (A & N) where
11226 (N & M) == M, or of the form (A | N) or
11227 (A ^ N) where (N & M) == 0, replace it with A. */
11228 pmop[which] = arg0xx[2 * which];
11229 break;
11230 case ERROR_MARK:
11231 if (TREE_CODE (pmop[which]) != INTEGER_CST)
11232 break;
11233 /* If C or D is a N where (N & M) == 0, it can be
11234 omitted (replaced with 0). */
11235 if ((code == PLUS_EXPR
11236 || (code == MINUS_EXPR && which == 0))
11237 && (cst1 & wi::to_wide (pmop[which])) == 0)
11238 pmop[which] = build_int_cst (type, 0);
11239 /* Similarly, with C - N where (-N & M) == 0. */
11240 if (code == MINUS_EXPR
11241 && which == 1
11242 && (cst1 & -wi::to_wide (pmop[which])) == 0)
11243 pmop[which] = build_int_cst (type, 0);
11244 break;
11245 default:
11246 gcc_unreachable ();
11249 /* Only build anything new if we optimized one or both arguments above. */
11250 if (pmop[0] == arg00 && pmop[1] == arg01)
11251 return NULL_TREE;
11253 if (TYPE_OVERFLOW_WRAPS (type))
11254 return type;
11255 else
11256 return unsigned_type_for (type);
11259 /* Used by contains_label_p and contains_label_1. */
11261 struct contains_label_data
11263 hash_set<tree> *pset;
11264 bool inside_switch_p;
11267 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11268 a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
11269 return NULL_TREE. Do not check the subtrees of GOTO_EXPR. */
11271 static tree
11272 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
11274 contains_label_data *d = (contains_label_data *) data;
11275 switch (TREE_CODE (*tp))
11277 case LABEL_EXPR:
11278 return *tp;
11280 case CASE_LABEL_EXPR:
11281 if (!d->inside_switch_p)
11282 return *tp;
11283 return NULL_TREE;
11285 case SWITCH_EXPR:
11286 if (!d->inside_switch_p)
11288 if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
11289 return *tp;
11290 d->inside_switch_p = true;
11291 if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
11292 return *tp;
11293 d->inside_switch_p = false;
11294 *walk_subtrees = 0;
11296 return NULL_TREE;
11298 case GOTO_EXPR:
11299 *walk_subtrees = 0;
11300 return NULL_TREE;
11302 default:
11303 return NULL_TREE;
11307 /* Return whether the sub-tree ST contains a label which is accessible from
11308 outside the sub-tree. */
11310 static bool
11311 contains_label_p (tree st)
11313 hash_set<tree> pset;
11314 contains_label_data data = { &pset, false };
11315 return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
11318 /* Fold a ternary expression of code CODE and type TYPE with operands
11319 OP0, OP1, and OP2. Return the folded expression if folding is
11320 successful. Otherwise, return NULL_TREE. */
11322 tree
11323 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11324 tree op0, tree op1, tree op2)
11326 tree tem;
11327 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11328 enum tree_code_class kind = TREE_CODE_CLASS (code);
11330 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11331 && TREE_CODE_LENGTH (code) == 3);
11333 /* If this is a commutative operation, and OP0 is a constant, move it
11334 to OP1 to reduce the number of tests below. */
11335 if (commutative_ternary_tree_code (code)
11336 && tree_swap_operands_p (op0, op1))
11337 return fold_build3_loc (loc, code, type, op1, op0, op2);
11339 tem = generic_simplify (loc, code, type, op0, op1, op2);
11340 if (tem)
11341 return tem;
11343 /* Strip any conversions that don't change the mode. This is safe
11344 for every expression, except for a comparison expression because
11345 its signedness is derived from its operands. So, in the latter
11346 case, only strip conversions that don't change the signedness.
11348 Note that this is done as an internal manipulation within the
11349 constant folder, in order to find the simplest representation of
11350 the arguments so that their form can be studied. In any cases,
11351 the appropriate type conversions should be put back in the tree
11352 that will get out of the constant folder. */
11353 if (op0)
11355 arg0 = op0;
11356 STRIP_NOPS (arg0);
11359 if (op1)
11361 arg1 = op1;
11362 STRIP_NOPS (arg1);
11365 if (op2)
11367 arg2 = op2;
11368 STRIP_NOPS (arg2);
11371 switch (code)
11373 case COMPONENT_REF:
11374 if (TREE_CODE (arg0) == CONSTRUCTOR
11375 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11377 unsigned HOST_WIDE_INT idx;
11378 tree field, value;
11379 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11380 if (field == arg1)
11381 return value;
11383 return NULL_TREE;
11385 case COND_EXPR:
11386 case VEC_COND_EXPR:
11387 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11388 so all simple results must be passed through pedantic_non_lvalue. */
11389 if (TREE_CODE (arg0) == INTEGER_CST)
11391 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11392 tem = integer_zerop (arg0) ? op2 : op1;
11393 /* Only optimize constant conditions when the selected branch
11394 has the same type as the COND_EXPR. This avoids optimizing
11395 away "c ? x : throw", where the throw has a void type.
11396 Avoid throwing away an operand that contains a label. */
11397 if ((!TREE_SIDE_EFFECTS (unused_op)
11398 || !contains_label_p (unused_op))
11399 && (! VOID_TYPE_P (TREE_TYPE (tem))
11400 || VOID_TYPE_P (type)))
11401 return pedantic_non_lvalue_loc (loc, tem);
11402 return NULL_TREE;
11404 else if (TREE_CODE (arg0) == VECTOR_CST)
11406 unsigned HOST_WIDE_INT nelts;
11407 if ((TREE_CODE (arg1) == VECTOR_CST
11408 || TREE_CODE (arg1) == CONSTRUCTOR)
11409 && (TREE_CODE (arg2) == VECTOR_CST
11410 || TREE_CODE (arg2) == CONSTRUCTOR)
11411 && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
11413 vec_perm_builder sel (nelts, nelts, 1);
11414 for (unsigned int i = 0; i < nelts; i++)
11416 tree val = VECTOR_CST_ELT (arg0, i);
11417 if (integer_all_onesp (val))
11418 sel.quick_push (i);
11419 else if (integer_zerop (val))
11420 sel.quick_push (nelts + i);
11421 else /* Currently unreachable. */
11422 return NULL_TREE;
11424 vec_perm_indices indices (sel, 2, nelts);
11425 tree t = fold_vec_perm (type, arg1, arg2, indices);
11426 if (t != NULL_TREE)
11427 return t;
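/* For illustration, on hypothetical 4-element vectors A and B: a
   constant mask of { -1, 0, -1, 0 } selects { A[0], B[1], A[2], B[3] },
   i.e. the permutation { 0, 5, 2, 7 } handed to fold_vec_perm.  */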
11431 /* If we have A op B ? A : C, we may be able to convert this to a
11432 simpler expression, depending on the operation and the values
11433 of B and C. Signed zeros prevent all of these transformations,
11434 for reasons given above each one.
11436 Also try swapping the arguments and inverting the conditional. */
11437 if (COMPARISON_CLASS_P (arg0)
11438 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
11439 && !HONOR_SIGNED_ZEROS (element_mode (op1)))
11441 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11442 if (tem)
11443 return tem;
11446 if (COMPARISON_CLASS_P (arg0)
11447 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
11448 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11450 location_t loc0 = expr_location_or (arg0, loc);
11451 tem = fold_invert_truthvalue (loc0, arg0);
11452 if (tem && COMPARISON_CLASS_P (tem))
11454 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11455 if (tem)
11456 return tem;
11460 /* If the second operand is simpler than the third, swap them
11461 since that produces better jump optimization results. */
11462 if (truth_value_p (TREE_CODE (arg0))
11463 && tree_swap_operands_p (op1, op2))
11465 location_t loc0 = expr_location_or (arg0, loc);
11466 /* See if this can be inverted. If it can't, possibly because
11467 it was a floating-point inequality comparison, don't do
11468 anything. */
11469 tem = fold_invert_truthvalue (loc0, arg0);
11470 if (tem)
11471 return fold_build3_loc (loc, code, type, tem, op2, op1);
11474 /* Convert A ? 1 : 0 to simply A. */
11475 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11476 : (integer_onep (op1)
11477 && !VECTOR_TYPE_P (type)))
11478 && integer_zerop (op2)
11479 /* If we try to convert OP0 to our type, the
11480 call to fold will try to move the conversion inside
11481 a COND, which will recurse. In that case, the COND_EXPR
11482 is probably the best choice, so leave it alone. */
11483 && type == TREE_TYPE (arg0))
11484 return pedantic_non_lvalue_loc (loc, arg0);
11486 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11487 over COND_EXPR in cases such as floating point comparisons. */
11488 if (integer_zerop (op1)
11489 && code == COND_EXPR
11490 && integer_onep (op2)
11491 && !VECTOR_TYPE_P (type)
11492 && truth_value_p (TREE_CODE (arg0)))
11493 return pedantic_non_lvalue_loc (loc,
11494 fold_convert_loc (loc, type,
11495 invert_truthvalue_loc (loc,
11496 arg0)));
11498 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11499 if (TREE_CODE (arg0) == LT_EXPR
11500 && integer_zerop (TREE_OPERAND (arg0, 1))
11501 && integer_zerop (op2)
11502 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11504 /* sign_bit_p looks through both zero and sign extensions,
11505 but for this optimization only sign extensions are
11506 usable. */
11507 tree tem2 = TREE_OPERAND (arg0, 0);
11508 while (tem != tem2)
11510 if (TREE_CODE (tem2) != NOP_EXPR
11511 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11513 tem = NULL_TREE;
11514 break;
11516 tem2 = TREE_OPERAND (tem2, 0);
11518 /* sign_bit_p only checks ARG1 bits within A's precision.
11519 If <sign bit of A> has wider type than A, bits outside
11520 of A's precision in <sign bit of A> need to be checked.
11521 If they are all 0, this optimization needs to be done
11522 in unsigned A's type; if they are all 1, in signed A's type;
11523 otherwise this can't be done. */
11524 if (tem
11525 && TYPE_PRECISION (TREE_TYPE (tem))
11526 < TYPE_PRECISION (TREE_TYPE (arg1))
11527 && TYPE_PRECISION (TREE_TYPE (tem))
11528 < TYPE_PRECISION (type))
11530 int inner_width, outer_width;
11531 tree tem_type;
11533 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11534 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11535 if (outer_width > TYPE_PRECISION (type))
11536 outer_width = TYPE_PRECISION (type);
11538 wide_int mask = wi::shifted_mask
11539 (inner_width, outer_width - inner_width, false,
11540 TYPE_PRECISION (TREE_TYPE (arg1)));
11542 wide_int common = mask & wi::to_wide (arg1);
11543 if (common == mask)
11545 tem_type = signed_type_for (TREE_TYPE (tem));
11546 tem = fold_convert_loc (loc, tem_type, tem);
11548 else if (common == 0)
11550 tem_type = unsigned_type_for (TREE_TYPE (tem));
11551 tem = fold_convert_loc (loc, tem_type, tem);
11553 else
11554 tem = NULL;
11557 if (tem)
11558 return
11559 fold_convert_loc (loc, type,
11560 fold_build2_loc (loc, BIT_AND_EXPR,
11561 TREE_TYPE (tem), tem,
11562 fold_convert_loc (loc,
11563 TREE_TYPE (tem),
11564 arg1)));
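/* For illustration, with a hypothetical 32-bit int A:
   A < 0 ? INT_MIN : 0 folds to A & INT_MIN; the selected value is
   exactly the sign bit of A.  */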
11567 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11568 already handled above. */
11569 if (TREE_CODE (arg0) == BIT_AND_EXPR
11570 && integer_onep (TREE_OPERAND (arg0, 1))
11571 && integer_zerop (op2)
11572 && integer_pow2p (arg1))
11574 tree tem = TREE_OPERAND (arg0, 0);
11575 STRIP_NOPS (tem);
11576 if (TREE_CODE (tem) == RSHIFT_EXPR
11577 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11578 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11579 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11580 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11581 fold_convert_loc (loc, type,
11582 TREE_OPERAND (tem, 0)),
11583 op1);
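/* For illustration: ((A >> 4) & 1) ? 16 : 0 folds to A & 16; the
   selector already isolates bit 4, so the shift can be dropped.  */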
11586 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11587 is probably obsolete because the first operand should be a
11588 truth value (that's why we have the two cases above), but let's
11589 leave it in until we can confirm this for all front-ends. */
11590 if (integer_zerop (op2)
11591 && TREE_CODE (arg0) == NE_EXPR
11592 && integer_zerop (TREE_OPERAND (arg0, 1))
11593 && integer_pow2p (arg1)
11594 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11595 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11596 arg1, OEP_ONLY_CONST)
11597 /* operand_equal_p compares just the value, not the precision, so
11598 e.g. arg1 could be an 8-bit -128 and be a power of two, while the
11599 BIT_AND_EXPR's second operand is a 32-bit -128, which is not a
11600 power of two (or vice versa). */
11601 && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
11602 return pedantic_non_lvalue_loc (loc,
11603 fold_convert_loc (loc, type,
11604 TREE_OPERAND (arg0,
11605 0)));
11607 /* Disable the transformations below for vectors, since
11608 fold_binary_op_with_conditional_arg may undo them immediately,
11609 yielding an infinite loop. */
11610 if (code == VEC_COND_EXPR)
11611 return NULL_TREE;
11613 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11614 if (integer_zerop (op2)
11615 && truth_value_p (TREE_CODE (arg0))
11616 && truth_value_p (TREE_CODE (arg1))
11617 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11618 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11619 : TRUTH_ANDIF_EXPR,
11620 type, fold_convert_loc (loc, type, arg0), op1);
11622 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11623 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11624 && truth_value_p (TREE_CODE (arg0))
11625 && truth_value_p (TREE_CODE (arg1))
11626 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11628 location_t loc0 = expr_location_or (arg0, loc);
11629 /* Only perform transformation if ARG0 is easily inverted. */
11630 tem = fold_invert_truthvalue (loc0, arg0);
11631 if (tem)
11632 return fold_build2_loc (loc, code == VEC_COND_EXPR
11633 ? BIT_IOR_EXPR
11634 : TRUTH_ORIF_EXPR,
11635 type, fold_convert_loc (loc, type, tem),
11636 op1);
11639 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11640 if (integer_zerop (arg1)
11641 && truth_value_p (TREE_CODE (arg0))
11642 && truth_value_p (TREE_CODE (op2))
11643 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11645 location_t loc0 = expr_location_or (arg0, loc);
11646 /* Only perform transformation if ARG0 is easily inverted. */
11647 tem = fold_invert_truthvalue (loc0, arg0);
11648 if (tem)
11649 return fold_build2_loc (loc, code == VEC_COND_EXPR
11650 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11651 type, fold_convert_loc (loc, type, tem),
11652 op2);
11655 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11656 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11657 && truth_value_p (TREE_CODE (arg0))
11658 && truth_value_p (TREE_CODE (op2))
11659 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11660 return fold_build2_loc (loc, code == VEC_COND_EXPR
11661 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11662 type, fold_convert_loc (loc, type, arg0), op2);
11664 return NULL_TREE;
11666 case CALL_EXPR:
11667 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11668 of fold_ternary on them. */
11669 gcc_unreachable ();
11671 case BIT_FIELD_REF:
11672 if (TREE_CODE (arg0) == VECTOR_CST
11673 && (type == TREE_TYPE (TREE_TYPE (arg0))
11674 || (VECTOR_TYPE_P (type)
11675 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
11676 && tree_fits_uhwi_p (op1)
11677 && tree_fits_uhwi_p (op2))
11679 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11680 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11681 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11682 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11684 if (n != 0
11685 && (idx % width) == 0
11686 && (n % width) == 0
11687 && known_le ((idx + n) / width,
11688 TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
11690 idx = idx / width;
11691 n = n / width;
11693 if (TREE_CODE (arg0) == VECTOR_CST)
11695 if (n == 1)
11697 tem = VECTOR_CST_ELT (arg0, idx);
11698 if (VECTOR_TYPE_P (type))
11699 tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
11700 return tem;
11703 tree_vector_builder vals (type, n, 1);
11704 for (unsigned i = 0; i < n; ++i)
11705 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
11706 return vals.build ();
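/* For illustration, with a hypothetical constant v4si vector V:
   BIT_FIELD_REF <V, 32, 64> asks for 32 bits at bit offset 64,
   i.e. element 64/32 == 2, and folds to VECTOR_CST_ELT (V, 2).  */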
11711 /* On constants we can use native encode/interpret to constant
11712 fold (nearly) all BIT_FIELD_REFs. */
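/* E.g. on a hypothetical little-endian target with 8-bit units,
   BIT_FIELD_REF <0x01020304, 8, 8> encodes the 32-bit constant as
   the bytes 04 03 02 01, re-reads the byte at offset 1, and folds
   to 3.  */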
11713 if (CONSTANT_CLASS_P (arg0)
11714 && can_native_interpret_type_p (type)
11715 && BITS_PER_UNIT == 8
11716 && tree_fits_uhwi_p (op1)
11717 && tree_fits_uhwi_p (op2))
11719 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11720 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11721 /* Limit us to a reasonable amount of work. To relax the
11722 other limitations we need bit-shifting of the buffer
11723 and rounding up the size. */
11724 if (bitpos % BITS_PER_UNIT == 0
11725 && bitsize % BITS_PER_UNIT == 0
11726 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11728 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11729 unsigned HOST_WIDE_INT len
11730 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11731 bitpos / BITS_PER_UNIT);
11732 if (len > 0
11733 && len * BITS_PER_UNIT >= bitsize)
11735 tree v = native_interpret_expr (type, b,
11736 bitsize / BITS_PER_UNIT);
11737 if (v)
11738 return v;
11743 return NULL_TREE;
11745 case VEC_PERM_EXPR:
11746 if (TREE_CODE (arg2) == VECTOR_CST)
11748 /* Build a vector of integers from the tree mask. */
11749 vec_perm_builder builder;
11750 if (!tree_to_vec_perm_builder (&builder, arg2))
11751 return NULL_TREE;
11753 /* Create a vec_perm_indices for the integer vector. */
11754 poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
11755 bool single_arg = (op0 == op1);
11756 vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
11758 /* Check for cases that fold to OP0 or OP1 in their original
11759 element order. */
11760 if (sel.series_p (0, 1, 0, 1))
11761 return op0;
11762 if (sel.series_p (0, 1, nelts, 1))
11763 return op1;
11765 if (!single_arg)
11767 if (sel.all_from_input_p (0))
11768 op1 = op0;
11769 else if (sel.all_from_input_p (1))
11771 op0 = op1;
11772 sel.rotate_inputs (1);
11776 if ((TREE_CODE (op0) == VECTOR_CST
11777 || TREE_CODE (op0) == CONSTRUCTOR)
11778 && (TREE_CODE (op1) == VECTOR_CST
11779 || TREE_CODE (op1) == CONSTRUCTOR))
11781 tree t = fold_vec_perm (type, op0, op1, sel);
11782 if (t != NULL_TREE)
11783 return t;
11786 bool changed = (op0 == op1 && !single_arg);
11788 /* Generate a canonical form of the selector. */
11789 if (arg2 == op2 && sel.encoding () != builder)
11791 /* Some targets are deficient and fail to expand a single
11792 argument permutation while still allowing an equivalent
11793 2-argument version. */
11794 if (sel.ninputs () == 2
11795 || can_vec_perm_const_p (TYPE_MODE (type), sel, false))
11796 op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel);
11797 else
11799 vec_perm_indices sel2 (builder, 2, nelts);
11800 if (can_vec_perm_const_p (TYPE_MODE (type), sel2, false))
11801 op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel2);
11802 else
11803 /* Not directly supported with either encoding,
11804 so use the preferred form. */
11805 op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel);
11807 changed = true;
11810 if (changed)
11811 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11813 return NULL_TREE;
11815 case BIT_INSERT_EXPR:
11816 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
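/* E.g. with hypothetical 32-bit operands, BIT_INSERT_EXPR <0, v, 8>
   where v is an 8-bit constant 0xab masks out bits 8..15 of the
   first operand and ORs in 0xab << 8, folding to 0xab00.  */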
11817 if (TREE_CODE (arg0) == INTEGER_CST
11818 && TREE_CODE (arg1) == INTEGER_CST)
11820 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11821 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11822 wide_int tem = (wi::to_wide (arg0)
11823 & wi::shifted_mask (bitpos, bitsize, true,
11824 TYPE_PRECISION (type)));
11825 wide_int tem2
11826 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11827 bitsize), bitpos);
11828 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
11830 else if (TREE_CODE (arg0) == VECTOR_CST
11831 && CONSTANT_CLASS_P (arg1)
11832 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11833 TREE_TYPE (arg1)))
11835 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11836 unsigned HOST_WIDE_INT elsize
11837 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11838 if (bitpos % elsize == 0)
11840 unsigned k = bitpos / elsize;
11841 unsigned HOST_WIDE_INT nelts;
11842 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11843 return arg0;
11844 else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
11846 tree_vector_builder elts (type, nelts, 1);
11847 elts.quick_grow (nelts);
11848 for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
11849 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
11850 return elts.build ();
11854 return NULL_TREE;
11856 default:
11857 return NULL_TREE;
11858 } /* switch (code) */
11861 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11862 of an array (or vector). */
11864 tree
11865 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11867 tree index_type = NULL_TREE;
11868 offset_int low_bound = 0;
11870 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11872 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11873 if (domain_type && TYPE_MIN_VALUE (domain_type))
11875 /* Static constructors for variably sized objects make no sense. */
11876 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11877 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11878 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11882 if (index_type)
11883 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11884 TYPE_SIGN (index_type));
11886 offset_int index = low_bound - 1;
11887 if (index_type)
11888 index = wi::ext (index, TYPE_PRECISION (index_type),
11889 TYPE_SIGN (index_type));
11891 offset_int max_index;
11892 unsigned HOST_WIDE_INT cnt;
11893 tree cfield, cval;
11895 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11897 /* An array constructor might explicitly set the index, specify a range,
11898 or leave the index NULL, meaning that it is the next index after the
11899 previous one. */
11900 if (cfield)
11902 if (TREE_CODE (cfield) == INTEGER_CST)
11903 max_index = index = wi::to_offset (cfield);
11904 else
11906 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11907 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11908 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11911 else
11913 index += 1;
11914 if (index_type)
11915 index = wi::ext (index, TYPE_PRECISION (index_type),
11916 TYPE_SIGN (index_type));
11917 max_index = index;
11920 /* Do we have a match? */
11921 if (wi::cmpu (access_index, index) >= 0
11922 && wi::cmpu (access_index, max_index) <= 0)
11923 return cval;
11925 return NULL_TREE;
11928 /* Perform constant folding and related simplification of EXPR.
11929 The related simplifications include x*1 => x, x*0 => 0, etc.,
11930 and application of the associative law.
11931 NOP_EXPR conversions may be removed freely (as long as we
11932 are careful not to change the type of the overall expression).
11933 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11934 but we can constant-fold them if they have constant operands. */
11936 #ifdef ENABLE_FOLD_CHECKING
11937 # define fold(x) fold_1 (x)
11938 static tree fold_1 (tree);
11939 static
11940 #endif
11941 tree
11942 fold (tree expr)
11944 const tree t = expr;
11945 enum tree_code code = TREE_CODE (t);
11946 enum tree_code_class kind = TREE_CODE_CLASS (code);
11947 tree tem;
11948 location_t loc = EXPR_LOCATION (expr);
11950 /* Return right away if a constant. */
11951 if (kind == tcc_constant)
11952 return t;
11954 /* CALL_EXPR-like objects with variable numbers of operands are
11955 treated specially. */
11956 if (kind == tcc_vl_exp)
11958 if (code == CALL_EXPR)
11960 tem = fold_call_expr (loc, expr, false);
11961 return tem ? tem : expr;
11963 return expr;
11966 if (IS_EXPR_CODE_CLASS (kind))
11968 tree type = TREE_TYPE (t);
11969 tree op0, op1, op2;
11971 switch (TREE_CODE_LENGTH (code))
11973 case 1:
11974 op0 = TREE_OPERAND (t, 0);
11975 tem = fold_unary_loc (loc, code, type, op0);
11976 return tem ? tem : expr;
11977 case 2:
11978 op0 = TREE_OPERAND (t, 0);
11979 op1 = TREE_OPERAND (t, 1);
11980 tem = fold_binary_loc (loc, code, type, op0, op1);
11981 return tem ? tem : expr;
11982 case 3:
11983 op0 = TREE_OPERAND (t, 0);
11984 op1 = TREE_OPERAND (t, 1);
11985 op2 = TREE_OPERAND (t, 2);
11986 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11987 return tem ? tem : expr;
11988 default:
11989 break;
11993 switch (code)
11995 case ARRAY_REF:
11997 tree op0 = TREE_OPERAND (t, 0);
11998 tree op1 = TREE_OPERAND (t, 1);
12000 if (TREE_CODE (op1) == INTEGER_CST
12001 && TREE_CODE (op0) == CONSTRUCTOR
12002 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12004 tree val = get_array_ctor_element_at_index (op0,
12005 wi::to_offset (op1));
12006 if (val)
12007 return val;
12010 return t;
12013 /* Return a VECTOR_CST if possible. */
12014 case CONSTRUCTOR:
12016 tree type = TREE_TYPE (t);
12017 if (TREE_CODE (type) != VECTOR_TYPE)
12018 return t;
12020 unsigned i;
12021 tree val;
12022 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12023 if (! CONSTANT_CLASS_P (val))
12024 return t;
12026 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12029 case CONST_DECL:
12030 return fold (DECL_INITIAL (t));
12032 default:
12033 return t;
12034 } /* switch (code) */
12037 #ifdef ENABLE_FOLD_CHECKING
12038 #undef fold
12040 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12041 hash_table<nofree_ptr_hash<const tree_node> > *);
12042 static void fold_check_failed (const_tree, const_tree);
12043 void print_fold_checksum (const_tree);
12045 /* When --enable-checking=fold is in effect, compute a digest of expr
12046 before and after the actual fold call to verify that fold did not
12047 accidentally change the original expr. */
12049 tree
12050 fold (tree expr)
12052 tree ret;
12053 struct md5_ctx ctx;
12054 unsigned char checksum_before[16], checksum_after[16];
12055 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12057 md5_init_ctx (&ctx);
12058 fold_checksum_tree (expr, &ctx, &ht);
12059 md5_finish_ctx (&ctx, checksum_before);
12060 ht.empty ();
12062 ret = fold_1 (expr);
12064 md5_init_ctx (&ctx);
12065 fold_checksum_tree (expr, &ctx, &ht);
12066 md5_finish_ctx (&ctx, checksum_after);
12068 if (memcmp (checksum_before, checksum_after, 16))
12069 fold_check_failed (expr, ret);
12071 return ret;
12074 void
12075 print_fold_checksum (const_tree expr)
12077 struct md5_ctx ctx;
12078 unsigned char checksum[16], cnt;
12079 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12081 md5_init_ctx (&ctx);
12082 fold_checksum_tree (expr, &ctx, &ht);
12083 md5_finish_ctx (&ctx, checksum);
12084 for (cnt = 0; cnt < 16; ++cnt)
12085 fprintf (stderr, "%02x", checksum[cnt]);
12086 putc ('\n', stderr);
12089 static void
12090 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12092 internal_error ("fold check: original tree changed by fold");
12095 static void
12096 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12097 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12099 const tree_node **slot;
12100 enum tree_code code;
12101 union tree_node buf;
12102 int i, len;
12104 recursive_label:
12105 if (expr == NULL)
12106 return;
12107 slot = ht->find_slot (expr, INSERT);
12108 if (*slot != NULL)
12109 return;
12110 *slot = expr;
12111 code = TREE_CODE (expr);
12112 if (TREE_CODE_CLASS (code) == tcc_declaration
12113 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12115 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12116 memcpy ((char *) &buf, expr, tree_size (expr));
12117 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12118 buf.decl_with_vis.symtab_node = NULL;
12119 expr = (tree) &buf;
12121 else if (TREE_CODE_CLASS (code) == tcc_type
12122 && (TYPE_POINTER_TO (expr)
12123 || TYPE_REFERENCE_TO (expr)
12124 || TYPE_CACHED_VALUES_P (expr)
12125 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12126 || TYPE_NEXT_VARIANT (expr)
12127 || TYPE_ALIAS_SET_KNOWN_P (expr)))
12129 /* Allow these fields to be modified. */
12130 tree tmp;
12131 memcpy ((char *) &buf, expr, tree_size (expr));
12132 expr = tmp = (tree) &buf;
12133 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12134 TYPE_POINTER_TO (tmp) = NULL;
12135 TYPE_REFERENCE_TO (tmp) = NULL;
12136 TYPE_NEXT_VARIANT (tmp) = NULL;
12137 TYPE_ALIAS_SET (tmp) = -1;
12138 if (TYPE_CACHED_VALUES_P (tmp))
12140 TYPE_CACHED_VALUES_P (tmp) = 0;
12141 TYPE_CACHED_VALUES (tmp) = NULL;
12144 md5_process_bytes (expr, tree_size (expr), ctx);
12145 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12146 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12147 if (TREE_CODE_CLASS (code) != tcc_type
12148 && TREE_CODE_CLASS (code) != tcc_declaration
12149 && code != TREE_LIST
12150 && code != SSA_NAME
12151 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12152 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12153 switch (TREE_CODE_CLASS (code))
12155 case tcc_constant:
12156 switch (code)
12158 case STRING_CST:
12159 md5_process_bytes (TREE_STRING_POINTER (expr),
12160 TREE_STRING_LENGTH (expr), ctx);
12161 break;
12162 case COMPLEX_CST:
12163 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12164 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12165 break;
12166 case VECTOR_CST:
12167 len = vector_cst_encoded_nelts (expr);
12168 for (i = 0; i < len; ++i)
12169 fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
12170 break;
12171 default:
12172 break;
12174 break;
12175 case tcc_exceptional:
12176 switch (code)
12178 case TREE_LIST:
12179 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12180 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12181 expr = TREE_CHAIN (expr);
12182 goto recursive_label;
12183 break;
12184 case TREE_VEC:
12185 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12186 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12187 break;
12188 default:
12189 break;
12191 break;
12192 case tcc_expression:
12193 case tcc_reference:
12194 case tcc_comparison:
12195 case tcc_unary:
12196 case tcc_binary:
12197 case tcc_statement:
12198 case tcc_vl_exp:
12199 len = TREE_OPERAND_LENGTH (expr);
12200 for (i = 0; i < len; ++i)
12201 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12202 break;
12203 case tcc_declaration:
12204 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12205 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12206 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12208 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12209 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12210 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12211 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12212 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12215 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12217 if (TREE_CODE (expr) == FUNCTION_DECL)
12219 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12220 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12222 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12224 break;
12225 case tcc_type:
12226 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12227 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12228 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12229 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12230 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12231 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12232 if (INTEGRAL_TYPE_P (expr)
12233 || SCALAR_FLOAT_TYPE_P (expr))
12235 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12236 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12238 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12239 if (TREE_CODE (expr) == RECORD_TYPE
12240 || TREE_CODE (expr) == UNION_TYPE
12241 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12242 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12243 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12244 break;
12245 default:
12246 break;
12250 /* Helper function for outputting the checksum of a tree T. When
12251 debugging with gdb, you can "define mynext" to be "next" followed
12252 by "call debug_fold_checksum (op0)", then just trace down till the
12253 outputs differ. */
12255 DEBUG_FUNCTION void
12256 debug_fold_checksum (const_tree t)
12258 int i;
12259 unsigned char checksum[16];
12260 struct md5_ctx ctx;
12261 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12263 md5_init_ctx (&ctx);
12264 fold_checksum_tree (t, &ctx, &ht);
12265 md5_finish_ctx (&ctx, checksum);
12266 ht.empty ();
12268 for (i = 0; i < 16; i++)
12269 fprintf (stderr, "%d ", checksum[i]);
12271 fprintf (stderr, "\n");
12274 #endif
12276 /* Fold a unary tree expression with code CODE of type TYPE with an
12277 operand OP0. LOC is the location of the resulting expression.
12278 Return a folded expression if successful. Otherwise, return a tree
12279 expression with code CODE of type TYPE with an operand OP0. */
12281 tree
12282 fold_build1_loc (location_t loc,
12283 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12285 tree tem;
12286 #ifdef ENABLE_FOLD_CHECKING
12287 unsigned char checksum_before[16], checksum_after[16];
12288 struct md5_ctx ctx;
12289 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12291 md5_init_ctx (&ctx);
12292 fold_checksum_tree (op0, &ctx, &ht);
12293 md5_finish_ctx (&ctx, checksum_before);
12294 ht.empty ();
12295 #endif
12297 tem = fold_unary_loc (loc, code, type, op0);
12298 if (!tem)
12299 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
12301 #ifdef ENABLE_FOLD_CHECKING
12302 md5_init_ctx (&ctx);
12303 fold_checksum_tree (op0, &ctx, &ht);
12304 md5_finish_ctx (&ctx, checksum_after);
12306 if (memcmp (checksum_before, checksum_after, 16))
12307 fold_check_failed (op0, tem);
12308 #endif
12309 return tem;
12312 /* Fold a binary tree expression with code CODE of type TYPE with
12313 operands OP0 and OP1. LOC is the location of the resulting
12314 expression. Return a folded expression if successful. Otherwise,
12315 return a tree expression with code CODE of type TYPE with operands
12316 OP0 and OP1. */
12318 tree
12319 fold_build2_loc (location_t loc,
12320 enum tree_code code, tree type, tree op0, tree op1
12321 MEM_STAT_DECL)
12323 tree tem;
12324 #ifdef ENABLE_FOLD_CHECKING
12325 unsigned char checksum_before_op0[16],
12326 checksum_before_op1[16],
12327 checksum_after_op0[16],
12328 checksum_after_op1[16];
12329 struct md5_ctx ctx;
12330 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12332 md5_init_ctx (&ctx);
12333 fold_checksum_tree (op0, &ctx, &ht);
12334 md5_finish_ctx (&ctx, checksum_before_op0);
12335 ht.empty ();
12337 md5_init_ctx (&ctx);
12338 fold_checksum_tree (op1, &ctx, &ht);
12339 md5_finish_ctx (&ctx, checksum_before_op1);
12340 ht.empty ();
12341 #endif
12343 tem = fold_binary_loc (loc, code, type, op0, op1);
12344 if (!tem)
12345 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12347 #ifdef ENABLE_FOLD_CHECKING
12348 md5_init_ctx (&ctx);
12349 fold_checksum_tree (op0, &ctx, &ht);
12350 md5_finish_ctx (&ctx, checksum_after_op0);
12351 ht.empty ();
12353 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12354 fold_check_failed (op0, tem);
12356 md5_init_ctx (&ctx);
12357 fold_checksum_tree (op1, &ctx, &ht);
12358 md5_finish_ctx (&ctx, checksum_after_op1);
12360 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12361 fold_check_failed (op1, tem);
12362 #endif
12363 return tem;
12366 /* Fold a ternary tree expression with code CODE of type TYPE with
12367 operands OP0, OP1, and OP2. Return a folded expression if
12368 successful. Otherwise, return a tree expression with code CODE of
12369 type TYPE with operands OP0, OP1, and OP2. */
12371 tree
12372 fold_build3_loc (location_t loc, enum tree_code code, tree type,
12373 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12375 tree tem;
12376 #ifdef ENABLE_FOLD_CHECKING
12377 unsigned char checksum_before_op0[16],
12378 checksum_before_op1[16],
12379 checksum_before_op2[16],
12380 checksum_after_op0[16],
12381 checksum_after_op1[16],
12382 checksum_after_op2[16];
12383 struct md5_ctx ctx;
12384 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12386 md5_init_ctx (&ctx);
12387 fold_checksum_tree (op0, &ctx, &ht);
12388 md5_finish_ctx (&ctx, checksum_before_op0);
12389 ht.empty ();
12391 md5_init_ctx (&ctx);
12392 fold_checksum_tree (op1, &ctx, &ht);
12393 md5_finish_ctx (&ctx, checksum_before_op1);
12394 ht.empty ();
12396 md5_init_ctx (&ctx);
12397 fold_checksum_tree (op2, &ctx, &ht);
12398 md5_finish_ctx (&ctx, checksum_before_op2);
12399 ht.empty ();
12400 #endif
12402 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12403 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12404 if (!tem)
12405 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12407 #ifdef ENABLE_FOLD_CHECKING
12408 md5_init_ctx (&ctx);
12409 fold_checksum_tree (op0, &ctx, &ht);
12410 md5_finish_ctx (&ctx, checksum_after_op0);
12411 ht.empty ();
12413 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12414 fold_check_failed (op0, tem);
12416 md5_init_ctx (&ctx);
12417 fold_checksum_tree (op1, &ctx, &ht);
12418 md5_finish_ctx (&ctx, checksum_after_op1);
12419 ht.empty ();
12421 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12422 fold_check_failed (op1, tem);
12424 md5_init_ctx (&ctx);
12425 fold_checksum_tree (op2, &ctx, &ht);
12426 md5_finish_ctx (&ctx, checksum_after_op2);
12428 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12429 fold_check_failed (op2, tem);
12430 #endif
12431 return tem;
12434 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
12435 arguments in ARGARRAY, and a null static chain.
12436 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12437 of type TYPE from the given operands as constructed by build_call_array. */
12439 tree
12440 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12441 int nargs, tree *argarray)
12443 tree tem;
12444 #ifdef ENABLE_FOLD_CHECKING
12445 unsigned char checksum_before_fn[16],
12446 checksum_before_arglist[16],
12447 checksum_after_fn[16],
12448 checksum_after_arglist[16];
12449 struct md5_ctx ctx;
12450 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12451 int i;
12453 md5_init_ctx (&ctx);
12454 fold_checksum_tree (fn, &ctx, &ht);
12455 md5_finish_ctx (&ctx, checksum_before_fn);
12456 ht.empty ();
12458 md5_init_ctx (&ctx);
12459 for (i = 0; i < nargs; i++)
12460 fold_checksum_tree (argarray[i], &ctx, &ht);
12461 md5_finish_ctx (&ctx, checksum_before_arglist);
12462 ht.empty ();
12463 #endif
12465 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12466 if (!tem)
12467 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12469 #ifdef ENABLE_FOLD_CHECKING
12470 md5_init_ctx (&ctx);
12471 fold_checksum_tree (fn, &ctx, &ht);
12472 md5_finish_ctx (&ctx, checksum_after_fn);
12473 ht.empty ();
12475 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12476 fold_check_failed (fn, tem);
12478 md5_init_ctx (&ctx);
12479 for (i = 0; i < nargs; i++)
12480 fold_checksum_tree (argarray[i], &ctx, &ht);
12481 md5_finish_ctx (&ctx, checksum_after_arglist);
12483 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12484 fold_check_failed (NULL_TREE, tem);
12485 #endif
12486 return tem;
12489 /* Perform constant folding and related simplification of an initializer
12490 expression. These routines behave identically to "fold_buildN" but ignore
12491 potential run-time traps and exceptions that fold must preserve. */
12493 #define START_FOLD_INIT \
12494 int saved_signaling_nans = flag_signaling_nans;\
12495 int saved_trapping_math = flag_trapping_math;\
12496 int saved_rounding_math = flag_rounding_math;\
12497 int saved_trapv = flag_trapv;\
12498 int saved_folding_initializer = folding_initializer;\
12499 flag_signaling_nans = 0;\
12500 flag_trapping_math = 0;\
12501 flag_rounding_math = 0;\
12502 flag_trapv = 0;\
12503 folding_initializer = 1;
12505 #define END_FOLD_INIT \
12506 flag_signaling_nans = saved_signaling_nans;\
12507 flag_trapping_math = saved_trapping_math;\
12508 flag_rounding_math = saved_rounding_math;\
12509 flag_trapv = saved_trapv;\
12510 folding_initializer = saved_folding_initializer;
12512 tree
12513 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12514 tree type, tree op)
12516 tree result;
12517 START_FOLD_INIT;
12519 result = fold_build1_loc (loc, code, type, op);
12521 END_FOLD_INIT;
12522 return result;
12525 tree
12526 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12527 tree type, tree op0, tree op1)
12529 tree result;
12530 START_FOLD_INIT;
12532 result = fold_build2_loc (loc, code, type, op0, op1);
12534 END_FOLD_INIT;
12535 return result;
12538 tree
12539 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12540 int nargs, tree *argarray)
12542 tree result;
12543 START_FOLD_INIT;
12545 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12547 END_FOLD_INIT;
12548 return result;
12551 #undef START_FOLD_INIT
12552 #undef END_FOLD_INIT
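/* A minimal usage sketch (hypothetical caller, not part of this file):
   fold a floating-point division appearing in a static initializer.
   Unlike plain fold_build2_loc, the _initializer variant may fold even
   under -ftrapping-math, since initializers are evaluated at compile
   time and cannot trap at run time.  */

static tree
example_fold_initializer_div (location_t loc, tree type, tree num, tree den)
{
  return fold_build2_initializer_loc (loc, RDIV_EXPR, type, num, den);
}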
12554 /* Determine if the first argument is a multiple of the second argument.
12555 Return 0 if it is not, or if we cannot easily determine it to be.
12557 An example of the sort of thing we care about (at this point; this routine
12558 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12559 fold cases do now) is discovering that
12561 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12563 is a multiple of
12565 SAVE_EXPR (J * 8)
12567 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12569 This code also handles discovering that
12571 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12573 is a multiple of 8 so we don't have to worry about dealing with a
12574 possible remainder.
12576 Note that we *look* inside a SAVE_EXPR only to determine how it was
12577 calculated; it is not safe for fold to do much of anything else with the
12578 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12579 at run time. For example, the latter example above *cannot* be implemented
12580 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12581 evaluation time of the original SAVE_EXPR is not necessarily the same at
12582 the time the new expression is evaluated. The only optimization of this
12583 sort that would be valid is changing
12585 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12587 divided by 8 to
12589 SAVE_EXPR (I) * SAVE_EXPR (J)
12591 (where the same SAVE_EXPR (J) is used in the original and the
12592 transformed version). */
12594 int
12595 multiple_of_p (tree type, const_tree top, const_tree bottom)
12597 gimple *stmt;
12598 tree t1, op1, op2;
12600 if (operand_equal_p (top, bottom, 0))
12601 return 1;
12603 if (TREE_CODE (type) != INTEGER_TYPE)
12604 return 0;
12606 switch (TREE_CODE (top))
12608 case BIT_AND_EXPR:
12609 /* Bitwise and provides a power of two multiple. If the mask is
12610 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12611 if (!integer_pow2p (bottom))
12612 return 0;
12613 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12614 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12616 case MULT_EXPR:
12617 if (TREE_CODE (bottom) == INTEGER_CST)
12619 op1 = TREE_OPERAND (top, 0);
12620 op2 = TREE_OPERAND (top, 1);
12621 if (TREE_CODE (op1) == INTEGER_CST)
12622 std::swap (op1, op2);
12623 if (TREE_CODE (op2) == INTEGER_CST)
12625 if (multiple_of_p (type, op2, bottom))
12626 return 1;
12627 /* Handle multiple_of_p ((x * 2 + 2) * 4, 8). */
12628 if (multiple_of_p (type, bottom, op2))
12630 widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
12631 wi::to_widest (op2));
12632 if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
12634 op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
12635 return multiple_of_p (type, op1, op2);
12638 return multiple_of_p (type, op1, bottom);
12641 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12642 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12644 case MINUS_EXPR:
12645 /* It is impossible to prove precisely whether op0 - op1 is a multiple
12646 of bottom, so be conservative and check whether both op0 and op1
12647 are multiples of bottom. Note we check the second operand first
12648 since it's usually simpler. */
12649 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12650 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12652 case PLUS_EXPR:
12653 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12654 as op0 - 3 if the expression has unsigned type. For example,
12655 (X / 3) * 3 + 0xfffffffd is a multiple of 3, but 0xfffffffd is not. */
12656 op1 = TREE_OPERAND (top, 1);
12657 if (TYPE_UNSIGNED (type)
12658 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
12659 op1 = fold_build1 (NEGATE_EXPR, type, op1);
12660 return (multiple_of_p (type, op1, bottom)
12661 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12663 case LSHIFT_EXPR:
12664 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12666 op1 = TREE_OPERAND (top, 1);
12667 /* const_binop may not detect overflow correctly,
12668 so check for it explicitly here. */
12669 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
12670 wi::to_wide (op1))
12671 && (t1 = fold_convert (type,
12672 const_binop (LSHIFT_EXPR, size_one_node,
12673 op1))) != 0
12674 && !TREE_OVERFLOW (t1))
12675 return multiple_of_p (type, t1, bottom);
12677 return 0;
12679 case NOP_EXPR:
12680 /* Can't handle conversions from non-integral or wider integral type. */
12681 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12682 || (TYPE_PRECISION (type)
12683 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12684 return 0;
12686 /* fall through */
12688 case SAVE_EXPR:
12689 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12691 case COND_EXPR:
12692 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12693 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12695 case INTEGER_CST:
12696 if (TREE_CODE (bottom) != INTEGER_CST
12697 || integer_zerop (bottom)
12698 || (TYPE_UNSIGNED (type)
12699 && (tree_int_cst_sgn (top) < 0
12700 || tree_int_cst_sgn (bottom) < 0)))
12701 return 0;
12702 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12703 SIGNED);
12705 case SSA_NAME:
12706 if (TREE_CODE (bottom) == INTEGER_CST
12707 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
12708 && gimple_code (stmt) == GIMPLE_ASSIGN)
12710 enum tree_code code = gimple_assign_rhs_code (stmt);
12712 /* Check for special cases to see if top is defined as a multiple
12713 of bottom:
12715 top = X & ~(bottom - 1) ; bottom is a power of 2
12717 or
12719 Y = X % bottom
12720 top = X - Y. */
12721 if (code == BIT_AND_EXPR
12722 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12723 && TREE_CODE (op2) == INTEGER_CST
12724 && integer_pow2p (bottom)
12725 && wi::multiple_of_p (wi::to_widest (op2),
12726 wi::to_widest (bottom), UNSIGNED))
12727 return 1;
12729 op1 = gimple_assign_rhs1 (stmt);
12730 if (code == MINUS_EXPR
12731 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12732 && TREE_CODE (op2) == SSA_NAME
12733 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
12734 && gimple_code (stmt) == GIMPLE_ASSIGN
12735 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
12736 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
12737 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
12738 return 1;
12741 /* fall through */
12743 default:
12744 if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
12745 return multiple_p (wi::to_poly_widest (top),
12746 wi::to_poly_widest (bottom));
12748 return 0;
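/* A minimal usage sketch (hypothetical helper, not part of this file):
   ask whether an offset expression is provably a multiple of an
   alignment.  Note the answer is conservative: 0 means "not provably a
   multiple", not "provably not a multiple".  */

static bool
example_provably_aligned_p (tree offset, unsigned HOST_WIDE_INT align)
{
  tree div = build_int_cst (TREE_TYPE (offset), align);
  return multiple_of_p (TREE_TYPE (offset), offset, div) != 0;
}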
12752 #define tree_expr_nonnegative_warnv_p(X, Y) \
12753 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12755 #define RECURSE(X) \
12756 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
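/* The two defines above implement a recursion guard: the first turns any
   direct call of tree_expr_nonnegative_warnv_p inside this file into a
   compile-time error via _Pragma, while RECURSE re-enables the call by
   parenthesizing the function name (a function-like macro only expands
   when the name is immediately followed by an argument list) and threads
   the incremented DEPTH through.  The same idiom, sketched with
   hypothetical names:

     bool my_query (tree t, int depth);
     #define my_query(T) _Pragma ("GCC error \"Use MY_RECURSE\"") false
     #define MY_RECURSE(T) ((my_query) (T, depth + 1))

   Inside the body of my_query, writing my_query (x) is rejected at
   compile time, while MY_RECURSE (x) performs the depth-limited call.  */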
12758 /* Return true if CODE or TYPE is known to be non-negative. */
12760 static bool
12761 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12763 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12764 && truth_value_p (code))
12765 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12766 have a signed:1 type (whose values are -1 and 0). */
12767 return true;
12768 return false;
12771 /* Return true if (CODE OP0) is known to be non-negative. If the return
12772 value is based on the assumption that signed overflow is undefined,
12773 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12774 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12776 bool
12777 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12778 bool *strict_overflow_p, int depth)
12780 if (TYPE_UNSIGNED (type))
12781 return true;
12783 switch (code)
12785 case ABS_EXPR:
12786 /* We can't return 1 if flag_wrapv is set because
12787 ABS_EXPR<INT_MIN> = INT_MIN. */
12788 if (!ANY_INTEGRAL_TYPE_P (type))
12789 return true;
12790 if (TYPE_OVERFLOW_UNDEFINED (type))
12792 *strict_overflow_p = true;
12793 return true;
12795 break;
12797 case NON_LVALUE_EXPR:
12798 case FLOAT_EXPR:
12799 case FIX_TRUNC_EXPR:
12800 return RECURSE (op0);
12802 CASE_CONVERT:
12804 tree inner_type = TREE_TYPE (op0);
12805 tree outer_type = type;
12807 if (TREE_CODE (outer_type) == REAL_TYPE)
12809 if (TREE_CODE (inner_type) == REAL_TYPE)
12810 return RECURSE (op0);
12811 if (INTEGRAL_TYPE_P (inner_type))
12813 if (TYPE_UNSIGNED (inner_type))
12814 return true;
12815 return RECURSE (op0);
12818 else if (INTEGRAL_TYPE_P (outer_type))
12820 if (TREE_CODE (inner_type) == REAL_TYPE)
12821 return RECURSE (op0);
12822 if (INTEGRAL_TYPE_P (inner_type))
12823 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12824 && TYPE_UNSIGNED (inner_type);
12827 break;
12829 default:
12830 return tree_simple_nonnegative_warnv_p (code, type);
12833 /* We don't know sign of `t', so be conservative and return false. */
12834 return false;
12837 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12838 value is based on the assumption that signed overflow is undefined,
12839 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12840 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12842 bool
12843 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12844 tree op1, bool *strict_overflow_p,
12845 int depth)
12847 if (TYPE_UNSIGNED (type))
12848 return true;
12850 switch (code)
12852 case POINTER_PLUS_EXPR:
12853 case PLUS_EXPR:
12854 if (FLOAT_TYPE_P (type))
12855 return RECURSE (op0) && RECURSE (op1);
12857 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12858 both unsigned and at least 2 bits shorter than the result. */
12859 if (TREE_CODE (type) == INTEGER_TYPE
12860 && TREE_CODE (op0) == NOP_EXPR
12861 && TREE_CODE (op1) == NOP_EXPR)
12863 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12864 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12865 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12866 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12868 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12869 TYPE_PRECISION (inner2)) + 1;
12870 return prec < TYPE_PRECISION (type);
12873 break;
12875 case MULT_EXPR:
12876 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12878 /* x * x is always non-negative for floating point x
12879 or in the absence of overflow. */
12880 if (operand_equal_p (op0, op1, 0)
12881 || (RECURSE (op0) && RECURSE (op1)))
12883 if (ANY_INTEGRAL_TYPE_P (type)
12884 && TYPE_OVERFLOW_UNDEFINED (type))
12885 *strict_overflow_p = true;
12886 return true;
12890 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12891 both unsigned and the sum of their precisions is less than that of the result. */
12892 if (TREE_CODE (type) == INTEGER_TYPE
12893 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12894 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12896 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12897 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12898 : TREE_TYPE (op0);
12899 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12900 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12901 : TREE_TYPE (op1);
12903 bool unsigned0 = TYPE_UNSIGNED (inner0);
12904 bool unsigned1 = TYPE_UNSIGNED (inner1);
12906 if (TREE_CODE (op0) == INTEGER_CST)
12907 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12909 if (TREE_CODE (op1) == INTEGER_CST)
12910 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12912 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12913 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12915 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12916 ? tree_int_cst_min_precision (op0, UNSIGNED)
12917 : TYPE_PRECISION (inner0);
12919 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12920 ? tree_int_cst_min_precision (op1, UNSIGNED)
12921 : TYPE_PRECISION (inner1);
12923 return precision0 + precision1 < TYPE_PRECISION (type);
12926 return false;
12928 case BIT_AND_EXPR:
12929 case MAX_EXPR:
12930 return RECURSE (op0) || RECURSE (op1);
12932 case BIT_IOR_EXPR:
12933 case BIT_XOR_EXPR:
12934 case MIN_EXPR:
12935 case RDIV_EXPR:
12936 case TRUNC_DIV_EXPR:
12937 case CEIL_DIV_EXPR:
12938 case FLOOR_DIV_EXPR:
12939 case ROUND_DIV_EXPR:
12940 return RECURSE (op0) && RECURSE (op1);
12942 case TRUNC_MOD_EXPR:
12943 return RECURSE (op0);
12945 case FLOOR_MOD_EXPR:
12946 return RECURSE (op1);
12948 case CEIL_MOD_EXPR:
12949 case ROUND_MOD_EXPR:
12950 default:
12951 return tree_simple_nonnegative_warnv_p (code, type);
12954 /* We don't know sign of `t', so be conservative and return false. */
12955 return false;
12958 /* Return true if T is known to be non-negative. If the return
12959 value is based on the assumption that signed overflow is undefined,
12960 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12961 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12963 bool
12964 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12966 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12967 return true;
12969 switch (TREE_CODE (t))
12971 case INTEGER_CST:
12972 return tree_int_cst_sgn (t) >= 0;
12974 case REAL_CST:
12975 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12977 case FIXED_CST:
12978 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12980 case COND_EXPR:
12981 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12983 case SSA_NAME:
12984 /* Limit the depth of recursion to avoid quadratic behavior.
12985 This is expected to catch almost all occurrences in practice.
12986 If this code misses important cases that unbounded recursion
12987 would not, passes that need this information could be revised
12988 to provide it through dataflow propagation. */
12989 return (!name_registered_for_update_p (t)
12990 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12991 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12992 strict_overflow_p, depth));
12994 default:
12995 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12999 /* Return true if a call to FN with arguments ARG0 and ARG1 is known to be non-negative. If the return
13000 value is based on the assumption that signed overflow is undefined,
13001 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13002 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13004 bool
13005 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13006 bool *strict_overflow_p, int depth)
13008 switch (fn)
13010 CASE_CFN_ACOS:
13011 CASE_CFN_ACOSH:
13012 CASE_CFN_CABS:
13013 CASE_CFN_COSH:
13014 CASE_CFN_ERFC:
13015 CASE_CFN_EXP:
13016 CASE_CFN_EXP10:
13017 CASE_CFN_EXP2:
13018 CASE_CFN_FABS:
13019 CASE_CFN_FDIM:
13020 CASE_CFN_HYPOT:
13021 CASE_CFN_POW10:
13022 CASE_CFN_FFS:
13023 CASE_CFN_PARITY:
13024 CASE_CFN_POPCOUNT:
13025 CASE_CFN_CLZ:
13026 CASE_CFN_CLRSB:
13027 case CFN_BUILT_IN_BSWAP32:
13028 case CFN_BUILT_IN_BSWAP64:
13029 /* Always true. */
13030 return true;
13032 CASE_CFN_SQRT:
13033 CASE_CFN_SQRT_FN:
13034 /* sqrt(-0.0) is -0.0. */
13035 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13036 return true;
13037 return RECURSE (arg0);
13039 CASE_CFN_ASINH:
13040 CASE_CFN_ATAN:
13041 CASE_CFN_ATANH:
13042 CASE_CFN_CBRT:
13043 CASE_CFN_CEIL:
13044 CASE_CFN_CEIL_FN:
13045 CASE_CFN_ERF:
13046 CASE_CFN_EXPM1:
13047 CASE_CFN_FLOOR:
13048 CASE_CFN_FLOOR_FN:
13049 CASE_CFN_FMOD:
13050 CASE_CFN_FREXP:
13051 CASE_CFN_ICEIL:
13052 CASE_CFN_IFLOOR:
13053 CASE_CFN_IRINT:
13054 CASE_CFN_IROUND:
13055 CASE_CFN_LCEIL:
13056 CASE_CFN_LDEXP:
13057 CASE_CFN_LFLOOR:
13058 CASE_CFN_LLCEIL:
13059 CASE_CFN_LLFLOOR:
13060 CASE_CFN_LLRINT:
13061 CASE_CFN_LLROUND:
13062 CASE_CFN_LRINT:
13063 CASE_CFN_LROUND:
13064 CASE_CFN_MODF:
13065 CASE_CFN_NEARBYINT:
13066 CASE_CFN_NEARBYINT_FN:
13067 CASE_CFN_RINT:
13068 CASE_CFN_RINT_FN:
13069 CASE_CFN_ROUND:
13070 CASE_CFN_ROUND_FN:
13071 CASE_CFN_SCALB:
13072 CASE_CFN_SCALBLN:
13073 CASE_CFN_SCALBN:
13074 CASE_CFN_SIGNBIT:
13075 CASE_CFN_SIGNIFICAND:
13076 CASE_CFN_SINH:
13077 CASE_CFN_TANH:
13078 CASE_CFN_TRUNC:
13079 CASE_CFN_TRUNC_FN:
13080 /* True if the 1st argument is nonnegative. */
13081 return RECURSE (arg0);
13083 CASE_CFN_FMAX:
13084 CASE_CFN_FMAX_FN:
13085 /* True if the 1st OR 2nd arguments are nonnegative. */
13086 return RECURSE (arg0) || RECURSE (arg1);
13088 CASE_CFN_FMIN:
13089 CASE_CFN_FMIN_FN:
13090 /* True if the 1st AND 2nd arguments are nonnegative. */
13091 return RECURSE (arg0) && RECURSE (arg1);
13093 CASE_CFN_COPYSIGN:
13094 CASE_CFN_COPYSIGN_FN:
13095 /* True if the 2nd argument is nonnegative. */
13096 return RECURSE (arg1);
13098 CASE_CFN_POWI:
13099 /* True if the 1st argument is nonnegative or the second
13100 argument is an even integer. */
13101 if (TREE_CODE (arg1) == INTEGER_CST
13102 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13103 return true;
13104 return RECURSE (arg0);
13106 CASE_CFN_POW:
13107 /* True if the 1st argument is nonnegative or the second
13108 argument is an even integer valued real. */
13109 if (TREE_CODE (arg1) == REAL_CST)
13111 REAL_VALUE_TYPE c;
13112 HOST_WIDE_INT n;
13114 c = TREE_REAL_CST (arg1);
13115 n = real_to_integer (&c);
13116 if ((n & 1) == 0)
13118 REAL_VALUE_TYPE cint;
13119 real_from_integer (&cint, VOIDmode, n, SIGNED);
13120 if (real_identical (&c, &cint))
13121 return true;
13124 return RECURSE (arg0);
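	  /* For instance, the case above treats pow (x, 2.0) as non-negative
	     for any x: the REAL_CST exponent round-trips through
	     real_to_integer and real_from_integer as the even integer 2.
	     An exponent like 2.5 fails the real_identical check, so the
	     result is non-negative only when x itself is.  */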
13126 default:
13127 break;
13129 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13132 /* Return true if T is known to be non-negative. If the return
13133 value is based on the assumption that signed overflow is undefined,
13134 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13135 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13137 static bool
13138 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13140 enum tree_code code = TREE_CODE (t);
13141 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13142 return true;
13144 switch (code)
13146 case TARGET_EXPR:
13148 tree temp = TARGET_EXPR_SLOT (t);
13149 t = TARGET_EXPR_INITIAL (t);
13151 /* If the initializer is non-void, then it's a normal expression
13152 that will be assigned to the slot. */
13153 if (!VOID_TYPE_P (t))
13154 return RECURSE (t);
13156 /* Otherwise, the initializer sets the slot in some way. One common
13157 way is an assignment statement at the end of the initializer. */
13158 while (1)
13160 if (TREE_CODE (t) == BIND_EXPR)
13161 t = expr_last (BIND_EXPR_BODY (t));
13162 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13163 || TREE_CODE (t) == TRY_CATCH_EXPR)
13164 t = expr_last (TREE_OPERAND (t, 0));
13165 else if (TREE_CODE (t) == STATEMENT_LIST)
13166 t = expr_last (t);
13167 else
13168 break;
13170 if (TREE_CODE (t) == MODIFY_EXPR
13171 && TREE_OPERAND (t, 0) == temp)
13172 return RECURSE (TREE_OPERAND (t, 1));
13174 return false;
13177 case CALL_EXPR:
13179 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13180 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13182 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13183 get_call_combined_fn (t),
13184 arg0,
13185 arg1,
13186 strict_overflow_p, depth);
13188 case COMPOUND_EXPR:
13189 case MODIFY_EXPR:
13190 return RECURSE (TREE_OPERAND (t, 1));
13192 case BIND_EXPR:
13193 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13195 case SAVE_EXPR:
13196 return RECURSE (TREE_OPERAND (t, 0));
13198 default:
13199 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13203 #undef RECURSE
13204 #undef tree_expr_nonnegative_warnv_p
13206 /* Return true if T is known to be non-negative. If the return
13207 value is based on the assumption that signed overflow is undefined,
13208 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13209 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13211 bool
13212 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13214 enum tree_code code;
13215 if (t == error_mark_node)
13216 return false;
13218 code = TREE_CODE (t);
13219 switch (TREE_CODE_CLASS (code))
13221 case tcc_binary:
13222 case tcc_comparison:
13223 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13224 TREE_TYPE (t),
13225 TREE_OPERAND (t, 0),
13226 TREE_OPERAND (t, 1),
13227 strict_overflow_p, depth);
13229 case tcc_unary:
13230 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13231 TREE_TYPE (t),
13232 TREE_OPERAND (t, 0),
13233 strict_overflow_p, depth);
13235 case tcc_constant:
13236 case tcc_declaration:
13237 case tcc_reference:
13238 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13240 default:
13241 break;
13244 switch (code)
13246 case TRUTH_AND_EXPR:
13247 case TRUTH_OR_EXPR:
13248 case TRUTH_XOR_EXPR:
13249 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13250 TREE_TYPE (t),
13251 TREE_OPERAND (t, 0),
13252 TREE_OPERAND (t, 1),
13253 strict_overflow_p, depth);
13254 case TRUTH_NOT_EXPR:
13255 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13256 TREE_TYPE (t),
13257 TREE_OPERAND (t, 0),
13258 strict_overflow_p, depth);
13260 case COND_EXPR:
13261 case CONSTRUCTOR:
13262 case OBJ_TYPE_REF:
13263 case ASSERT_EXPR:
13264 case ADDR_EXPR:
13265 case WITH_SIZE_EXPR:
13266 case SSA_NAME:
13267 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13269 default:
13270 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13274 /* Return true if `t' is known to be non-negative. Handle warnings
13275 about undefined signed overflow. */
13277 bool
13278 tree_expr_nonnegative_p (tree t)
13280 bool ret, strict_overflow_p;
13282 strict_overflow_p = false;
13283 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13284 if (strict_overflow_p)
13285 fold_overflow_warning (("assuming signed overflow does not occur when "
13286 "determining that expression is always "
13287 "non-negative"),
13288 WARN_STRICT_OVERFLOW_MISC);
13289 return ret;
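/* A minimal usage sketch (hypothetical, not part of this file): a
   simplification that is only valid for non-negative operands, such as
   reducing abs (x) to x.  The call above also takes care of the
   -Wstrict-overflow warning when the answer relies on undefined signed
   overflow.  */

static tree
example_simplify_abs (location_t loc, tree x)
{
  if (tree_expr_nonnegative_p (x))
    return x;
  return fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (x), x);
}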
13293 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13294 For floating point we further ensure that T is not denormal.
13295 Similar logic is present in nonzero_address in rtlanal.h.
13297 If the return value is based on the assumption that signed overflow
13298 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13299 change *STRICT_OVERFLOW_P. */
13301 bool
13302 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13303 bool *strict_overflow_p)
13305 switch (code)
13307 case ABS_EXPR:
13308 return tree_expr_nonzero_warnv_p (op0,
13309 strict_overflow_p);
13311 case NOP_EXPR:
13313 tree inner_type = TREE_TYPE (op0);
13314 tree outer_type = type;
13316 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13317 && tree_expr_nonzero_warnv_p (op0,
13318 strict_overflow_p));
13320 break;
13322 case NON_LVALUE_EXPR:
13323 return tree_expr_nonzero_warnv_p (op0,
13324 strict_overflow_p);
13326 default:
13327 break;
13330 return false;
13333 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13334 For floating point we further ensure that T is not denormal.
13335 Similar logic is present in nonzero_address in rtlanal.h.
13337 If the return value is based on the assumption that signed overflow
13338 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13339 change *STRICT_OVERFLOW_P. */
13341 bool
13342 tree_binary_nonzero_warnv_p (enum tree_code code,
13343 tree type,
13344 tree op0,
13345 tree op1, bool *strict_overflow_p)
13347 bool sub_strict_overflow_p;
13348 switch (code)
13350 case POINTER_PLUS_EXPR:
13351 case PLUS_EXPR:
13352 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13354 /* In the presence of negative values it is hard
13355 to say anything definite. */
13356 sub_strict_overflow_p = false;
13357 if (!tree_expr_nonnegative_warnv_p (op0,
13358 &sub_strict_overflow_p)
13359 || !tree_expr_nonnegative_warnv_p (op1,
13360 &sub_strict_overflow_p))
13361 return false;
13362 /* One of the operands must be positive and the other non-negative. */
13363 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13364 overflows, on a twos-complement machine the sum of two
13365 nonnegative numbers can never be zero. */
13366 return (tree_expr_nonzero_warnv_p (op0,
13367 strict_overflow_p)
13368 || tree_expr_nonzero_warnv_p (op1,
13369 strict_overflow_p));
13371 break;
13373 case MULT_EXPR:
13374 if (TYPE_OVERFLOW_UNDEFINED (type))
13376 if (tree_expr_nonzero_warnv_p (op0,
13377 strict_overflow_p)
13378 && tree_expr_nonzero_warnv_p (op1,
13379 strict_overflow_p))
13381 *strict_overflow_p = true;
13382 return true;
13385 break;
13387 case MIN_EXPR:
13388 sub_strict_overflow_p = false;
13389 if (tree_expr_nonzero_warnv_p (op0,
13390 &sub_strict_overflow_p)
13391 && tree_expr_nonzero_warnv_p (op1,
13392 &sub_strict_overflow_p))
13394 if (sub_strict_overflow_p)
13395 *strict_overflow_p = true;
13397 break;
13399 case MAX_EXPR:
13400 sub_strict_overflow_p = false;
13401 if (tree_expr_nonzero_warnv_p (op0,
13402 &sub_strict_overflow_p))
13404 if (sub_strict_overflow_p)
13405 *strict_overflow_p = true;
13407 /* When both operands are nonzero, then MAX must be too. */
13408 if (tree_expr_nonzero_warnv_p (op1,
13409 strict_overflow_p))
13410 return true;
13412 /* MAX where operand 0 is positive is positive. */
13413 return tree_expr_nonnegative_warnv_p (op0,
13414 strict_overflow_p);
13416 /* MAX where operand 1 is positive is positive. */
13417 else if (tree_expr_nonzero_warnv_p (op1,
13418 &sub_strict_overflow_p)
13419 && tree_expr_nonnegative_warnv_p (op1,
13420 &sub_strict_overflow_p))
13422 if (sub_strict_overflow_p)
13423 *strict_overflow_p = true;
13424 return true;
13426 break;
13428 case BIT_IOR_EXPR:
13429 return (tree_expr_nonzero_warnv_p (op1,
13430 strict_overflow_p)
13431 || tree_expr_nonzero_warnv_p (op0,
13432 strict_overflow_p));
13434 default:
13435 break;
13438 return false;
13441 /* Return true when T is an address and is known to be nonzero.
13442 For floating point we further ensure that T is not denormal.
13443 Similar logic is present in nonzero_address in rtlanal.h.
13445 If the return value is based on the assumption that signed overflow
13446 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13447 change *STRICT_OVERFLOW_P. */
13449 bool
13450 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13452 bool sub_strict_overflow_p;
13453 switch (TREE_CODE (t))
13455 case INTEGER_CST:
13456 return !integer_zerop (t);
13458 case ADDR_EXPR:
13460 tree base = TREE_OPERAND (t, 0);
13462 if (!DECL_P (base))
13463 base = get_base_address (base);
13465 if (base && TREE_CODE (base) == TARGET_EXPR)
13466 base = TARGET_EXPR_SLOT (base);
13468 if (!base)
13469 return false;
13471 /* For objects in symbol table check if we know they are non-zero.
13472 Don't do anything for variables and functions before symtab is built;
13473 it is quite possible that they will be declared weak later. */
13474 int nonzero_addr = maybe_nonzero_address (base);
13475 if (nonzero_addr >= 0)
13476 return nonzero_addr;
13478 /* Constants are never weak. */
13479 if (CONSTANT_CLASS_P (base))
13480 return true;
13482 return false;
13485 case COND_EXPR:
13486 sub_strict_overflow_p = false;
13487 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13488 &sub_strict_overflow_p)
13489 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13490 &sub_strict_overflow_p))
13492 if (sub_strict_overflow_p)
13493 *strict_overflow_p = true;
13494 return true;
13496 break;
13498 case SSA_NAME:
13499 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13500 break;
13501 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
13503 default:
13504 break;
13506 return false;
13509 #define integer_valued_real_p(X) \
13510 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13512 #define RECURSE(X) \
13513 ((integer_valued_real_p) (X, depth + 1))
13515 /* Return true if the floating point result of (CODE OP0) has an
13516 integer value. We also allow +Inf, -Inf and NaN to be considered
13517 integer values. Return false for signaling NaN.
13519 DEPTH is the current nesting depth of the query. */
13521 bool
13522 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13524 switch (code)
13526 case FLOAT_EXPR:
13527 return true;
13529 case ABS_EXPR:
13530 return RECURSE (op0);
13532 CASE_CONVERT:
13534 tree type = TREE_TYPE (op0);
13535 if (TREE_CODE (type) == INTEGER_TYPE)
13536 return true;
13537 if (TREE_CODE (type) == REAL_TYPE)
13538 return RECURSE (op0);
13539 break;
13542 default:
13543 break;
13545 return false;
13548 /* Return true if the floating point result of (CODE OP0 OP1) has an
13549 integer value. We also allow +Inf, -Inf and NaN to be considered
13550 integer values. Return false for signaling NaN.
13552 DEPTH is the current nesting depth of the query. */
13554 bool
13555 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13557 switch (code)
13559 case PLUS_EXPR:
13560 case MINUS_EXPR:
13561 case MULT_EXPR:
13562 case MIN_EXPR:
13563 case MAX_EXPR:
13564 return RECURSE (op0) && RECURSE (op1);
13566 default:
13567 break;
13569 return false;
13572 /* Return true if the floating point result of calling FN with arguments
13573 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13574 considered integer values. Return false for signaling NaN. If FN
13575 takes fewer than 2 arguments, the remaining ARGn are null.
13577 DEPTH is the current nesting depth of the query. */
13579 bool
13580 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13582 switch (fn)
13584 CASE_CFN_CEIL:
13585 CASE_CFN_CEIL_FN:
13586 CASE_CFN_FLOOR:
13587 CASE_CFN_FLOOR_FN:
13588 CASE_CFN_NEARBYINT:
13589 CASE_CFN_NEARBYINT_FN:
13590 CASE_CFN_RINT:
13591 CASE_CFN_RINT_FN:
13592 CASE_CFN_ROUND:
13593 CASE_CFN_ROUND_FN:
13594 CASE_CFN_TRUNC:
13595 CASE_CFN_TRUNC_FN:
13596 return true;
13598 CASE_CFN_FMIN:
13599 CASE_CFN_FMIN_FN:
13600 CASE_CFN_FMAX:
13601 CASE_CFN_FMAX_FN:
13602 return RECURSE (arg0) && RECURSE (arg1);
13604 default:
13605 break;
13607 return false;
13610 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13611 has an integer value. We also allow +Inf, -Inf and NaN to be
13612 considered integer values. Return false for signaling NaN.
13614 DEPTH is the current nesting depth of the query. */
13616 bool
13617 integer_valued_real_single_p (tree t, int depth)
13619 switch (TREE_CODE (t))
13621 case REAL_CST:
13622 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13624 case COND_EXPR:
13625 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13627 case SSA_NAME:
13628 /* Limit the depth of recursion to avoid quadratic behavior.
13629 This is expected to catch almost all occurrences in practice.
13630 If this code misses important cases that unbounded recursion
13631 would not, passes that need this information could be revised
13632 to provide it through dataflow propagation. */
13633 return (!name_registered_for_update_p (t)
13634 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13635 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13636 depth));
13638 default:
13639 break;
13641 return false;
13644 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13645 has an integer value. We also allow +Inf, -Inf and NaN to be
13646 considered integer values. Return false for signaling NaN.
13648 DEPTH is the current nesting depth of the query. */
13650 static bool
13651 integer_valued_real_invalid_p (tree t, int depth)
13653 switch (TREE_CODE (t))
13655 case COMPOUND_EXPR:
13656 case MODIFY_EXPR:
13657 case BIND_EXPR:
13658 return RECURSE (TREE_OPERAND (t, 1));
13660 case SAVE_EXPR:
13661 return RECURSE (TREE_OPERAND (t, 0));
13663 default:
13664 break;
13666 return false;
13669 #undef RECURSE
13670 #undef integer_valued_real_p
13672 /* Return true if the floating point expression T has an integer value.
13673 We also allow +Inf, -Inf and NaN to be considered integer values.
13674 Return false for signaling NaN.
13676 DEPTH is the current nesting depth of the query. */
13678 bool
13679 integer_valued_real_p (tree t, int depth)
13681 if (t == error_mark_node)
13682 return false;
13684 tree_code code = TREE_CODE (t);
13685 switch (TREE_CODE_CLASS (code))
13687 case tcc_binary:
13688 case tcc_comparison:
13689 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13690 TREE_OPERAND (t, 1), depth);
13692 case tcc_unary:
13693 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13695 case tcc_constant:
13696 case tcc_declaration:
13697 case tcc_reference:
13698 return integer_valued_real_single_p (t, depth);
13700 default:
13701 break;
13704 switch (code)
13706 case COND_EXPR:
13707 case SSA_NAME:
13708 return integer_valued_real_single_p (t, depth);
13710 case CALL_EXPR:
13712 tree arg0 = (call_expr_nargs (t) > 0
13713 ? CALL_EXPR_ARG (t, 0)
13714 : NULL_TREE);
13715 tree arg1 = (call_expr_nargs (t) > 1
13716 ? CALL_EXPR_ARG (t, 1)
13717 : NULL_TREE);
13718 return integer_valued_real_call_p (get_call_combined_fn (t),
13719 arg0, arg1, depth);
13722 default:
13723 return integer_valued_real_invalid_p (t, depth);
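/* A minimal usage sketch (hypothetical, not part of this file): elide a
   redundant rounding when the operand is already integer-valued, e.g.
   trunc (floor (x)) -> floor (x).  This assumes the declaration's
   default DEPTH argument of 0.  */

static tree
example_elide_redundant_round (tree arg)
{
  if (integer_valued_real_p (arg))
    return arg;
  return NULL_TREE;
}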
13727 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13728 attempt to fold the expression to a constant without modifying TYPE,
13729 OP0 or OP1.
13731 If the expression could be simplified to a constant, then return
13732 the constant. If the expression would not be simplified to a
13733 constant, then return NULL_TREE. */
13735 tree
13736 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13738 tree tem = fold_binary (code, type, op0, op1);
13739 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13742 /* Given the components of a unary expression CODE, TYPE and OP0,
13743 attempt to fold the expression to a constant without modifying
13744 TYPE or OP0.
13746 If the expression could be simplified to a constant, then return
13747 the constant. If the expression would not be simplified to a
13748 constant, then return NULL_TREE. */
13750 tree
13751 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13753 tree tem = fold_unary (code, type, op0);
13754 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
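/* A minimal usage sketch (hypothetical, not part of this file): both
   helpers answer "does this fold all the way to a constant?", returning
   NULL_TREE rather than a partially simplified tree on failure.  */

static tree
example_fold_constant_sum (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  /* Yields the INTEGER_CST 5; a non-constant operand would yield
     NULL_TREE instead.  */
  return fold_binary_to_constant (PLUS_EXPR, integer_type_node, two, three);
}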
13757 /* If EXP represents referencing an element in a constant string
13758 (either via pointer arithmetic or array indexing), return the
13759 tree representing the value accessed, otherwise return NULL. */
13761 tree
13762 fold_read_from_constant_string (tree exp)
13764 if ((TREE_CODE (exp) == INDIRECT_REF
13765 || TREE_CODE (exp) == ARRAY_REF)
13766 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13768 tree exp1 = TREE_OPERAND (exp, 0);
13769 tree index;
13770 tree string;
13771 location_t loc = EXPR_LOCATION (exp);
13773 if (TREE_CODE (exp) == INDIRECT_REF)
13774 string = string_constant (exp1, &index, NULL, NULL);
13775 else
13777 tree low_bound = array_ref_low_bound (exp);
13778 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13780 /* Optimize the special case of a zero lower bound.
13782 We convert the low_bound to sizetype to avoid some problems
13783 with constant folding. (E.g. suppose the lower bound is 1,
13784 and its mode is QI. Without the conversion, (ARRAY
13785 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13786 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13787 if (! integer_zerop (low_bound))
13788 index = size_diffop_loc (loc, index,
13789 fold_convert_loc (loc, sizetype, low_bound));
13791 string = exp1;
13794 scalar_int_mode char_mode;
13795 if (string
13796 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13797 && TREE_CODE (string) == STRING_CST
13798 && TREE_CODE (index) == INTEGER_CST
13799 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13800 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
13801 &char_mode)
13802 && GET_MODE_SIZE (char_mode) == 1)
13803 return build_int_cst_type (TREE_TYPE (exp),
13804 (TREE_STRING_POINTER (string)
13805 [TREE_INT_CST_LOW (index)]));
13807 return NULL;
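/* For example, given the tree for "abc"[1] (an ARRAY_REF of a STRING_CST
   with index 1), the routine above returns the INTEGER_CST for 'b'; a
   non-constant or out-of-range index makes it return NULL instead.  */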
13810 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13811 an integer constant, real, or fixed-point constant.
13813 TYPE is the type of the result. */
13815 static tree
13816 fold_negate_const (tree arg0, tree type)
13818 tree t = NULL_TREE;
13820 switch (TREE_CODE (arg0))
13822 case REAL_CST:
13823 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13824 break;
13826 case FIXED_CST:
13828 FIXED_VALUE_TYPE f;
13829 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13830 &(TREE_FIXED_CST (arg0)), NULL,
13831 TYPE_SATURATING (type));
13832 t = build_fixed (type, f);
13833 /* Propagate overflow flags. */
13834 if (overflow_p | TREE_OVERFLOW (arg0))
13835 TREE_OVERFLOW (t) = 1;
13836 break;
13839 default:
13840 if (poly_int_tree_p (arg0))
13842 wi::overflow_type overflow;
13843 poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
13844 t = force_fit_type (type, res, 1,
13845 (overflow && ! TYPE_UNSIGNED (type))
13846 || TREE_OVERFLOW (arg0));
13847 break;
13850 gcc_unreachable ();
13853 return t;
13856 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13857 an integer constant or real constant.
13859 TYPE is the type of the result. */
13861 tree
13862 fold_abs_const (tree arg0, tree type)
13864 tree t = NULL_TREE;
13866 switch (TREE_CODE (arg0))
13868 case INTEGER_CST:
13870 /* If the value is unsigned or non-negative, then the absolute value
13871 is the same as the ordinary value. */
13872 wide_int val = wi::to_wide (arg0);
13873 wi::overflow_type overflow = wi::OVF_NONE;
13874 if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
13877 /* If the value is negative, then the absolute value is
13878 its negation. */
13879 else
13880 val = wi::neg (val, &overflow);
13882 /* Force to the destination type, set TREE_OVERFLOW for signed
13883 TYPE only. */
13884 t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
13886 break;
13888 case REAL_CST:
13889 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13890 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13891 else
13892 t = arg0;
13893 break;
13895 default:
13896 gcc_unreachable ();
13899 return t;
13902 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13903 constant. TYPE is the type of the result. */
13905 static tree
13906 fold_not_const (const_tree arg0, tree type)
13908 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13910 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
13913 /* Given CODE, a relational operator, the target type, TYPE and two
13914 constant operands OP0 and OP1, return the result of the
13915 relational operation. If the result is not a compile time
13916 constant, then return NULL_TREE. */
13918 static tree
13919 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13921 int result, invert;
13923 /* From here on, the only cases we handle are when the result is
13924 known to be a constant. */
13926 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13928 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13929 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13931 /* Handle the cases where either operand is a NaN. */
13932 if (real_isnan (c0) || real_isnan (c1))
13934 switch (code)
13936 case EQ_EXPR:
13937 case ORDERED_EXPR:
13938 result = 0;
13939 break;
13941 case NE_EXPR:
13942 case UNORDERED_EXPR:
13943 case UNLT_EXPR:
13944 case UNLE_EXPR:
13945 case UNGT_EXPR:
13946 case UNGE_EXPR:
13947 case UNEQ_EXPR:
13948 result = 1;
13949 break;
13951 case LT_EXPR:
13952 case LE_EXPR:
13953 case GT_EXPR:
13954 case GE_EXPR:
13955 case LTGT_EXPR:
13956 if (flag_trapping_math)
13957 return NULL_TREE;
13958 result = 0;
13959 break;
13961 default:
13962 gcc_unreachable ();
13965 return constant_boolean_node (result, type);
13968 return constant_boolean_node (real_compare (code, c0, c1), type);
13971 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13973 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13974 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13975 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13978 /* Handle equality/inequality of complex constants. */
13979 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13981 tree rcond = fold_relational_const (code, type,
13982 TREE_REALPART (op0),
13983 TREE_REALPART (op1));
13984 tree icond = fold_relational_const (code, type,
13985 TREE_IMAGPART (op0),
13986 TREE_IMAGPART (op1));
13987 if (code == EQ_EXPR)
13988 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13989 else if (code == NE_EXPR)
13990 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13991 else
13992 return NULL_TREE;
13995 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13997 if (!VECTOR_TYPE_P (type))
13999 /* Have vector comparison with scalar boolean result. */
14000 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
14001 && known_eq (VECTOR_CST_NELTS (op0),
14002 VECTOR_CST_NELTS (op1)));
14003 unsigned HOST_WIDE_INT nunits;
14004 if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
14005 return NULL_TREE;
14006 for (unsigned i = 0; i < nunits; i++)
14008 tree elem0 = VECTOR_CST_ELT (op0, i);
14009 tree elem1 = VECTOR_CST_ELT (op1, i);
14010 tree tmp = fold_relational_const (code, type, elem0, elem1);
14011 if (tmp == NULL_TREE)
14012 return NULL_TREE;
14013 if (integer_zerop (tmp))
14014 return constant_boolean_node (false, type);
14016 return constant_boolean_node (true, type);
14018 tree_vector_builder elts;
14019 if (!elts.new_binary_operation (type, op0, op1, false))
14020 return NULL_TREE;
14021 unsigned int count = elts.encoded_nelts ();
14022 for (unsigned i = 0; i < count; i++)
14024 tree elem_type = TREE_TYPE (type);
14025 tree elem0 = VECTOR_CST_ELT (op0, i);
14026 tree elem1 = VECTOR_CST_ELT (op1, i);
14028 tree tem = fold_relational_const (code, elem_type,
14029 elem0, elem1);
14031 if (tem == NULL_TREE)
14032 return NULL_TREE;
14034 elts.quick_push (build_int_cst (elem_type,
14035 integer_zerop (tem) ? 0 : -1));
14038 return elts.build ();
14041 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14043 To compute GT, swap the arguments and do LT.
14044 To compute GE, do LT and invert the result.
14045 To compute LE, swap the arguments, do LT and invert the result.
14046 To compute NE, do EQ and invert the result.
14048 Therefore, the code below must handle only EQ and LT. */
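  /* For example, 5 >= 3 is computed as ! (5 < 3), which is true, while
     5 <= 3 first swaps the operands to 3 >= 5 and is then computed as
     ! (3 < 5), which is false.  */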
14050 if (code == LE_EXPR || code == GT_EXPR)
14052 std::swap (op0, op1);
14053 code = swap_tree_comparison (code);
14056 /* Note that it is safe to invert for real values here because we
14057 have already handled the one case where it matters. */
14059 invert = 0;
14060 if (code == NE_EXPR || code == GE_EXPR)
14062 invert = 1;
14063 code = invert_tree_comparison (code, false);
14066 /* Compute a result for LT or EQ if args permit;
14067 otherwise return NULL_TREE. */
14068 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14070 if (code == EQ_EXPR)
14071 result = tree_int_cst_equal (op0, op1);
14072 else
14073 result = tree_int_cst_lt (op0, op1);
14075 else
14076 return NULL_TREE;
14078 if (invert)
14079 result ^= 1;
14080 return constant_boolean_node (result, type);
14083 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14084 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14085 itself. */
14087 tree
14088 fold_build_cleanup_point_expr (tree type, tree expr)
14090 /* If the expression does not have side effects then we don't have to wrap
14091 it with a cleanup point expression. */
14092 if (!TREE_SIDE_EFFECTS (expr))
14093 return expr;
14095 /* If the expression is a return, check whether the expression inside the
14096 return, or the right hand side of the modify expression inside the
14097 return, has side effects. If either has none, we don't need to
14098 wrap the expression in a cleanup point expression. Note we don't check the
14099 left hand side of the modify because it should always be a return decl. */
14100 if (TREE_CODE (expr) == RETURN_EXPR)
14102 tree op = TREE_OPERAND (expr, 0);
14103 if (!op || !TREE_SIDE_EFFECTS (op))
14104 return expr;
14105 op = TREE_OPERAND (op, 1);
14106 if (!TREE_SIDE_EFFECTS (op))
14107 return expr;
14110 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
14113 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14114 of an indirection through OP0, or NULL_TREE if no simplification is
14115 possible. */
14117 tree
14118 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14120 tree sub = op0;
14121 tree subtype;
14122 poly_uint64 const_op01;
14124 STRIP_NOPS (sub);
14125 subtype = TREE_TYPE (sub);
14126 if (!POINTER_TYPE_P (subtype)
14127 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
14128 return NULL_TREE;
14130 if (TREE_CODE (sub) == ADDR_EXPR)
14132 tree op = TREE_OPERAND (sub, 0);
14133 tree optype = TREE_TYPE (op);
14135 /* *&CONST_DECL -> to the value of the const decl. */
14136 if (TREE_CODE (op) == CONST_DECL)
14137 return DECL_INITIAL (op);
14138 /* *&p => p; make sure to handle *&"str"[cst] here. */
14139 if (type == optype)
14141 tree fop = fold_read_from_constant_string (op);
14142 if (fop)
14143 return fop;
14144 else
14145 return op;
14147 /* *(foo *)&fooarray => fooarray[0] */
14148 else if (TREE_CODE (optype) == ARRAY_TYPE
14149 && type == TREE_TYPE (optype)
14150 && (!in_gimple_form
14151 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14153 tree type_domain = TYPE_DOMAIN (optype);
14154 tree min_val = size_zero_node;
14155 if (type_domain && TYPE_MIN_VALUE (type_domain))
14156 min_val = TYPE_MIN_VALUE (type_domain);
14157 if (in_gimple_form
14158 && TREE_CODE (min_val) != INTEGER_CST)
14159 return NULL_TREE;
14160 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14161 NULL_TREE, NULL_TREE);
14163 /* *(foo *)&complexfoo => __real__ complexfoo */
14164 else if (TREE_CODE (optype) == COMPLEX_TYPE
14165 && type == TREE_TYPE (optype))
14166 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14167 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14168 else if (VECTOR_TYPE_P (optype)
14169 && type == TREE_TYPE (optype))
14171 tree part_width = TYPE_SIZE (type);
14172 tree index = bitsize_int (0);
14173 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
14174 index);
14178 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14179 && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
14181 tree op00 = TREE_OPERAND (sub, 0);
14182 tree op01 = TREE_OPERAND (sub, 1);
14184 STRIP_NOPS (op00);
14185 if (TREE_CODE (op00) == ADDR_EXPR)
14187 tree op00type;
14188 op00 = TREE_OPERAND (op00, 0);
14189 op00type = TREE_TYPE (op00);
14191 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14192 if (VECTOR_TYPE_P (op00type)
14193 && type == TREE_TYPE (op00type)
14194 /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
14195 but we want to treat offsets with MSB set as negative.
14196 For the code below negative offsets are invalid and
14197 TYPE_SIZE of the element is something unsigned, so
14198 check whether op01 fits into poly_int64, which implies
14199 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
14200 then just use poly_uint64 because we want to treat the
14201 value as unsigned. */
14202 && tree_fits_poly_int64_p (op01))
14204 tree part_width = TYPE_SIZE (type);
14205 poly_uint64 max_offset
14206 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
14207 * TYPE_VECTOR_SUBPARTS (op00type));
14208 if (known_lt (const_op01, max_offset))
14210 tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
14211 return fold_build3_loc (loc,
14212 BIT_FIELD_REF, type, op00,
14213 part_width, index);
14216 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14217 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14218 && type == TREE_TYPE (op00type))
14220 if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
14221 const_op01))
14222 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14224 /* ((foo *)&fooarray)[1] => fooarray[1] */
14225 else if (TREE_CODE (op00type) == ARRAY_TYPE
14226 && type == TREE_TYPE (op00type))
14228 tree type_domain = TYPE_DOMAIN (op00type);
14229 tree min_val = size_zero_node;
14230 if (type_domain && TYPE_MIN_VALUE (type_domain))
14231 min_val = TYPE_MIN_VALUE (type_domain);
14232 poly_uint64 type_size, index;
14233 if (poly_int_tree_p (min_val)
14234 && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
14235 && multiple_p (const_op01, type_size, &index))
14237 poly_offset_int off = index + wi::to_poly_offset (min_val);
14238 op01 = wide_int_to_tree (sizetype, off);
14239 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14240 NULL_TREE, NULL_TREE);
14246 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14247 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14248 && type == TREE_TYPE (TREE_TYPE (subtype))
14249 && (!in_gimple_form
14250 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14252 tree type_domain;
14253 tree min_val = size_zero_node;
14254 sub = build_fold_indirect_ref_loc (loc, sub);
14255 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14256 if (type_domain && TYPE_MIN_VALUE (type_domain))
14257 min_val = TYPE_MIN_VALUE (type_domain);
14258 if (in_gimple_form
14259 && TREE_CODE (min_val) != INTEGER_CST)
14260 return NULL_TREE;
14261 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14262 NULL_TREE);
14265 return NULL_TREE;
14268 /* Builds an expression for an indirection through T, simplifying some
14269 cases. */
14271 tree
14272 build_fold_indirect_ref_loc (location_t loc, tree t)
14274 tree type = TREE_TYPE (TREE_TYPE (t));
14275 tree sub = fold_indirect_ref_1 (loc, type, t);
14277 if (sub)
14278 return sub;
14280 return build1_loc (loc, INDIRECT_REF, type, t);
14283 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14285 tree
14286 fold_indirect_ref_loc (location_t loc, tree t)
14288 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14290 if (sub)
14291 return sub;
14292 else
14293 return t;
14296 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14297 whose result is ignored. The type of the returned tree need not be
14298 the same as the original expression. */
14300 tree
14301 fold_ignored_result (tree t)
14303 if (!TREE_SIDE_EFFECTS (t))
14304 return integer_zero_node;
14306 for (;;)
14307 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14309 case tcc_unary:
14310 t = TREE_OPERAND (t, 0);
14311 break;
14313 case tcc_binary:
14314 case tcc_comparison:
14315 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14316 t = TREE_OPERAND (t, 0);
14317 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14318 t = TREE_OPERAND (t, 1);
14319 else
14320 return t;
14321 break;
14323 case tcc_expression:
14324 switch (TREE_CODE (t))
14326 case COMPOUND_EXPR:
14327 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14328 return t;
14329 t = TREE_OPERAND (t, 0);
14330 break;
14332 case COND_EXPR:
14333 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14334 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14335 return t;
14336 t = TREE_OPERAND (t, 0);
14337 break;
14339 default:
14340 return t;
14342 break;
14344 default:
14345 return t;
14349 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14351 tree
14352 round_up_loc (location_t loc, tree value, unsigned int divisor)
14354 tree div = NULL_TREE;
14356 if (divisor == 1)
14357 return value;
14359 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14360 have to do anything. Only do this when we are not given a const,
14361 because in that case, this check is more expensive than just
14362 doing the rounding. */
14363 if (TREE_CODE (value) != INTEGER_CST)
14365 div = build_int_cst (TREE_TYPE (value), divisor);
14367 if (multiple_of_p (TREE_TYPE (value), value, div))
14368 return value;
14371 /* If divisor is a power of two, simplify this to bit manipulation. */
14372 if (pow2_or_zerop (divisor))
14374 if (TREE_CODE (value) == INTEGER_CST)
14376 wide_int val = wi::to_wide (value);
14377 bool overflow_p;
14379 if ((val & (divisor - 1)) == 0)
14380 return value;
14382 overflow_p = TREE_OVERFLOW (value);
14383 val += divisor - 1;
14384 val &= (int) -divisor;
14385 if (val == 0)
14386 overflow_p = true;
14388 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14390 else
14392 tree t;
14394 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14395 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14396 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14397 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14400 else
14402 if (!div)
14403 div = build_int_cst (TREE_TYPE (value), divisor);
14404 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14405 value = size_binop_loc (loc, MULT_EXPR, value, div);
14408 return value;
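/* The power-of-two path above is the classic rounding formula
   (VALUE + DIVISOR - 1) & -DIVISOR.  A scalar sketch of the same
   arithmetic (hypothetical helper, not part of this file):  */

static inline unsigned HOST_WIDE_INT
example_round_up_pow2 (unsigned HOST_WIDE_INT value,
		       unsigned HOST_WIDE_INT divisor)
{
  /* Assumes DIVISOR is a nonzero power of two.  Adding DIVISOR - 1
     carries any remainder into the next multiple, and the mask clears
     the low bits: e.g. (13 + 7) & -8 == 20 & ~7 == 16.  */
  return (value + divisor - 1) & -divisor;
}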
14411 /* Likewise, but round down. */
14413 tree
14414 round_down_loc (location_t loc, tree value, int divisor)
14416 tree div = NULL_TREE;
14418 gcc_assert (divisor > 0);
14419 if (divisor == 1)
14420 return value;
14422 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14423 have to do anything. Only do this when we are not given a const,
14424 because in that case, this check is more expensive than just
14425 doing the rounding. */
14426 if (TREE_CODE (value) != INTEGER_CST)
14428 div = build_int_cst (TREE_TYPE (value), divisor);
14430 if (multiple_of_p (TREE_TYPE (value), value, div))
14431 return value;
14434 /* If divisor is a power of two, simplify this to bit manipulation. */
14435 if (pow2_or_zerop (divisor))
14437 tree t;
14439 t = build_int_cst (TREE_TYPE (value), -divisor);
14440 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14442 else
14444 if (!div)
14445 div = build_int_cst (TREE_TYPE (value), divisor);
14446 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14447 value = size_binop_loc (loc, MULT_EXPR, value, div);
14450 return value;
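/* Worked example (editor's note): rounding 13 down to a multiple of 8
   needs no addition first, just 13 & -8 == 8; with the
   non-power-of-two divisor 12 the generic path yields
   floor (13 / 12) * 12 == 12.  */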
/* Return a pointer to the base of the object addressed by EXP and
   extract information about the offset of the access, storing it in
   *PBITPOS and *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  poly_int64_pod *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  poly_int64 bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &reversep,
                                  &volatilep);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      core = TREE_OPERAND (exp, 0);
      STRIP_NOPS (core);
      *pbitpos = 0;
      *poffset = TREE_OPERAND (exp, 1);
      if (poly_int_tree_p (*poffset))
        {
          poly_offset_int tem
            = wi::sext (wi::to_poly_offset (*poffset),
                        TYPE_PRECISION (TREE_TYPE (*poffset)));
          tem <<= LOG2_BITS_PER_UNIT;
          if (tem.to_shwi (pbitpos))
            *poffset = NULL_TREE;
        }
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
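/* Illustrative behaviour (editor's sketch; the names are
   hypothetical): for EXP == &s.f, where field f sits 4 bytes into s,
   the ADDR_EXPR arm returns &s with *PBITPOS == 32 and
   *POFFSET == NULL_TREE.  For EXP == p + n with a non-constant n,
   the POINTER_PLUS_EXPR arm returns p with *PBITPOS == 0 and
   *POFFSET == n.  */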
/* Return true if the addresses of E1 and E2 differ by a constant,
   false otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
{
  tree core1, core2;
  poly_int64 bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  poly_int64 bytepos1, bytepos2;
  if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
      || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += bytepos1 - bytepos2;
  return true;
}
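/* Illustrative behaviour (editor's sketch): for E1 == &a[3] and
   E2 == &a[1], with 4-byte array elements, both addresses share the
   core &a, so the function returns true with *DIFF == 8.  For
   E1 == &a[i] and E2 == &a[0], the offset of E1 is non-constant, so
   the function returns false.  */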
/* Return OFF converted to a pointer offset type suitable as offset for
   POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */

tree
convert_to_ptrofftype_loc (location_t loc, tree off)
{
  return fold_convert_loc (loc, sizetype, off);
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
                          ptr, convert_to_ptrofftype_loc (loc, off));
}

/* Likewise, but for a HOST_WIDE_INT offset OFF.  */

tree
fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
                          ptr, size_int (off));
}
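/* Illustrative usage (editor's sketch; loc, ptr and off_tree are
   hypothetical): the helpers above differ only in how the offset is
   supplied.

     tree q1 = fold_build_pointer_plus_loc (loc, ptr, off_tree);
     tree q2 = fold_build_pointer_plus_hwi_loc (loc, ptr, 4);

   Each builds (and immediately folds) a POINTER_PLUS_EXPR whose
   offset operand has been converted to sizetype.  */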
/* Return a pointer P to a NUL-terminated string representing the sequence
   of constant characters referred to by SRC (or a subsequence of such
   characters within it if SRC is a reference to a string plus some
   constant offset).  If STRLEN is non-null, store the number of bytes
   in the string constant including the terminating NUL char.  *STRLEN is
   typically strlen (P) + 1 in the absence of embedded NUL characters.  */

const char *
c_getstr (tree src, unsigned HOST_WIDE_INT *strlen /* = NULL */)
{
  tree offset_node;
  tree mem_size;

  if (strlen)
    *strlen = 0;

  src = string_constant (src, &offset_node, &mem_size, NULL);
  if (src == 0)
    return NULL;

  unsigned HOST_WIDE_INT offset = 0;
  if (offset_node != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (offset_node))
        return NULL;
      else
        offset = tree_to_uhwi (offset_node);
    }

  if (!tree_fits_uhwi_p (mem_size))
    return NULL;

  /* STRING_LENGTH is the size of the string literal, including any
     embedded NULs.  STRING_SIZE is the size of the array the string
     literal is stored in.  */
  unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
  unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size);

  /* Ideally this would turn into a gcc_checking_assert over time.  */
  if (string_length > string_size)
    string_length = string_size;

  const char *string = TREE_STRING_POINTER (src);
  if (string_length == 0
      || offset >= string_size)
    return NULL;

  if (strlen)
    {
      /* Compute and store the length of the substring at OFFSET.
         All offsets past the initial length refer to null strings.  */
      if (offset < string_length)
        *strlen = string_length - offset;
      else
        *strlen = 1;
    }
  else
    {
      tree eltype = TREE_TYPE (TREE_TYPE (src));
      /* Support only properly NUL-terminated single byte strings.  */
      if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
        return NULL;
      if (string[string_length - 1] != '\0')
        return NULL;
    }

  return offset < string_length ? string + offset : "";
}
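/* Illustrative behaviour (editor's note): if SRC refers to the
   constant "hello" plus offset 1, c_getstr returns a pointer to
   "ello" and, when STRLEN is non-null, stores 5 in *STRLEN
   (TREE_STRING_LENGTH of "hello" is 6, counting the NUL, minus the
   offset of 1).  */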
/* Given a tree T, compute which bits in T may be nonzero.  */

wide_int
tree_nonzero_bits (const_tree t)
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return wi::to_wide (t);
    case SSA_NAME:
      return get_nonzero_bits (t);
    case NON_LVALUE_EXPR:
    case SAVE_EXPR:
      return tree_nonzero_bits (TREE_OPERAND (t, 0));
    case BIT_AND_EXPR:
      return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
                          tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
                         tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case COND_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
                         tree_nonzero_bits (TREE_OPERAND (t, 2)));
    CASE_CONVERT:
      return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
                             TYPE_PRECISION (TREE_TYPE (t)),
                             TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
    case PLUS_EXPR:
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
        {
          wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
          wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
          if (wi::bit_and (nzbits1, nzbits2) == 0)
            return wi::bit_or (nzbits1, nzbits2);
        }
      break;
    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree type = TREE_TYPE (t);
          wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
          wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
                                       TYPE_PRECISION (type));
          return wi::neg_p (arg1)
                 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
                 : wi::lshift (nzbits, arg1);
        }
      break;
    case RSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree type = TREE_TYPE (t);
          wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
          wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
                                       TYPE_PRECISION (type));
          return wi::neg_p (arg1)
                 ? wi::lshift (nzbits, -arg1)
                 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
        }
      break;
    default:
      break;
    }

  return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
}
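/* Worked example (editor's note): for T == (x & 12) | 3, with nothing
   known about x, the BIT_AND_EXPR case yields the mask 12, the
   BIT_IOR_EXPR case ORs in 3, and the result is 15: only the low four
   bits of T can ever be nonzero.  Similarly, (x & 3) << 2 yields the
   mask 12 via the LSHIFT_EXPR case.  */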
#if CHECKING_P

namespace selftest {

/* Helper functions for writing tests of folding trees.  */

/* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */

static void
assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
                             tree constant)
{
  ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
}

/* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
   wrapping WRAPPED_EXPR.  */

static void
assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
                                 tree wrapped_expr)
{
  tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
  ASSERT_NE (wrapped_expr, result);
  ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
  ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
}

/* Verify that various arithmetic binary operations are folded
   correctly.  */

static void
test_arithmetic_folding ()
{
  tree type = integer_type_node;
  tree x = create_tmp_var_raw (type, "x");
  tree zero = build_zero_cst (type);
  tree one = build_int_cst (type, 1);

  /* Addition.  */
  /* 1 <-- (0 + 1) */
  assert_binop_folds_to_const (zero, PLUS_EXPR, one,
                               one);
  assert_binop_folds_to_const (one, PLUS_EXPR, zero,
                               one);

  /* (nonlvalue)x <-- (x + 0) */
  assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
                                   x);

  /* Subtraction.  */
  /* 0 <-- (x - x) */
  assert_binop_folds_to_const (x, MINUS_EXPR, x,
                               zero);
  assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
                                   x);

  /* Multiplication.  */
  /* 0 <-- (x * 0) */
  assert_binop_folds_to_const (x, MULT_EXPR, zero,
                               zero);

  /* (nonlvalue)x <-- (x * 1) */
  assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
                                   x);
}

/* Verify that various binary operations on vectors are folded
   correctly.  */

static void
test_vector_folding ()
{
  tree inner_type = integer_type_node;
  tree type = build_vector_type (inner_type, 4);
  tree zero = build_zero_cst (type);
  tree one = build_one_cst (type);

  /* Verify equality tests that return a scalar boolean result.  */
  tree res_type = boolean_type_node;
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
}

/* Verify folding of VEC_DUPLICATE_EXPRs.  */

static void
test_vec_duplicate_folding ()
{
  scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
  machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
  /* This will be 1 if VEC_MODE isn't a vector mode.  */
  poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);

  tree type = build_vector_type (ssizetype, nunits);
  tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
  tree dup5_cst = build_vector_from_val (type, ssize_int (5));
  ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
}

/* Run all of the selftests within this file.  */

void
fold_const_c_tests ()
{
  test_arithmetic_folding ();
  test_vector_folding ();
  test_vec_duplicate_folding ();
}

} // namespace selftest

#endif /* CHECKING_P */