/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "params.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "attribs.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
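
/* Illustration (not part of the original source): the four bits encode
   LT (1), EQ (2), GT (4) and UNORD (8), so combining two comparisons of
   the same operands with AND or OR is just a bitwise operation on their
   codes.  For example:

     COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE   (1 | 2 == 3)
     COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ   (3 & 6 == 2)  */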
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);
static tree fold_negate_expr (location_t, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
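
/* Illustration (not part of the original source): for INTEGER_CST
   operands 12 and 4 this returns an INTEGER_CST 3, while for 13 and 4
   it returns NULL_TREE because the division leaves a remainder.  */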

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
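
/* Usage sketch (illustration only, not part of the original source):
   callers bracket folding with a defer/undefer pair so that a
   -Wstrict-overflow warning is emitted only if the folded result is
   actually used, e.g.

     fold_defer_overflow_warnings ();
     tree folded = fold_binary (code, type, op0, op1);
     fold_undefer_overflow_warnings (folded != NULL_TREE, stmt, 0);  */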

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
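
/* Illustration (not part of the original source): sin is odd, so
   -sin(x) may be folded to sin(-x).  The rint family is only treated
   as odd without -frounding-math: under a directed rounding mode such
   as upward, rint (-0.5) is -0.0 while -rint (0.5) is -1.0, so the
   symmetry no longer holds.  */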

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = TYPE_VECTOR_SUBPARTS (type), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          || HONOR_SIGNED_ZEROS (element_mode (type))
          || (INTEGRAL_TYPE_P (type)
              && ! TYPE_OVERFLOW_WRAPS (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
             && !HONOR_SIGNED_ZEROS (element_mode (type))
             && (! INTEGRAL_TYPE_P (type)
                 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      /* INT_MIN/n * n doesn't overflow, but with one operand negated
         it does if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
          && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                 && wi::popcount (wi::abs (TREE_OPERAND (t, 0))) != 1)
                || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
                    && wi::popcount (wi::abs (TREE_OPERAND (t, 1))) != 1)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  */
      if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
          || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
          || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
              && ! integer_onep (TREE_OPERAND (t, 1))))
        return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || (ANY_INTEGRAL_TYPE_P (type)
              && !TYPE_OVERFLOW_TRAPS (type)
              && TYPE_OVERFLOW_WRAPS (type))
          || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
        tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
        if (rpart && ipart)
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        int count = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts = XALLOCAVEC (tree, count);

        for (i = 0; i < count; i++)
          {
            elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
        return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
        return fold_build2_loc (loc, TREE_CODE (t), type,
                                negate_expr (TREE_OPERAND (t, 0)),
                                TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  */
      if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
           || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
           || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
               && ! integer_onep (TREE_OPERAND (t, 1))))
          && negate_expr_p (TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, TREE_CODE (t), type,
                                TREE_OPERAND (t, 0),
                                negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
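
/* Illustration (not part of the original source): negate_expr on the
   INTEGER_CST 5 folds to the INTEGER_CST -5; on (a - b), when signed
   zeros and sign-dependent rounding are not honored, it folds to
   (b - a); otherwise it falls back to wrapping T in a NEGATE_EXPR.  */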

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except if it is a
   literal, for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
            tree *minus_varp, tree *conp, tree *minus_conp,
            tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
                   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR
                       && (TREE_CODE (in) == PLUS_EXPR
                           || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
        *minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
        *minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
         when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      if (*conp)
        *minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
        *conp = *minus_conp, *minus_conp = 0;
      if (var)
        *minus_varp = var, var = 0;
      else if (*minus_varp)
        var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
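
/* Illustration (not part of the original source): splitting IN = x + 3
   with CODE == PLUS_EXPR sets *LITP to 3 and returns x as the variable
   part; splitting IN = 5 - x sets *LITP to 5, *MINUS_VARP to x, and
   returns null for the variable part.  */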

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code,
                 tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1,
                         const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

/* A wrapper for int_const_binop_1 with OVERFLOWABLE set to 1, i.e.
   overflow in the arithmetic is allowed and recorded via TREE_OVERFLOW
   on the result.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
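
/* Illustration (not part of the original source): given two 32-bit
   signed INTEGER_CSTs 40 and 2, int_const_binop (PLUS_EXPR, ...) yields
   the INTEGER_CST 42; for INT_MAX and 1 the sum wraps and the returned
   constant has TREE_OVERFLOW set.  */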

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
        return int_const_binop (PLUS_EXPR,
                                arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISSIGNALING_NAN (d1)
              || REAL_VALUE_ISSIGNALING_NAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && real_equal (&d2, &dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        {
          /* Make resulting NaN value to be qNaN when flag_signaling_nans
             is off.  */
          d1.signalling = 0;
          t = build_real (type, d1);
          return t;
        }
      else if (REAL_VALUE_ISNAN (d2))
        {
          /* Make resulting NaN value to be qNaN when flag_signaling_nans
             is off.  */
          d2.signalling = 0;
          t = build_real (type, d2);
          return t;
        }

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          if (TREE_CODE (arg2) != FIXED_CST)
            return NULL_TREE;
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            if (TREE_CODE (arg2) != INTEGER_CST)
              return NULL_TREE;
            wide_int w2 = arg2;
            f2.data.high = w2.elt (1);
            f2.data.low = w2.ulow ();
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru.  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = bi/br;
                     div = (bi * ratio) + br;
                     tr = (ai * ratio) + ar;
                     ti = ai - (ar * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);

          elts[i] = const_binop (code, elem1, arg2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
           && TREE_CODE (arg2) == REAL_CST)
          || (TREE_CODE (arg1) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST))
        return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
        if (TREE_CODE (arg1) != VECTOR_CST
            || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
          return NULL_TREE;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
                                          ? NOP_EXPR : FIX_TRUNC_EXPR,
                                          TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
        unsigned int out, ofs, scale;
        tree *elts;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
                    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
        if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 4);
        if (!vec_cst_ctor_to_array (arg1, elts)
            || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
          return NULL_TREE;

        if (code == VEC_WIDEN_MULT_LO_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
        else if (code == VEC_WIDEN_MULT_HI_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
        else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
          scale = 1, ofs = 0;
        else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
          scale = 1, ofs = 1;

        for (out = 0; out < nelts; out++)
          {
            unsigned int in1 = (out << scale) + ofs;
            unsigned int in2 = in1 + nelts * 2;
            tree t1, t2;

            t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
            t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

            if (t1 == NULL_TREE || t2 == NULL_TREE)
              return NULL_TREE;
            elts[out] = const_binop (MULT_EXPR, t1, t2);
            if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
                       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}

/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
         cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
          && !(targetm.addr_space.zero_address_valid
               (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
        return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
        /* Can't call fold_negate_const directly here as that doesn't
           handle all cases and we might not be able to negate some
           constants.  */
        tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
        if (tem && CONSTANT_CLASS_P (tem))
          return tem;
        break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
                                          TREE_TYPE (type));
          return build_complex (type, TREE_REALPART (arg0), ipart);
        }
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree *elements;
          tree elem;
          unsigned count = VECTOR_CST_NELTS (arg0), i;

          elements = XALLOCAVEC (tree, count);
          for (i = 0; i < count; i++)
            {
              elem = VECTOR_CST_ELT (arg0, i);
              elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
              if (elem == NULL_TREE)
                break;
              elements[i] = elem;
            }
          if (i == count)
            return build_vector (type, elements);
        }
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
        tree *elts;
        enum tree_code subcode;

        gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;

        elts = XALLOCAVEC (tree, nelts * 2);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
                                   || code == VEC_UNPACK_FLOAT_LO_EXPR))
          elts += nelts;

        if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
          subcode = NOP_EXPR;
        else
          subcode = FLOAT_EXPR;

        for (i = 0; i < nelts; i++)
          {
            elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
            if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
              return NULL_TREE;
          }

        return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
        unsigned int nelts, i;
        tree *elts;
        enum tree_code subcode;

        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;
        nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

        elts = XALLOCAVEC (tree, nelts);
        if (!vec_cst_ctor_to_array (arg0, elts))
          return NULL_TREE;

        switch (code)
          {
          case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
          case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
          case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
          default: gcc_unreachable ();
          }

        for (i = 1; i < nelts; i++)
          {
            elts[0] = const_binop (subcode, elts[0], elts[i]);
            if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
              return NULL_TREE;
          }

        return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}
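
/* Illustration (not part of the original source):
   const_unop (NEGATE_EXPR, integer_type_node,
               build_int_cst (integer_type_node, 7))
   folds to the INTEGER_CST -7, and BIT_NOT_EXPR on a VECTOR_CST is
   folded element by element.  */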

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
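
/* Illustration (not part of the original source):
   size_binop_loc (loc, PLUS_EXPR, size_int (4), size_int (8)) folds
   directly to the sizetype constant 12.  OVERFLOWABLE is -1 here, so
   any wrap-around in the unsigned sizetype arithmetic is still flagged
   via TREE_OVERFLOW on the result.  */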

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type,
                                  const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
        {
          overflow = true;
          val = lt;
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (real_less (&u, &r))
            {
              overflow = true;
              val = ut;
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
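
/* Illustration (not part of the original source): converting the
   REAL_CST 1.0e30 to a 32-bit signed int saturates to INT_MAX and marks
   the result with TREE_OVERFLOW; a NaN converts to zero, likewise
   marked.  */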

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     Add 1 to temp if the fractional bits are not zero, i.e. if
     truncation discarded a nonzero fraction.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
                      (temp.is_negative ()
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1));

  return t;
}
1988 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1989 to another floating point type. */
1991 static tree
1992 fold_convert_const_real_from_real (tree type, const_tree arg1)
1994 REAL_VALUE_TYPE value;
1995 tree t;
1997 /* Don't perform the operation if flag_signaling_nans is on
1998 and the operand is a signaling NaN. */
1999 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2000 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2001 return NULL_TREE;
2003 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2004 t = build_real (type, value);
2006 /* If converting an infinity or NAN to a representation that doesn't
2007 have one, set the overflow bit so that we can produce some kind of
2008 error message at the appropriate point if necessary. It's not the
2009 most user-friendly message, but it's better than nothing. */
2010 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2011 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2012 TREE_OVERFLOW (t) = 1;
2013 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2014 && !MODE_HAS_NANS (TYPE_MODE (type)))
2015 TREE_OVERFLOW (t) = 1;
2016 /* Regular overflow: the conversion produced an infinity in a mode
2017 that can't represent one. */
2018 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2019 && REAL_VALUE_ISINF (value)
2020 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2021 TREE_OVERFLOW (t) = 1;
2022 else
2023 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2024 return t;
2027 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2028 to a floating point type. */
2030 static tree
2031 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2033 REAL_VALUE_TYPE value;
2034 tree t;
2036 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2037 t = build_real (type, value);
2039 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2040 return t;
2043 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2044 to another fixed-point type. */
2046 static tree
2047 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2049 FIXED_VALUE_TYPE value;
2050 tree t;
2051 bool overflow_p;
2053 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2054 TYPE_SATURATING (type));
2055 t = build_fixed (type, value);
2057 /* Propagate overflow flags. */
2058 if (overflow_p | TREE_OVERFLOW (arg1))
2059 TREE_OVERFLOW (t) = 1;
2060 return t;
2063 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2064 to a fixed-point type. */
2066 static tree
2067 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2069 FIXED_VALUE_TYPE value;
2070 tree t;
2071 bool overflow_p;
2072 double_int di;
2074 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2076 di.low = TREE_INT_CST_ELT (arg1, 0);
2077 if (TREE_INT_CST_NUNITS (arg1) == 1)
2078 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2079 else
2080 di.high = TREE_INT_CST_ELT (arg1, 1);
2082 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2083 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2084 TYPE_SATURATING (type));
2085 t = build_fixed (type, value);
2087 /* Propagate overflow flags. */
2088 if (overflow_p | TREE_OVERFLOW (arg1))
2089 TREE_OVERFLOW (t) = 1;
2090 return t;
2093 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2094 to a fixed-point type. */
2096 static tree
2097 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2099 FIXED_VALUE_TYPE value;
2100 tree t;
2101 bool overflow_p;
2103 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2104 &TREE_REAL_CST (arg1),
2105 TYPE_SATURATING (type));
2106 t = build_fixed (type, value);
2108 /* Propagate overflow flags. */
2109 if (overflow_p | TREE_OVERFLOW (arg1))
2110 TREE_OVERFLOW (t) = 1;
2111 return t;
2114 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2115 type TYPE. If no simplification can be done return NULL_TREE. */
2117 static tree
2118 fold_convert_const (enum tree_code code, tree type, tree arg1)
2120 if (TREE_TYPE (arg1) == type)
2121 return arg1;
2123 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2124 || TREE_CODE (type) == OFFSET_TYPE)
2126 if (TREE_CODE (arg1) == INTEGER_CST)
2127 return fold_convert_const_int_from_int (type, arg1);
2128 else if (TREE_CODE (arg1) == REAL_CST)
2129 return fold_convert_const_int_from_real (code, type, arg1);
2130 else if (TREE_CODE (arg1) == FIXED_CST)
2131 return fold_convert_const_int_from_fixed (type, arg1);
2133 else if (TREE_CODE (type) == REAL_TYPE)
2135 if (TREE_CODE (arg1) == INTEGER_CST)
2136 return build_real_from_int_cst (type, arg1);
2137 else if (TREE_CODE (arg1) == REAL_CST)
2138 return fold_convert_const_real_from_real (type, arg1);
2139 else if (TREE_CODE (arg1) == FIXED_CST)
2140 return fold_convert_const_real_from_fixed (type, arg1);
2142 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2144 if (TREE_CODE (arg1) == FIXED_CST)
2145 return fold_convert_const_fixed_from_fixed (type, arg1);
2146 else if (TREE_CODE (arg1) == INTEGER_CST)
2147 return fold_convert_const_fixed_from_int (type, arg1);
2148 else if (TREE_CODE (arg1) == REAL_CST)
2149 return fold_convert_const_fixed_from_real (type, arg1);
2151 else if (TREE_CODE (type) == VECTOR_TYPE)
2153 if (TREE_CODE (arg1) == VECTOR_CST
2154 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2156 int len = TYPE_VECTOR_SUBPARTS (type);
2157 tree elttype = TREE_TYPE (type);
2158 tree *v = XALLOCAVEC (tree, len);
2159 for (int i = 0; i < len; ++i)
2161 tree elt = VECTOR_CST_ELT (arg1, i);
2162 tree cvt = fold_convert_const (code, elttype, elt);
2163 if (cvt == NULL_TREE)
2164 return NULL_TREE;
2165 v[i] = cvt;
2167 return build_vector (type, v);
2170 return NULL_TREE;
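/* Usage sketch with hypothetical operands: fold_convert_const folds
   only constants and returns NULL_TREE otherwise, so callers must
   always check the result.  */
#if 0
  tree folded = fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, arg);
  if (folded != NULL_TREE)
    /* E.g. for a REAL_CST 3.75, FOLDED is the INTEGER_CST 3.  */
    return folded;
#endif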
2173 /* Construct a vector of vector type TYPE in which every element is zero. */
2175 static tree
2176 build_zero_vector (tree type)
2178 tree t;
2180 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2181 return build_vector_from_val (type, t);
2184 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2186 bool
2187 fold_convertible_p (const_tree type, const_tree arg)
2189 tree orig = TREE_TYPE (arg);
2191 if (type == orig)
2192 return true;
2194 if (TREE_CODE (arg) == ERROR_MARK
2195 || TREE_CODE (type) == ERROR_MARK
2196 || TREE_CODE (orig) == ERROR_MARK)
2197 return false;
2199 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2200 return true;
2202 switch (TREE_CODE (type))
2204 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2205 case POINTER_TYPE: case REFERENCE_TYPE:
2206 case OFFSET_TYPE:
2207 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2208 || TREE_CODE (orig) == OFFSET_TYPE);
2210 case REAL_TYPE:
2211 case FIXED_POINT_TYPE:
2212 case VECTOR_TYPE:
2213 case VOID_TYPE:
2214 return TREE_CODE (type) == TREE_CODE (orig);
2216 default:
2217 return false;
2221 /* Convert expression ARG to type TYPE. Used by the middle-end for
2222 simple conversions in preference to calling the front-end's convert. */
2224 tree
2225 fold_convert_loc (location_t loc, tree type, tree arg)
2227 tree orig = TREE_TYPE (arg);
2228 tree tem;
2230 if (type == orig)
2231 return arg;
2233 if (TREE_CODE (arg) == ERROR_MARK
2234 || TREE_CODE (type) == ERROR_MARK
2235 || TREE_CODE (orig) == ERROR_MARK)
2236 return error_mark_node;
2238 switch (TREE_CODE (type))
2240 case POINTER_TYPE:
2241 case REFERENCE_TYPE:
2242 /* Handle conversions between pointers to different address spaces. */
2243 if (POINTER_TYPE_P (orig)
2244 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2245 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2246 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2247 /* fall through */
2249 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2250 case OFFSET_TYPE:
2251 if (TREE_CODE (arg) == INTEGER_CST)
2253 tem = fold_convert_const (NOP_EXPR, type, arg);
2254 if (tem != NULL_TREE)
2255 return tem;
2257 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2258 || TREE_CODE (orig) == OFFSET_TYPE)
2259 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2260 if (TREE_CODE (orig) == COMPLEX_TYPE)
2261 return fold_convert_loc (loc, type,
2262 fold_build1_loc (loc, REALPART_EXPR,
2263 TREE_TYPE (orig), arg));
2264 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2265 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2266 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2268 case REAL_TYPE:
2269 if (TREE_CODE (arg) == INTEGER_CST)
2271 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2272 if (tem != NULL_TREE)
2273 return tem;
2275 else if (TREE_CODE (arg) == REAL_CST)
2277 tem = fold_convert_const (NOP_EXPR, type, arg);
2278 if (tem != NULL_TREE)
2279 return tem;
2281 else if (TREE_CODE (arg) == FIXED_CST)
2283 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2284 if (tem != NULL_TREE)
2285 return tem;
2288 switch (TREE_CODE (orig))
2290 case INTEGER_TYPE:
2291 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2292 case POINTER_TYPE: case REFERENCE_TYPE:
2293 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2295 case REAL_TYPE:
2296 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2298 case FIXED_POINT_TYPE:
2299 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2301 case COMPLEX_TYPE:
2302 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2303 return fold_convert_loc (loc, type, tem);
2305 default:
2306 gcc_unreachable ();
2309 case FIXED_POINT_TYPE:
2310 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2311 || TREE_CODE (arg) == REAL_CST)
2313 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2314 if (tem != NULL_TREE)
2315 goto fold_convert_exit;
2318 switch (TREE_CODE (orig))
2320 case FIXED_POINT_TYPE:
2321 case INTEGER_TYPE:
2322 case ENUMERAL_TYPE:
2323 case BOOLEAN_TYPE:
2324 case REAL_TYPE:
2325 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2327 case COMPLEX_TYPE:
2328 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2329 return fold_convert_loc (loc, type, tem);
2331 default:
2332 gcc_unreachable ();
2335 case COMPLEX_TYPE:
2336 switch (TREE_CODE (orig))
2338 case INTEGER_TYPE:
2339 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2340 case POINTER_TYPE: case REFERENCE_TYPE:
2341 case REAL_TYPE:
2342 case FIXED_POINT_TYPE:
2343 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2344 fold_convert_loc (loc, TREE_TYPE (type), arg),
2345 fold_convert_loc (loc, TREE_TYPE (type),
2346 integer_zero_node));
2347 case COMPLEX_TYPE:
2349 tree rpart, ipart;
2351 if (TREE_CODE (arg) == COMPLEX_EXPR)
2353 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2354 TREE_OPERAND (arg, 0));
2355 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2356 TREE_OPERAND (arg, 1));
2357 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2360 arg = save_expr (arg);
2361 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2362 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2363 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2364 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2365 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2368 default:
2369 gcc_unreachable ();
2372 case VECTOR_TYPE:
2373 if (integer_zerop (arg))
2374 return build_zero_vector (type);
2375 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2376 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2377 || TREE_CODE (orig) == VECTOR_TYPE);
2378 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2380 case VOID_TYPE:
2381 tem = fold_ignored_result (arg);
2382 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2384 default:
2385 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2386 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2387 gcc_unreachable ();
2389 fold_convert_exit:
2390 protected_set_expr_location_unshare (tem, loc);
2391 return tem;
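/* Usage sketch with a hypothetical COMPLEX_TYPE value: per the case
   analysis above, converting to a scalar type keeps only the real
   part, folded through REALPART_EXPR.  */
#if 0
  tree d = fold_convert_loc (loc, double_type_node, complex_val);
  /* D is REALPART_EXPR <complex_val>, converted to double if needed.  */
#endif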
2394 /* Return false if expr can be assumed not to be an lvalue, true
2395 otherwise. */
2397 static bool
2398 maybe_lvalue_p (const_tree x)
2400 /* We only need to wrap lvalue tree codes. */
2401 switch (TREE_CODE (x))
2403 case VAR_DECL:
2404 case PARM_DECL:
2405 case RESULT_DECL:
2406 case LABEL_DECL:
2407 case FUNCTION_DECL:
2408 case SSA_NAME:
2410 case COMPONENT_REF:
2411 case MEM_REF:
2412 case INDIRECT_REF:
2413 case ARRAY_REF:
2414 case ARRAY_RANGE_REF:
2415 case BIT_FIELD_REF:
2416 case OBJ_TYPE_REF:
2418 case REALPART_EXPR:
2419 case IMAGPART_EXPR:
2420 case PREINCREMENT_EXPR:
2421 case PREDECREMENT_EXPR:
2422 case SAVE_EXPR:
2423 case TRY_CATCH_EXPR:
2424 case WITH_CLEANUP_EXPR:
2425 case COMPOUND_EXPR:
2426 case MODIFY_EXPR:
2427 case TARGET_EXPR:
2428 case COND_EXPR:
2429 case BIND_EXPR:
2430 break;
2432 default:
2433 /* Assume the worst for front-end tree codes. */
2434 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2435 break;
2436 return false;
2439 return true;
2442 /* Return an expr equal to X but certainly not valid as an lvalue. */
2444 tree
2445 non_lvalue_loc (location_t loc, tree x)
2447 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2448 us. */
2449 if (in_gimple_form)
2450 return x;
2452 if (! maybe_lvalue_p (x))
2453 return x;
2454 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2457 /* When pedantic, return an expr equal to X but certainly not valid as a
2458 pedantic lvalue. Otherwise, return X. */
2460 static tree
2461 pedantic_non_lvalue_loc (location_t loc, tree x)
2463 return protected_set_expr_location_unshare (x, loc);
2466 /* Given a tree comparison code, return the code that is the logical inverse.
2467 It is generally not safe to do this for floating-point comparisons, except
2468 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2469 ERROR_MARK in this case. */
2471 enum tree_code
2472 invert_tree_comparison (enum tree_code code, bool honor_nans)
2474 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2475 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2476 return ERROR_MARK;
2478 switch (code)
2480 case EQ_EXPR:
2481 return NE_EXPR;
2482 case NE_EXPR:
2483 return EQ_EXPR;
2484 case GT_EXPR:
2485 return honor_nans ? UNLE_EXPR : LE_EXPR;
2486 case GE_EXPR:
2487 return honor_nans ? UNLT_EXPR : LT_EXPR;
2488 case LT_EXPR:
2489 return honor_nans ? UNGE_EXPR : GE_EXPR;
2490 case LE_EXPR:
2491 return honor_nans ? UNGT_EXPR : GT_EXPR;
2492 case LTGT_EXPR:
2493 return UNEQ_EXPR;
2494 case UNEQ_EXPR:
2495 return LTGT_EXPR;
2496 case UNGT_EXPR:
2497 return LE_EXPR;
2498 case UNGE_EXPR:
2499 return LT_EXPR;
2500 case UNLT_EXPR:
2501 return GE_EXPR;
2502 case UNLE_EXPR:
2503 return GT_EXPR;
2504 case ORDERED_EXPR:
2505 return UNORDERED_EXPR;
2506 case UNORDERED_EXPR:
2507 return ORDERED_EXPR;
2508 default:
2509 gcc_unreachable ();
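/* Illustrative sketch (assuming -fno-trapping-math): with NaNs
   honored, the inverse of an ordered comparison is the corresponding
   unordered one; with trapping math most inversions would lose a
   trap on unordered operands, hence ERROR_MARK.  */
#if 0
  gcc_assert (invert_tree_comparison (LT_EXPR, false) == GE_EXPR);
  gcc_assert (invert_tree_comparison (LT_EXPR, true) == UNGE_EXPR);
#endif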
2513 /* Similar, but return the comparison that results if the operands are
2514 swapped. This is safe for floating-point. */
2516 enum tree_code
2517 swap_tree_comparison (enum tree_code code)
2519 switch (code)
2521 case EQ_EXPR:
2522 case NE_EXPR:
2523 case ORDERED_EXPR:
2524 case UNORDERED_EXPR:
2525 case LTGT_EXPR:
2526 case UNEQ_EXPR:
2527 return code;
2528 case GT_EXPR:
2529 return LT_EXPR;
2530 case GE_EXPR:
2531 return LE_EXPR;
2532 case LT_EXPR:
2533 return GT_EXPR;
2534 case LE_EXPR:
2535 return GE_EXPR;
2536 case UNGT_EXPR:
2537 return UNLT_EXPR;
2538 case UNGE_EXPR:
2539 return UNLE_EXPR;
2540 case UNLT_EXPR:
2541 return UNGT_EXPR;
2542 case UNLE_EXPR:
2543 return UNGE_EXPR;
2544 default:
2545 gcc_unreachable ();
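/* Illustrative sketch: swapping operands mirrors the ordering but
   never changes (un)orderedness, so no NaN handling is needed.  */
#if 0
  gcc_assert (swap_tree_comparison (LT_EXPR) == GT_EXPR);     /* a < b <=> b > a */
  gcc_assert (swap_tree_comparison (UNEQ_EXPR) == UNEQ_EXPR); /* symmetric code */
#endif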
2550 /* Convert a comparison tree code from an enum tree_code representation
2551 into a compcode bit-based encoding. This function is the inverse of
2552 compcode_to_comparison. */
2554 static enum comparison_code
2555 comparison_to_compcode (enum tree_code code)
2557 switch (code)
2559 case LT_EXPR:
2560 return COMPCODE_LT;
2561 case EQ_EXPR:
2562 return COMPCODE_EQ;
2563 case LE_EXPR:
2564 return COMPCODE_LE;
2565 case GT_EXPR:
2566 return COMPCODE_GT;
2567 case NE_EXPR:
2568 return COMPCODE_NE;
2569 case GE_EXPR:
2570 return COMPCODE_GE;
2571 case ORDERED_EXPR:
2572 return COMPCODE_ORD;
2573 case UNORDERED_EXPR:
2574 return COMPCODE_UNORD;
2575 case UNLT_EXPR:
2576 return COMPCODE_UNLT;
2577 case UNEQ_EXPR:
2578 return COMPCODE_UNEQ;
2579 case UNLE_EXPR:
2580 return COMPCODE_UNLE;
2581 case UNGT_EXPR:
2582 return COMPCODE_UNGT;
2583 case LTGT_EXPR:
2584 return COMPCODE_LTGT;
2585 case UNGE_EXPR:
2586 return COMPCODE_UNGE;
2587 default:
2588 gcc_unreachable ();
2592 /* Convert a compcode bit-based encoding of a comparison operator back
2593 to GCC's enum tree_code representation. This function is the
2594 inverse of comparison_to_compcode. */
2596 static enum tree_code
2597 compcode_to_comparison (enum comparison_code code)
2599 switch (code)
2601 case COMPCODE_LT:
2602 return LT_EXPR;
2603 case COMPCODE_EQ:
2604 return EQ_EXPR;
2605 case COMPCODE_LE:
2606 return LE_EXPR;
2607 case COMPCODE_GT:
2608 return GT_EXPR;
2609 case COMPCODE_NE:
2610 return NE_EXPR;
2611 case COMPCODE_GE:
2612 return GE_EXPR;
2613 case COMPCODE_ORD:
2614 return ORDERED_EXPR;
2615 case COMPCODE_UNORD:
2616 return UNORDERED_EXPR;
2617 case COMPCODE_UNLT:
2618 return UNLT_EXPR;
2619 case COMPCODE_UNEQ:
2620 return UNEQ_EXPR;
2621 case COMPCODE_UNLE:
2622 return UNLE_EXPR;
2623 case COMPCODE_UNGT:
2624 return UNGT_EXPR;
2625 case COMPCODE_LTGT:
2626 return LTGT_EXPR;
2627 case COMPCODE_UNGE:
2628 return UNGE_EXPR;
2629 default:
2630 gcc_unreachable ();
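/* Illustrative sketch: bit 0 encodes LT, bit 1 EQ and bit 2 GT, so
   combining comparisons on identical operands is plain bit
   arithmetic.  */
#if 0
  gcc_assert ((COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE);    /* 1|2 == 3 */
  gcc_assert ((COMPCODE_LT | COMPCODE_GT) == COMPCODE_LTGT);  /* 1|4 == 5 */
  gcc_assert ((COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ);    /* 3&6 == 2 */
#endif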
2634 /* Return a tree for the comparison which is the combination of
2635 doing the AND or OR (depending on CODE) of the two operations LCODE
2636 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2637 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2638 if this makes the transformation invalid. */
2640 tree
2641 combine_comparisons (location_t loc,
2642 enum tree_code code, enum tree_code lcode,
2643 enum tree_code rcode, tree truth_type,
2644 tree ll_arg, tree lr_arg)
2646 bool honor_nans = HONOR_NANS (ll_arg);
2647 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2648 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2649 int compcode;
2651 switch (code)
2653 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2654 compcode = lcompcode & rcompcode;
2655 break;
2657 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2658 compcode = lcompcode | rcompcode;
2659 break;
2661 default:
2662 return NULL_TREE;
2665 if (!honor_nans)
2667 /* Eliminate unordered comparisons, as well as LTGT and ORD
2668 which are not used unless the mode has NaNs. */
2669 compcode &= ~COMPCODE_UNORD;
2670 if (compcode == COMPCODE_LTGT)
2671 compcode = COMPCODE_NE;
2672 else if (compcode == COMPCODE_ORD)
2673 compcode = COMPCODE_TRUE;
2675 else if (flag_trapping_math)
2677 /* Check that the original operation and the optimized ones will trap
2678 under the same condition. */
2679 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2680 && (lcompcode != COMPCODE_EQ)
2681 && (lcompcode != COMPCODE_ORD);
2682 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2683 && (rcompcode != COMPCODE_EQ)
2684 && (rcompcode != COMPCODE_ORD);
2685 bool trap = (compcode & COMPCODE_UNORD) == 0
2686 && (compcode != COMPCODE_EQ)
2687 && (compcode != COMPCODE_ORD);
2689 /* In a short-circuited boolean expression the LHS might be
2690 such that the RHS, if evaluated, will never trap. For
2691 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2692 if neither x nor y is NaN. (This is a mixed blessing: for
2693 example, the expression above will never trap, hence
2694 optimizing it to x < y would be invalid). */
2695 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2696 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2697 rtrap = false;
2699 /* If the comparison was short-circuited, and only the RHS
2700 trapped, we may now generate a spurious trap. */
2701 if (rtrap && !ltrap
2702 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2703 return NULL_TREE;
2705 /* If we changed the conditions that cause a trap, we lose. */
2706 if ((ltrap || rtrap) != trap)
2707 return NULL_TREE;
2710 if (compcode == COMPCODE_TRUE)
2711 return constant_boolean_node (true, truth_type);
2712 else if (compcode == COMPCODE_FALSE)
2713 return constant_boolean_node (false, truth_type);
2714 else
2716 enum tree_code tcode;
2718 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2719 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
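/* Usage sketch with hypothetical operands X and Y: ORing the bit
   codes of LT and EQ yields LE, so two comparisons collapse into
   one.  */
#if 0
  tree t = combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
				boolean_type_node, x, y);
  /* T is the tree for X <= Y; for other combinations the trap
     analysis above may instead yield NULL_TREE.  */
#endif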
2723 /* Return nonzero if two operands (typically of the same tree node)
2724 are necessarily equal. FLAGS modifies behavior as follows:
2726 If OEP_ONLY_CONST is set, only return nonzero for constants.
2727 This function tests whether the operands are indistinguishable;
2728 it does not test whether they are equal using C's == operation.
2729 The distinction is important for IEEE floating point, because
2730 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2731 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2733 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2734 even though it may hold multiple values during a function.
2735 This is because a GCC tree node guarantees that nothing else is
2736 executed between the evaluation of its "operands" (which may often
2737 be evaluated in arbitrary order). Hence if the operands themselves
2738 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2739 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2740 unset means assuming isochronic (or instantaneous) tree equivalence.
2741 Unless comparing arbitrary expression trees, such as from different
2742 statements, this flag can usually be left unset.
2744 If OEP_PURE_SAME is set, then pure functions with identical arguments
2745 are considered the same. It is used when the caller has other ways
2746 to ensure that global memory is unchanged in between.
2748 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2749 not values of expressions.
2751 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2752 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2754 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2755 any operand with side effects. This is unnecessarily conservative in the
2756 case where we know that arg0 and arg1 are in disjoint code paths (such as
2757 the arms of the ?: operator). In addition, OEP_MATCH_SIDE_EFFECTS is used when comparing
2758 addresses with TREE_CONSTANT flag set so we know that &var == &var
2759 even if var is volatile. */
2761 int
2762 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2764 /* When checking, verify at the outermost operand_equal_p call that
2765 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
2766 hash value. */
2767 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2769 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2771 if (arg0 != arg1)
2773 inchash::hash hstate0 (0), hstate1 (0);
2774 inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2775 inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2776 hashval_t h0 = hstate0.end ();
2777 hashval_t h1 = hstate1.end ();
2778 gcc_assert (h0 == h1);
2780 return 1;
2782 else
2783 return 0;
2786 /* If either is ERROR_MARK, they aren't equal. */
2787 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2788 || TREE_TYPE (arg0) == error_mark_node
2789 || TREE_TYPE (arg1) == error_mark_node)
2790 return 0;
2792 /* Similar, if either does not have a type (like a released SSA name),
2793 they aren't equal. */
2794 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2795 return 0;
2797 /* We cannot consider pointers to different address space equal. */
2798 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2799 && POINTER_TYPE_P (TREE_TYPE (arg1))
2800 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2801 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2802 return 0;
2804 /* Check equality of integer constants before bailing out due to
2805 precision differences. */
2806 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2808 /* Address of INTEGER_CST is not defined; check that we did not forget
2809 to drop the OEP_ADDRESS_OF flags. */
2810 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2811 return tree_int_cst_equal (arg0, arg1);
2814 if (!(flags & OEP_ADDRESS_OF))
2816 /* If both types don't have the same signedness, then we can't consider
2817 them equal. We must check this before the STRIP_NOPS calls
2818 because they may change the signedness of the arguments. As pointers
2819 strictly don't have a signedness, require either two pointers or
2820 two non-pointers as well. */
2821 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2822 || POINTER_TYPE_P (TREE_TYPE (arg0))
2823 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2824 return 0;
2826 /* If both types don't have the same precision, then it is not safe
2827 to strip NOPs. */
2828 if (element_precision (TREE_TYPE (arg0))
2829 != element_precision (TREE_TYPE (arg1)))
2830 return 0;
2832 STRIP_NOPS (arg0);
2833 STRIP_NOPS (arg1);
2835 #if 0
2836 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
2837 sanity check once the issue is solved. */
2838 else
2839 /* Addresses of conversions and SSA_NAMEs (and many other things)
2840 are not defined. Check that we did not forget to drop the
2841 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2842 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2843 && TREE_CODE (arg0) != SSA_NAME);
2844 #endif
2846 /* In case both args are comparisons but with different comparison
2847 code, try to swap the comparison operands of one arg to produce
2848 a match and compare that variant. */
2849 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2850 && COMPARISON_CLASS_P (arg0)
2851 && COMPARISON_CLASS_P (arg1))
2853 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2855 if (TREE_CODE (arg0) == swap_code)
2856 return operand_equal_p (TREE_OPERAND (arg0, 0),
2857 TREE_OPERAND (arg1, 1), flags)
2858 && operand_equal_p (TREE_OPERAND (arg0, 1),
2859 TREE_OPERAND (arg1, 0), flags);
2862 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2864 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2865 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2867 else if (flags & OEP_ADDRESS_OF)
2869 /* If we are interested in comparing addresses, ignore
2870 MEM_REF wrappings of the base that can appear just for
2871 TBAA reasons. */
2872 if (TREE_CODE (arg0) == MEM_REF
2873 && DECL_P (arg1)
2874 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2875 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2876 && integer_zerop (TREE_OPERAND (arg0, 1)))
2877 return 1;
2878 else if (TREE_CODE (arg1) == MEM_REF
2879 && DECL_P (arg0)
2880 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2881 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2882 && integer_zerop (TREE_OPERAND (arg1, 1)))
2883 return 1;
2884 return 0;
2886 else
2887 return 0;
2890 /* When not checking addresses, this is needed for conversions and for
2891 COMPONENT_REF. Might as well play it safe and always test this. */
2892 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2893 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2894 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2895 && !(flags & OEP_ADDRESS_OF)))
2896 return 0;
2898 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2899 We don't care about side effects in that case because the SAVE_EXPR
2900 takes care of that for us. In all other cases, two expressions are
2901 equal if they have no side effects. If we have two identical
2902 expressions with side effects that should be treated the same due
2903 to the only side effects being identical SAVE_EXPR's, that will
2904 be detected in the recursive calls below.
2905 If we are taking an invariant address of two identical objects
2906 they are necessarily equal as well. */
2907 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2908 && (TREE_CODE (arg0) == SAVE_EXPR
2909 || (flags & OEP_MATCH_SIDE_EFFECTS)
2910 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2911 return 1;
2913 /* Next handle constant cases, those for which we can return 1 even
2914 if ONLY_CONST is set. */
2915 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2916 switch (TREE_CODE (arg0))
2918 case INTEGER_CST:
2919 return tree_int_cst_equal (arg0, arg1);
2921 case FIXED_CST:
2922 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2923 TREE_FIXED_CST (arg1));
2925 case REAL_CST:
2926 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2927 return 1;
2930 if (!HONOR_SIGNED_ZEROS (arg0))
2932 /* If we do not distinguish between signed and unsigned zero,
2933 consider them equal. */
2934 if (real_zerop (arg0) && real_zerop (arg1))
2935 return 1;
2937 return 0;
2939 case VECTOR_CST:
2941 unsigned i;
2943 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2944 return 0;
2946 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2948 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2949 VECTOR_CST_ELT (arg1, i), flags))
2950 return 0;
2952 return 1;
2955 case COMPLEX_CST:
2956 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2957 flags)
2958 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2959 flags));
2961 case STRING_CST:
2962 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2963 && ! memcmp (TREE_STRING_POINTER (arg0),
2964 TREE_STRING_POINTER (arg1),
2965 TREE_STRING_LENGTH (arg0)));
2967 case ADDR_EXPR:
2968 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2969 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2970 flags | OEP_ADDRESS_OF
2971 | OEP_MATCH_SIDE_EFFECTS);
2972 case CONSTRUCTOR:
2973 /* In GIMPLE empty constructors are allowed in initializers of
2974 aggregates. */
2975 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
2976 default:
2977 break;
2980 if (flags & OEP_ONLY_CONST)
2981 return 0;
2983 /* Define macros to test an operand from arg0 and arg1 for equality and a
2984 variant that allows null and views null as being different from any
2985 non-null value. In the latter case, if either is null, they both
2986 must be; otherwise, do the normal comparison. */
2987 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2988 TREE_OPERAND (arg1, N), flags)
2990 #define OP_SAME_WITH_NULL(N) \
2991 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2992 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2994 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2996 case tcc_unary:
2997 /* Two conversions are equal only if signedness and modes match. */
2998 switch (TREE_CODE (arg0))
3000 CASE_CONVERT:
3001 case FIX_TRUNC_EXPR:
3002 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3003 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3004 return 0;
3005 break;
3006 default:
3007 break;
3010 return OP_SAME (0);
3013 case tcc_comparison:
3014 case tcc_binary:
3015 if (OP_SAME (0) && OP_SAME (1))
3016 return 1;
3018 /* For commutative ops, allow the other order. */
3019 return (commutative_tree_code (TREE_CODE (arg0))
3020 && operand_equal_p (TREE_OPERAND (arg0, 0),
3021 TREE_OPERAND (arg1, 1), flags)
3022 && operand_equal_p (TREE_OPERAND (arg0, 1),
3023 TREE_OPERAND (arg1, 0), flags));
3025 case tcc_reference:
3026 /* If either of the pointer (or reference) expressions we are
3027 dereferencing contain a side effect, these cannot be equal,
3028 but their addresses can be. */
3029 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3030 && (TREE_SIDE_EFFECTS (arg0)
3031 || TREE_SIDE_EFFECTS (arg1)))
3032 return 0;
3034 switch (TREE_CODE (arg0))
3036 case INDIRECT_REF:
3037 if (!(flags & OEP_ADDRESS_OF)
3038 && (TYPE_ALIGN (TREE_TYPE (arg0))
3039 != TYPE_ALIGN (TREE_TYPE (arg1))))
3040 return 0;
3041 flags &= ~OEP_ADDRESS_OF;
3042 return OP_SAME (0);
3044 case IMAGPART_EXPR:
3045 /* Require the same offset. */
3046 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3047 TYPE_SIZE (TREE_TYPE (arg1)),
3048 flags & ~OEP_ADDRESS_OF))
3049 return 0;
3051 /* Fallthru. */
3052 case REALPART_EXPR:
3053 case VIEW_CONVERT_EXPR:
3054 return OP_SAME (0);
3056 case TARGET_MEM_REF:
3057 case MEM_REF:
3058 if (!(flags & OEP_ADDRESS_OF))
3060 /* Require equal access sizes */
3061 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3062 && (!TYPE_SIZE (TREE_TYPE (arg0))
3063 || !TYPE_SIZE (TREE_TYPE (arg1))
3064 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3065 TYPE_SIZE (TREE_TYPE (arg1)),
3066 flags)))
3067 return 0;
3068 /* Verify that access happens in similar types. */
3069 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3070 return 0;
3071 /* Verify that accesses are TBAA compatible. */
3072 if (!alias_ptr_types_compatible_p
3073 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3074 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3075 || (MR_DEPENDENCE_CLIQUE (arg0)
3076 != MR_DEPENDENCE_CLIQUE (arg1))
3077 || (MR_DEPENDENCE_BASE (arg0)
3078 != MR_DEPENDENCE_BASE (arg1)))
3079 return 0;
3080 /* Verify that alignment is compatible. */
3081 if (TYPE_ALIGN (TREE_TYPE (arg0))
3082 != TYPE_ALIGN (TREE_TYPE (arg1)))
3083 return 0;
3085 flags &= ~OEP_ADDRESS_OF;
3086 return (OP_SAME (0) && OP_SAME (1)
3087 /* TARGET_MEM_REFs require equal extra operands. */
3088 && (TREE_CODE (arg0) != TARGET_MEM_REF
3089 || (OP_SAME_WITH_NULL (2)
3090 && OP_SAME_WITH_NULL (3)
3091 && OP_SAME_WITH_NULL (4))));
3093 case ARRAY_REF:
3094 case ARRAY_RANGE_REF:
3095 if (!OP_SAME (0))
3096 return 0;
3097 flags &= ~OEP_ADDRESS_OF;
3098 /* First compare the array index by value if it is constant, as the
3099 indexes may have different types but the same value here. */
3100 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3101 TREE_OPERAND (arg1, 1))
3102 || OP_SAME (1))
3103 && OP_SAME_WITH_NULL (2)
3104 && OP_SAME_WITH_NULL (3)
3105 /* Compare low bound and element size as with OEP_ADDRESS_OF
3106 we have to account for the offset of the ref. */
3107 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3108 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3109 || (operand_equal_p (array_ref_low_bound
3110 (CONST_CAST_TREE (arg0)),
3111 array_ref_low_bound
3112 (CONST_CAST_TREE (arg1)), flags)
3113 && operand_equal_p (array_ref_element_size
3114 (CONST_CAST_TREE (arg0)),
3115 array_ref_element_size
3116 (CONST_CAST_TREE (arg1)),
3117 flags))));
3119 case COMPONENT_REF:
3120 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3121 may be NULL when we're called to compare MEM_EXPRs. */
3122 if (!OP_SAME_WITH_NULL (0)
3123 || !OP_SAME (1))
3124 return 0;
3125 flags &= ~OEP_ADDRESS_OF;
3126 return OP_SAME_WITH_NULL (2);
3128 case BIT_FIELD_REF:
3129 if (!OP_SAME (0))
3130 return 0;
3131 flags &= ~OEP_ADDRESS_OF;
3132 return OP_SAME (1) && OP_SAME (2);
3134 default:
3135 return 0;
3138 case tcc_expression:
3139 switch (TREE_CODE (arg0))
3141 case ADDR_EXPR:
3142 /* Be sure we pass the right ADDRESS_OF flag. */
3143 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3144 return operand_equal_p (TREE_OPERAND (arg0, 0),
3145 TREE_OPERAND (arg1, 0),
3146 flags | OEP_ADDRESS_OF);
3148 case TRUTH_NOT_EXPR:
3149 return OP_SAME (0);
3151 case TRUTH_ANDIF_EXPR:
3152 case TRUTH_ORIF_EXPR:
3153 return OP_SAME (0) && OP_SAME (1);
3155 case FMA_EXPR:
3156 case WIDEN_MULT_PLUS_EXPR:
3157 case WIDEN_MULT_MINUS_EXPR:
3158 if (!OP_SAME (2))
3159 return 0;
3160 /* The multiplication operands are commutative. */
3161 /* FALLTHRU */
3163 case TRUTH_AND_EXPR:
3164 case TRUTH_OR_EXPR:
3165 case TRUTH_XOR_EXPR:
3166 if (OP_SAME (0) && OP_SAME (1))
3167 return 1;
3169 /* Otherwise take into account this is a commutative operation. */
3170 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3171 TREE_OPERAND (arg1, 1), flags)
3172 && operand_equal_p (TREE_OPERAND (arg0, 1),
3173 TREE_OPERAND (arg1, 0), flags));
3175 case COND_EXPR:
3176 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3177 return 0;
3178 flags &= ~OEP_ADDRESS_OF;
3179 return OP_SAME (0);
3181 case BIT_INSERT_EXPR:
3182 /* BIT_INSERT_EXPR has an implicit operand in the type precision
3183 of op1; we need to check that the precisions are the same. */
3184 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3185 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3186 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3187 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3188 return false;
3189 /* FALLTHRU */
3191 case VEC_COND_EXPR:
3192 case DOT_PROD_EXPR:
3193 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3195 case MODIFY_EXPR:
3196 case INIT_EXPR:
3197 case COMPOUND_EXPR:
3198 case PREDECREMENT_EXPR:
3199 case PREINCREMENT_EXPR:
3200 case POSTDECREMENT_EXPR:
3201 case POSTINCREMENT_EXPR:
3202 if (flags & OEP_LEXICOGRAPHIC)
3203 return OP_SAME (0) && OP_SAME (1);
3204 return 0;
3206 case CLEANUP_POINT_EXPR:
3207 case EXPR_STMT:
3208 if (flags & OEP_LEXICOGRAPHIC)
3209 return OP_SAME (0);
3210 return 0;
3212 default:
3213 return 0;
3216 case tcc_vl_exp:
3217 switch (TREE_CODE (arg0))
3219 case CALL_EXPR:
3220 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3221 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3222 /* If the CALL_EXPRs are not both internal calls or both normal
3223 function calls, then they are not equal. */
3224 return 0;
3225 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3227 /* If the CALL_EXPRs call different internal functions, then they
3228 are not equal. */
3229 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3230 return 0;
3232 else
3234 /* If the CALL_EXPRs call different functions, then they are not
3235 equal. */
3236 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3237 flags))
3238 return 0;
3241 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3243 unsigned int cef = call_expr_flags (arg0);
3244 if (flags & OEP_PURE_SAME)
3245 cef &= ECF_CONST | ECF_PURE;
3246 else
3247 cef &= ECF_CONST;
3248 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3249 return 0;
3252 /* Now see if all the arguments are the same. */
3254 const_call_expr_arg_iterator iter0, iter1;
3255 const_tree a0, a1;
3256 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3257 a1 = first_const_call_expr_arg (arg1, &iter1);
3258 a0 && a1;
3259 a0 = next_const_call_expr_arg (&iter0),
3260 a1 = next_const_call_expr_arg (&iter1))
3261 if (! operand_equal_p (a0, a1, flags))
3262 return 0;
3264 /* If we get here and both argument lists are exhausted
3265 then the CALL_EXPRs are equal. */
3266 return ! (a0 || a1);
3268 default:
3269 return 0;
3272 case tcc_declaration:
3273 /* Consider __builtin_sqrt equal to sqrt. */
3274 return (TREE_CODE (arg0) == FUNCTION_DECL
3275 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3276 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3277 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3279 case tcc_exceptional:
3280 if (TREE_CODE (arg0) == CONSTRUCTOR)
3282 /* In GIMPLE constructors are used only to build vectors from
3283 elements. Individual elements in the constructor must be
3284 indexed in increasing order and form an initial sequence.
3286 We make no effort to compare constructors in GENERIC.
3287 (see sem_variable::equals in ipa-icf which can do so for
3288 constants). */
3289 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3290 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3291 return 0;
3293 /* Be sure that vectors constructed have the same representation.
3294 So far we have only checked that element precision and modes match.
3295 Vectors may be BLKmode, so also check that the numbers of
3296 parts match. */
3297 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3298 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3299 return 0;
3301 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3302 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3303 unsigned int len = vec_safe_length (v0);
3305 if (len != vec_safe_length (v1))
3306 return 0;
3308 for (unsigned int i = 0; i < len; i++)
3310 constructor_elt *c0 = &(*v0)[i];
3311 constructor_elt *c1 = &(*v1)[i];
3313 if (!operand_equal_p (c0->value, c1->value, flags)
3314 /* In GIMPLE the indexes can be either NULL or matching i.
3315 Double check this so we won't get false
3316 positives for GENERIC. */
3317 || (c0->index
3318 && (TREE_CODE (c0->index) != INTEGER_CST
3319 || !compare_tree_int (c0->index, i)))
3320 || (c1->index
3321 && (TREE_CODE (c1->index) != INTEGER_CST
3322 || !compare_tree_int (c1->index, i))))
3323 return 0;
3325 return 1;
3327 else if (TREE_CODE (arg0) == STATEMENT_LIST
3328 && (flags & OEP_LEXICOGRAPHIC))
3330 /* Compare the STATEMENT_LISTs. */
3331 tree_stmt_iterator tsi1, tsi2;
3332 tree body1 = CONST_CAST_TREE (arg0);
3333 tree body2 = CONST_CAST_TREE (arg1);
3334 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3335 tsi_next (&tsi1), tsi_next (&tsi2))
3337 /* The lists don't have the same number of statements. */
3338 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3339 return 0;
3340 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3341 return 1;
3342 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3343 OEP_LEXICOGRAPHIC))
3344 return 0;
3347 return 0;
3349 case tcc_statement:
3350 switch (TREE_CODE (arg0))
3352 case RETURN_EXPR:
3353 if (flags & OEP_LEXICOGRAPHIC)
3354 return OP_SAME_WITH_NULL (0);
3355 return 0;
3356 default:
3357 return 0;
3360 default:
3361 return 0;
3364 #undef OP_SAME
3365 #undef OP_SAME_WITH_NULL
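/* Usage sketch with hypothetical trees: the FLAGS argument selects
   how strict the equality test is, per the comment above the
   function.  */
#if 0
  int same_value = operand_equal_p (op0, op1, 0);
  int same_const = operand_equal_p (op0, op1, OEP_ONLY_CONST);
  int same_object = operand_equal_p (ref0, ref1, OEP_ADDRESS_OF);
  int same_stmt = operand_equal_p (stmt0, stmt1, OEP_LEXICOGRAPHIC);
#endif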
3368 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3369 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3371 When in doubt, return 0. */
3373 static int
3374 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3376 int unsignedp1, unsignedpo;
3377 tree primarg0, primarg1, primother;
3378 unsigned int correct_width;
3380 if (operand_equal_p (arg0, arg1, 0))
3381 return 1;
3383 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3384 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3385 return 0;
3387 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3388 and see if the inner values are the same. This removes any
3389 signedness comparison, which doesn't matter here. */
3390 primarg0 = arg0, primarg1 = arg1;
3391 STRIP_NOPS (primarg0);
3392 STRIP_NOPS (primarg1);
3393 if (operand_equal_p (primarg0, primarg1, 0))
3394 return 1;
3396 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3397 actual comparison operand, ARG0.
3399 First throw away any conversions to wider types
3400 already present in the operands. */
3402 primarg1 = get_narrower (arg1, &unsignedp1);
3403 primother = get_narrower (other, &unsignedpo);
3405 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3406 if (unsignedp1 == unsignedpo
3407 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3408 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3410 tree type = TREE_TYPE (arg0);
3412 /* Make sure shorter operand is extended the right way
3413 to match the longer operand. */
3414 primarg1 = fold_convert (signed_or_unsigned_type_for
3415 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3417 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3418 return 1;
3421 return 0;
3424 /* See if ARG is an expression that is either a comparison or is performing
3425 arithmetic on comparisons. The comparisons must only be comparing
3426 two different values, which will be stored in *CVAL1 and *CVAL2; if
3427 they are nonzero it means that some operands have already been found.
3428 No variables may be used anywhere else in the expression except in the
3429 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3430 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3432 If this is true, return 1. Otherwise, return zero. */
3434 static int
3435 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3437 enum tree_code code = TREE_CODE (arg);
3438 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3440 /* We can handle some of the tcc_expression cases here. */
3441 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3442 tclass = tcc_unary;
3443 else if (tclass == tcc_expression
3444 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3445 || code == COMPOUND_EXPR))
3446 tclass = tcc_binary;
3448 else if (tclass == tcc_expression && code == SAVE_EXPR
3449 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3451 /* If we've already found a CVAL1 or CVAL2, this expression is
3452 too complex to handle. */
3453 if (*cval1 || *cval2)
3454 return 0;
3456 tclass = tcc_unary;
3457 *save_p = 1;
3460 switch (tclass)
3462 case tcc_unary:
3463 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3465 case tcc_binary:
3466 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3467 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3468 cval1, cval2, save_p));
3470 case tcc_constant:
3471 return 1;
3473 case tcc_expression:
3474 if (code == COND_EXPR)
3475 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3476 cval1, cval2, save_p)
3477 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3478 cval1, cval2, save_p)
3479 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3480 cval1, cval2, save_p));
3481 return 0;
3483 case tcc_comparison:
3484 /* First see if we can handle the first operand, then the second. For
3485 the second operand, we know *CVAL1 can't be zero. It must be that
3486 one side of the comparison is each of the values; test for the
3487 case where this isn't true by failing if the two operands
3488 are the same. */
3490 if (operand_equal_p (TREE_OPERAND (arg, 0),
3491 TREE_OPERAND (arg, 1), 0))
3492 return 0;
3494 if (*cval1 == 0)
3495 *cval1 = TREE_OPERAND (arg, 0);
3496 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3498 else if (*cval2 == 0)
3499 *cval2 = TREE_OPERAND (arg, 0);
3500 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3502 else
3503 return 0;
3505 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3507 else if (*cval2 == 0)
3508 *cval2 = TREE_OPERAND (arg, 1);
3509 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3511 else
3512 return 0;
3514 return 1;
3516 default:
3517 return 0;
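/* Illustrative sketch with hypothetical trees: for COND equal to
   (x < y) || (x == y), the walk above records *CVAL1 = x and
   *CVAL2 = y and returns 1; a third distinct comparison operand
   makes it return 0.  */
#if 0
  tree cval1 = NULL_TREE, cval2 = NULL_TREE;
  int save_p = 0;
  if (twoval_comparison_p (cond, &cval1, &cval2, &save_p))
    gcc_assert (cval1 && cval2);  /* COND only compares CVAL1 with CVAL2.  */
#endif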
3521 /* ARG is a tree that is known to contain just arithmetic operations and
3522 comparisons. Evaluate the operations in the tree substituting NEW0 for
3523 any occurrence of OLD0 as an operand of a comparison and likewise for
3524 NEW1 and OLD1. */
3526 static tree
3527 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3528 tree old1, tree new1)
3530 tree type = TREE_TYPE (arg);
3531 enum tree_code code = TREE_CODE (arg);
3532 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3534 /* We can handle some of the tcc_expression cases here. */
3535 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3536 tclass = tcc_unary;
3537 else if (tclass == tcc_expression
3538 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3539 tclass = tcc_binary;
3541 switch (tclass)
3543 case tcc_unary:
3544 return fold_build1_loc (loc, code, type,
3545 eval_subst (loc, TREE_OPERAND (arg, 0),
3546 old0, new0, old1, new1));
3548 case tcc_binary:
3549 return fold_build2_loc (loc, code, type,
3550 eval_subst (loc, TREE_OPERAND (arg, 0),
3551 old0, new0, old1, new1),
3552 eval_subst (loc, TREE_OPERAND (arg, 1),
3553 old0, new0, old1, new1));
3555 case tcc_expression:
3556 switch (code)
3558 case SAVE_EXPR:
3559 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3560 old1, new1);
3562 case COMPOUND_EXPR:
3563 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3564 old1, new1);
3566 case COND_EXPR:
3567 return fold_build3_loc (loc, code, type,
3568 eval_subst (loc, TREE_OPERAND (arg, 0),
3569 old0, new0, old1, new1),
3570 eval_subst (loc, TREE_OPERAND (arg, 1),
3571 old0, new0, old1, new1),
3572 eval_subst (loc, TREE_OPERAND (arg, 2),
3573 old0, new0, old1, new1));
3574 default:
3575 break;
3577 /* Fall through - ??? */
3579 case tcc_comparison:
3581 tree arg0 = TREE_OPERAND (arg, 0);
3582 tree arg1 = TREE_OPERAND (arg, 1);
3584 /* We need to check both for exact equality and tree equality. The
3585 former will be true if the operand has a side-effect. In that
3586 case, we know the operand occurred exactly once. */
3588 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3589 arg0 = new0;
3590 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3591 arg0 = new1;
3593 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3594 arg1 = new0;
3595 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3596 arg1 = new1;
3598 return fold_build2_loc (loc, code, type, arg0, arg1);
3601 default:
3602 return arg;
3606 /* Return a tree for the case when the result of an expression is RESULT
3607 converted to TYPE and OMITTED was previously an operand of the expression
3608 but is now not needed (e.g., we folded OMITTED * 0).
3610 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3611 the conversion of RESULT to TYPE. */
3613 tree
3614 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3616 tree t = fold_convert_loc (loc, type, result);
3618 /* If the resulting operand is an empty statement, just return the omitted
3619 statement cast to void. */
3620 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3621 return build1_loc (loc, NOP_EXPR, void_type_node,
3622 fold_ignored_result (omitted));
3624 if (TREE_SIDE_EFFECTS (omitted))
3625 return build2_loc (loc, COMPOUND_EXPR, type,
3626 fold_ignored_result (omitted), t);
3628 return non_lvalue_loc (loc, t);
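/* Usage sketch with a hypothetical side-effecting CALL: folding
   call () * 0 must still evaluate the call, so the omitted operand
   is kept in a COMPOUND_EXPR instead of being dropped.  */
#if 0
  tree t = omit_one_operand_loc (loc, integer_type_node,
				 integer_zero_node, call);
  /* T is roughly (call, 0): evaluate CALL for its effects, yield 0.  */
#endif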
3631 /* Return a tree for the case when the result of an expression is RESULT
3632 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3633 of the expression but are now not needed.
3635 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3636 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3637 evaluated before OMITTED2. Otherwise, if neither has side effects,
3638 just do the conversion of RESULT to TYPE. */
3640 tree
3641 omit_two_operands_loc (location_t loc, tree type, tree result,
3642 tree omitted1, tree omitted2)
3644 tree t = fold_convert_loc (loc, type, result);
3646 if (TREE_SIDE_EFFECTS (omitted2))
3647 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3648 if (TREE_SIDE_EFFECTS (omitted1))
3649 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3651 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3655 /* Return a simplified tree node for the truth-negation of ARG. This
3656 never alters ARG itself. We assume that ARG is an operation that
3657 returns a truth value (0 or 1).
3659 FIXME: one would think we would fold the result, but it causes
3660 problems with the dominator optimizer. */
3662 static tree
3663 fold_truth_not_expr (location_t loc, tree arg)
3665 tree type = TREE_TYPE (arg);
3666 enum tree_code code = TREE_CODE (arg);
3667 location_t loc1, loc2;
3669 /* If this is a comparison, we can simply invert it, except for
3670 floating-point non-equality comparisons, in which case we just
3671 enclose a TRUTH_NOT_EXPR around what we have. */
3673 if (TREE_CODE_CLASS (code) == tcc_comparison)
3675 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3676 if (FLOAT_TYPE_P (op_type)
3677 && flag_trapping_math
3678 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3679 && code != NE_EXPR && code != EQ_EXPR)
3680 return NULL_TREE;
3682 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3683 if (code == ERROR_MARK)
3684 return NULL_TREE;
3686 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3687 TREE_OPERAND (arg, 1));
3688 if (TREE_NO_WARNING (arg))
3689 TREE_NO_WARNING (ret) = 1;
3690 return ret;
3693 switch (code)
3695 case INTEGER_CST:
3696 return constant_boolean_node (integer_zerop (arg), type);
3698 case TRUTH_AND_EXPR:
3699 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3700 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3701 return build2_loc (loc, TRUTH_OR_EXPR, type,
3702 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3703 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3705 case TRUTH_OR_EXPR:
3706 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3707 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3708 return build2_loc (loc, TRUTH_AND_EXPR, type,
3709 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3710 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3712 case TRUTH_XOR_EXPR:
3713 /* Here we can invert either operand. We invert the first operand
3714 unless the second operand is a TRUTH_NOT_EXPR in which case our
3715 result is the XOR of the first operand with the inside of the
3716 negation of the second operand. */
3718 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3719 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3720 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3721 else
3722 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3723 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3724 TREE_OPERAND (arg, 1));
3726 case TRUTH_ANDIF_EXPR:
3727 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3728 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3729 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3730 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3731 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3733 case TRUTH_ORIF_EXPR:
3734 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3735 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3736 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3737 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3738 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3740 case TRUTH_NOT_EXPR:
3741 return TREE_OPERAND (arg, 0);
3743 case COND_EXPR:
3745 tree arg1 = TREE_OPERAND (arg, 1);
3746 tree arg2 = TREE_OPERAND (arg, 2);
3748 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3749 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3751 /* A COND_EXPR may have a throw as one operand, which
3752 then has void type. Just leave void operands
3753 as they are. */
3754 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3755 VOID_TYPE_P (TREE_TYPE (arg1))
3756 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3757 VOID_TYPE_P (TREE_TYPE (arg2))
3758 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3761 case COMPOUND_EXPR:
3762 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3763 return build2_loc (loc, COMPOUND_EXPR, type,
3764 TREE_OPERAND (arg, 0),
3765 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3767 case NON_LVALUE_EXPR:
3768 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3769 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3771 CASE_CONVERT:
3772 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3773 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3775 /* fall through */
3777 case FLOAT_EXPR:
3778 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3779 return build1_loc (loc, TREE_CODE (arg), type,
3780 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3782 case BIT_AND_EXPR:
3783 if (!integer_onep (TREE_OPERAND (arg, 1)))
3784 return NULL_TREE;
3785 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3787 case SAVE_EXPR:
3788 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3790 case CLEANUP_POINT_EXPR:
3791 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3792 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3793 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3795 default:
3796 return NULL_TREE;
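/* Illustrative sketch with a hypothetical AND_EXPR: the negation is
   pushed inward by De Morgan's laws, so !(a && b) becomes !a || !b,
   while a floating-point a < b under -ftrapping-math yields
   NULL_TREE instead, preserving possible traps.  */
#if 0
  tree inv = fold_truth_not_expr (loc, and_expr);
  /* For AND_EXPR = a && b, INV is the tree for !a || !b.  */
#endif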
3800 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3801 assume that ARG is an operation that returns a truth value (0 or 1
3802 for scalars, 0 or -1 for vectors). Return the folded expression if
3803 folding is successful. Otherwise, return NULL_TREE. */
3805 static tree
3806 fold_invert_truthvalue (location_t loc, tree arg)
3808 tree type = TREE_TYPE (arg);
3809 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3810 ? BIT_NOT_EXPR
3811 : TRUTH_NOT_EXPR,
3812 type, arg);
3815 /* Return a simplified tree node for the truth-negation of ARG. This
3816 never alters ARG itself. We assume that ARG is an operation that
3817 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3819 tree
3820 invert_truthvalue_loc (location_t loc, tree arg)
3822 if (TREE_CODE (arg) == ERROR_MARK)
3823 return arg;
3825 tree type = TREE_TYPE (arg);
3826 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3827 ? BIT_NOT_EXPR
3828 : TRUTH_NOT_EXPR,
3829 type, arg);
3832 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3833 with code CODE. This optimization is unsafe. */
3834 static tree
3835 distribute_real_division (location_t loc, enum tree_code code, tree type,
3836 tree arg0, tree arg1)
3838 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3839 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3841 /* (A / C) +- (B / C) -> (A +- B) / C. */
3842 if (mul0 == mul1
3843 && operand_equal_p (TREE_OPERAND (arg0, 1),
3844 TREE_OPERAND (arg1, 1), 0))
3845 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3846 fold_build2_loc (loc, code, type,
3847 TREE_OPERAND (arg0, 0),
3848 TREE_OPERAND (arg1, 0)),
3849 TREE_OPERAND (arg0, 1));
3851 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3852 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3853 TREE_OPERAND (arg1, 0), 0)
3854 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3855 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3857 REAL_VALUE_TYPE r0, r1;
3858 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3859 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3860 if (!mul0)
3861 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3862 if (!mul1)
3863 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3864 real_arithmetic (&r0, code, &r0, &r1);
3865 return fold_build2_loc (loc, MULT_EXPR, type,
3866 TREE_OPERAND (arg0, 0),
3867 build_real (type, r0));
3870 return NULL_TREE;
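/* Editorial note, not in the original source: the "unsafe" label above
   refers to floating-point rounding.  E.g. a/3.0 + b/3.0 performs two
   rounded divisions and a rounded add, while (a + b)/3.0 rounds only
   twice, so the low-order bits can differ; likewise 1/C1 +- 1/C2 need
   not be exactly representable.  This is presumably why callers only
   use it under -funsafe-math-optimizations.  */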
3873 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3874 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3875 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
3876 is the original memory reference used to preserve the alias set of
3877 the access. */
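/* Editorial example, not in the original source: with TYPE =
   unsigned char, BITSIZE = 8 and BITPOS = 16, the node built below is
   conceptually BIT_FIELD_REF <INNER, 8, 16>, i.e. the third byte of
   INNER.  When BITPOS is 0, the storage order is not reversed and
   INNER is an integral object of exactly BITSIZE bits, the shortcut
   below returns a plain conversion of INNER instead.  */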
3879 static tree
3880 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3881 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3882 int unsignedp, int reversep)
3884 tree result, bftype;
3886 /* Attempt not to lose the access path if possible. */
3887 if (TREE_CODE (orig_inner) == COMPONENT_REF)
3889 tree ninner = TREE_OPERAND (orig_inner, 0);
3890 machine_mode nmode;
3891 HOST_WIDE_INT nbitsize, nbitpos;
3892 tree noffset;
3893 int nunsignedp, nreversep, nvolatilep = 0;
3894 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
3895 &noffset, &nmode, &nunsignedp,
3896 &nreversep, &nvolatilep);
3897 if (base == inner
3898 && noffset == NULL_TREE
3899 && nbitsize >= bitsize
3900 && nbitpos <= bitpos
3901 && bitpos + bitsize <= nbitpos + nbitsize
3902 && !reversep
3903 && !nreversep
3904 && !nvolatilep)
3906 inner = ninner;
3907 bitpos -= nbitpos;
3911 alias_set_type iset = get_alias_set (orig_inner);
3912 if (iset == 0 && get_alias_set (inner) != iset)
3913 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
3914 build_fold_addr_expr (inner),
3915 build_int_cst (ptr_type_node, 0));
3917 if (bitpos == 0 && !reversep)
3919 tree size = TYPE_SIZE (TREE_TYPE (inner));
3920 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3921 || POINTER_TYPE_P (TREE_TYPE (inner)))
3922 && tree_fits_shwi_p (size)
3923 && tree_to_shwi (size) == bitsize)
3924 return fold_convert_loc (loc, type, inner);
3927 bftype = type;
3928 if (TYPE_PRECISION (bftype) != bitsize
3929 || TYPE_UNSIGNED (bftype) == !unsignedp)
3930 bftype = build_nonstandard_integer_type (bitsize, 0);
3932 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3933 bitsize_int (bitsize), bitsize_int (bitpos));
3934 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3936 if (bftype != type)
3937 result = fold_convert_loc (loc, type, result);
3939 return result;
3942 /* Optimize a bit-field compare.
3944 There are two cases: First is a compare against a constant and the
3945 second is a comparison of two items where the fields are at the same
3946 bit position relative to the start of a chunk (byte, halfword, word)
3947 large enough to contain it. In these cases we can avoid the shift
3948 implicit in bitfield extractions.
3950 For constants, we emit a compare of the shifted constant with the
3951 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3952 compared. For two fields at the same position, we do the ANDs with the
3953 similar mask and compare the result of the ANDs.
3955 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3956 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3957 are the left and right operands of the comparison, respectively.
3959 If the optimization described above can be done, we return the resulting
3960 tree. Otherwise we return zero. */
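/* Editorial sketch of the transformation, not in the original source:
   given

     struct s { unsigned a : 3; unsigned b : 5; } x;

   the test  x.b == 7  would normally extract B with a shift.  On a
   little-endian target it can instead be folded to roughly

     (BIT_FIELD_REF <x, 8, 0> & 0xf8) == (7 << 3)

   i.e. load the containing byte, AND with the mask covering B's bits,
   and compare against the constant pre-shifted into place, avoiding
   the extraction shift.  Exact bit numbers depend on endianness and
   storage order.  */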
3962 static tree
3963 optimize_bit_field_compare (location_t loc, enum tree_code code,
3964 tree compare_type, tree lhs, tree rhs)
3966 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3967 tree type = TREE_TYPE (lhs);
3968 tree unsigned_type;
3969 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3970 machine_mode lmode, rmode, nmode;
3971 int lunsignedp, runsignedp;
3972 int lreversep, rreversep;
3973 int lvolatilep = 0, rvolatilep = 0;
3974 tree linner, rinner = NULL_TREE;
3975 tree mask;
3976 tree offset;
3978 /* Get all the information about the extractions being done. If the bit size
3979 is the same as the size of the underlying object, we aren't doing an
3980 extraction at all and so can do nothing. We also don't want to
3981 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3982 then will no longer be able to replace it. */
3983 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3984 &lunsignedp, &lreversep, &lvolatilep);
3985 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3986 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3987 return 0;
3989 if (const_p)
3990 rreversep = lreversep;
3991 else
3993 /* If this is not a constant, we can only do something if bit positions,
3994 sizes, signedness and storage order are the same. */
3995 rinner
3996 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3997 &runsignedp, &rreversep, &rvolatilep);
3999 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
4000 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
4001 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
4002 return 0;
4005 /* Honor the C++ memory model and mimic what RTL expansion does. */
4006 unsigned HOST_WIDE_INT bitstart = 0;
4007 unsigned HOST_WIDE_INT bitend = 0;
4008 if (TREE_CODE (lhs) == COMPONENT_REF)
4010 get_bit_range (&bitstart, &bitend, lhs, &lbitpos, &offset);
4011 if (offset != NULL_TREE)
4012 return 0;
4015 /* See if we can find a mode to refer to this field. We should be able to,
4016 but fail if we can't. */
4017 nmode = get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4018 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4019 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4020 TYPE_ALIGN (TREE_TYPE (rinner))),
4021 word_mode, false);
4022 if (nmode == VOIDmode)
4023 return 0;
4025 /* Set signed and unsigned types of the precision of this mode for the
4026 shifts below. */
4027 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4029 /* Compute the bit position and size for the new reference and our offset
4030 within it. If the new reference is the same size as the original, we
4031 won't optimize anything, so return zero. */
4032 nbitsize = GET_MODE_BITSIZE (nmode);
4033 nbitpos = lbitpos & ~ (nbitsize - 1);
4034 lbitpos -= nbitpos;
4035 if (nbitsize == lbitsize)
4036 return 0;
4038 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4039 lbitpos = nbitsize - lbitsize - lbitpos;
4041 /* Make the mask to be used against the extracted field. */
4042 mask = build_int_cst_type (unsigned_type, -1);
4043 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4044 mask = const_binop (RSHIFT_EXPR, mask,
4045 size_int (nbitsize - lbitsize - lbitpos));
4047 if (! const_p)
4048 /* If not comparing with a constant, just rework the comparison
4049 and return. */
4050 return fold_build2_loc (loc, code, compare_type,
4051 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4052 make_bit_field_ref (loc, linner, lhs,
4053 unsigned_type,
4054 nbitsize, nbitpos,
4055 1, lreversep),
4056 mask),
4057 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4058 make_bit_field_ref (loc, rinner, rhs,
4059 unsigned_type,
4060 nbitsize, nbitpos,
4061 1, rreversep),
4062 mask));
4064 /* Otherwise, we are handling the constant case. See if the constant is too
4065 big for the field. Warn and return a tree for 0 (false) if so. We do
4066 this not only for its own sake, but to avoid having to test for this
4067 error case below. If we didn't, we might generate wrong code.
4069 For unsigned fields, the constant shifted right by the field length should
4070 be all zero. For signed fields, the high-order bits should agree with
4071 the sign bit. */
4073 if (lunsignedp)
4075 if (wi::lrshift (rhs, lbitsize) != 0)
4077 warning (0, "comparison is always %d due to width of bit-field",
4078 code == NE_EXPR);
4079 return constant_boolean_node (code == NE_EXPR, compare_type);
4082 else
4084 wide_int tem = wi::arshift (rhs, lbitsize - 1);
4085 if (tem != 0 && tem != -1)
4087 warning (0, "comparison is always %d due to width of bit-field",
4088 code == NE_EXPR);
4089 return constant_boolean_node (code == NE_EXPR, compare_type);
4093 /* Single-bit compares should always be against zero. */
4094 if (lbitsize == 1 && ! integer_zerop (rhs))
4096 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4097 rhs = build_int_cst (type, 0);
4100 /* Make a new bitfield reference, shift the constant over the
4101 appropriate number of bits and mask it with the computed mask
4102 (in case this was a signed field). If we changed it, make a new one. */
4103 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4104 nbitsize, nbitpos, 1, lreversep);
4106 rhs = const_binop (BIT_AND_EXPR,
4107 const_binop (LSHIFT_EXPR,
4108 fold_convert_loc (loc, unsigned_type, rhs),
4109 size_int (lbitpos)),
4110 mask);
4112 lhs = build2_loc (loc, code, compare_type,
4113 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4114 return lhs;
4117 /* Subroutine for fold_truth_andor_1: decode a field reference.
4119 If EXP is a comparison reference, we return the innermost reference.
4121 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4122 set to the starting bit number.
4124 If the innermost field can be completely contained in a mode-sized
4125 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4127 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4128 otherwise it is not changed.
4130 *PUNSIGNEDP is set to the signedness of the field.
4132 *PREVERSEP is set to the storage order of the field.
4134 *PMASK is set to the mask used. This is either contained in a
4135 BIT_AND_EXPR or derived from the width of the field.
4137 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4139 Return 0 if this is not a component reference or is one that we can't
4140 do anything with. */
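/* Editorial example, not in the original source: for

     struct s { unsigned b : 8; } x;

   the expression  x.b & 0x0f  decodes to the inner reference `x' with
   *PBITSIZE = 8, *PBITPOS = 0 (little-endian layout assumed),
   *PAND_MASK = 0x0f and *PMASK = 0xff & 0x0f = 0x0f; without the
   BIT_AND_EXPR, *PMASK would be the full field mask 0xff.  */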
4142 static tree
4143 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4144 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4145 int *punsignedp, int *preversep, int *pvolatilep,
4146 tree *pmask, tree *pand_mask)
4148 tree exp = *exp_;
4149 tree outer_type = 0;
4150 tree and_mask = 0;
4151 tree mask, inner, offset;
4152 tree unsigned_type;
4153 unsigned int precision;
4155 /* All the optimizations using this function assume integer fields.
4156 There are problems with FP fields since the type_for_size call
4157 below can fail for, e.g., XFmode. */
4158 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4159 return 0;
4161 /* We are interested in the bare arrangement of bits, so strip everything
4162 that doesn't affect the machine mode. However, record the type of the
4163 outermost expression if it may matter below. */
4164 if (CONVERT_EXPR_P (exp)
4165 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4166 outer_type = TREE_TYPE (exp);
4167 STRIP_NOPS (exp);
4169 if (TREE_CODE (exp) == BIT_AND_EXPR)
4171 and_mask = TREE_OPERAND (exp, 1);
4172 exp = TREE_OPERAND (exp, 0);
4173 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4174 if (TREE_CODE (and_mask) != INTEGER_CST)
4175 return 0;
4178 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4179 punsignedp, preversep, pvolatilep);
4180 if ((inner == exp && and_mask == 0)
4181 || *pbitsize < 0 || offset != 0
4182 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4183 /* Reject out-of-bound accesses (PR79731). */
4184 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4185 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4186 *pbitpos + *pbitsize) < 0))
4187 return 0;
4189 *exp_ = exp;
4191 /* If the number of bits in the reference is the same as the bitsize of
4192 the outer type, then the outer type gives the signedness. Otherwise
4193 (in case of a small bitfield) the signedness is unchanged. */
4194 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4195 *punsignedp = TYPE_UNSIGNED (outer_type);
4197 /* Compute the mask to access the bitfield. */
4198 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4199 precision = TYPE_PRECISION (unsigned_type);
4201 mask = build_int_cst_type (unsigned_type, -1);
4203 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4204 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4206 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4207 if (and_mask != 0)
4208 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4209 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4211 *pmask = mask;
4212 *pand_mask = and_mask;
4213 return inner;
4216 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4217 bit positions and the type of MASK is signed.
4219 static int
4220 all_ones_mask_p (const_tree mask, unsigned int size)
4222 tree type = TREE_TYPE (mask);
4223 unsigned int precision = TYPE_PRECISION (type);
4225 /* If this function returns true when the type of the mask is
4226 UNSIGNED, then there will be errors. In particular see
4227 gcc.c-torture/execute/990326-1.c. There does not appear to be
4228 any documentation paper trail as to why this is so. But the
4229 pre-wide-int code worked with that restriction and it has been preserved
4230 here. */
4231 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4232 return false;
4234 return wi::mask (size, false, precision) == mask;
4237 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4238 represents the sign bit of EXP's type. If EXP represents a sign
4239 or zero extension, also test VAL against the unextended type.
4240 The return value is the (sub)expression whose sign bit is VAL,
4241 or NULL_TREE otherwise. */
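/* Editorial example, not in the original source: for a 32-bit int X,
   sign_bit_p (X, 0x80000000) returns X.  If EXP is (int) C with C of
   type signed char, VAL is also tested against the 8-bit sign bit, so
   sign_bit_p ((int) C, 0x80) returns the narrower subexpression C.  */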
4243 tree
4244 sign_bit_p (tree exp, const_tree val)
4246 int width;
4247 tree t;
4249 /* Tree EXP must have an integral type. */
4250 t = TREE_TYPE (exp);
4251 if (! INTEGRAL_TYPE_P (t))
4252 return NULL_TREE;
4254 /* Tree VAL must be an integer constant. */
4255 if (TREE_CODE (val) != INTEGER_CST
4256 || TREE_OVERFLOW (val))
4257 return NULL_TREE;
4259 width = TYPE_PRECISION (t);
4260 if (wi::only_sign_bit_p (val, width))
4261 return exp;
4263 /* Handle extension from a narrower type. */
4264 if (TREE_CODE (exp) == NOP_EXPR
4265 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4266 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4268 return NULL_TREE;
4271 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4272 to be evaluated unconditionally. */
4274 static int
4275 simple_operand_p (const_tree exp)
4277 /* Strip any conversions that don't change the machine mode. */
4278 STRIP_NOPS (exp);
4280 return (CONSTANT_CLASS_P (exp)
4281 || TREE_CODE (exp) == SSA_NAME
4282 || (DECL_P (exp)
4283 && ! TREE_ADDRESSABLE (exp)
4284 && ! TREE_THIS_VOLATILE (exp)
4285 && ! DECL_NONLOCAL (exp)
4286 /* Don't regard global variables as simple. They may be
4287 allocated in ways unknown to the compiler (shared memory,
4288 #pragma weak, etc). */
4289 && ! TREE_PUBLIC (exp)
4290 && ! DECL_EXTERNAL (exp)
4291 /* Weakrefs are not safe to be read, since they can be NULL.
4292 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4293 have DECL_WEAK flag set. */
4294 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4295 /* Loading a static variable is unduly expensive, but global
4296 registers aren't expensive. */
4297 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4300 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4301 to be evaluated unconditionally.
4302 In addition to simple_operand_p, we assume that comparisons, conversions,
4303 and logic-not operations are simple, if their operands are simple, too. */
4305 static bool
4306 simple_operand_p_2 (tree exp)
4308 enum tree_code code;
4310 if (TREE_SIDE_EFFECTS (exp)
4311 || tree_could_trap_p (exp))
4312 return false;
4314 while (CONVERT_EXPR_P (exp))
4315 exp = TREE_OPERAND (exp, 0);
4317 code = TREE_CODE (exp);
4319 if (TREE_CODE_CLASS (code) == tcc_comparison)
4320 return (simple_operand_p (TREE_OPERAND (exp, 0))
4321 && simple_operand_p (TREE_OPERAND (exp, 1)));
4323 if (code == TRUTH_NOT_EXPR)
4324 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4326 return simple_operand_p (exp);
4330 /* The following functions are subroutines to fold_range_test and allow it to
4331 try to change a logical combination of comparisons into a range test.
4333 For example, both
4334 X == 2 || X == 3 || X == 4 || X == 5
4335 and
4336 X >= 2 && X <= 5
4337 are converted to
4338 (unsigned) (X - 2) <= 3
4340 We describe each set of comparisons as being either inside or outside
4341 a range, using a variable named like IN_P, and then describe the
4342 range with a lower and upper bound. If one of the bounds is omitted,
4343 it represents either the highest or lowest value of the type.
4345 In the comments below, we represent a range by two numbers in brackets
4346 preceded by a "+" to designate being inside that range, or a "-" to
4347 designate being outside that range, so the condition can be inverted by
4348 flipping the prefix. An omitted bound is represented by a "-". For
4349 example, "- [-, 10]" means being outside the range starting at the lowest
4350 possible value and ending at 10, in other words, being greater than 10.
4351 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4352 always false.
4354 We set up things so that the missing bounds are handled in a consistent
4355 manner so neither a missing bound nor "true" and "false" need to be
4356 handled using a special case. */
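/* Worked example (editorial, not in the original source): the test
   X == 2 || X == 3 || X == 4 || X == 5 is "+ [2, 5]"; flipping the
   prefix gives "- [2, 5]", i.e. X < 2 || X > 5.  Subtracting the low
   bound and comparing unsigned implements the "+" form in one test:

     (unsigned) (X - 2) <= (unsigned) (5 - 2)

   since values below 2 wrap around to large unsigned numbers.  */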
4358 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4359 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4360 and UPPER1_P are nonzero if the respective argument is an upper bound
4361 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4362 must be specified for a comparison. ARG1 will be converted to ARG0's
4363 type if both are specified. */
4365 static tree
4366 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4367 tree arg1, int upper1_p)
4369 tree tem;
4370 int result;
4371 int sgn0, sgn1;
4373 /* If neither arg represents infinity, do the normal operation.
4374 Else, if not a comparison, return infinity. Else handle the special
4375 comparison rules. Note that most of the cases below won't occur, but
4376 are handled for consistency. */
4378 if (arg0 != 0 && arg1 != 0)
4380 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4381 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4382 STRIP_NOPS (tem);
4383 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4386 if (TREE_CODE_CLASS (code) != tcc_comparison)
4387 return 0;
4389 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4390 for neither. In real maths, we cannot assume open ended ranges are
4391 the same. But, this is computer arithmetic, where numbers are finite.
4392 We can therefore make the transformation of any unbounded range with
4393 the value Z, Z being greater than any representable number. This permits
4394 us to treat unbounded ranges as equal. */
4395 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4396 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4397 switch (code)
4399 case EQ_EXPR:
4400 result = sgn0 == sgn1;
4401 break;
4402 case NE_EXPR:
4403 result = sgn0 != sgn1;
4404 break;
4405 case LT_EXPR:
4406 result = sgn0 < sgn1;
4407 break;
4408 case LE_EXPR:
4409 result = sgn0 <= sgn1;
4410 break;
4411 case GT_EXPR:
4412 result = sgn0 > sgn1;
4413 break;
4414 case GE_EXPR:
4415 result = sgn0 >= sgn1;
4416 break;
4417 default:
4418 gcc_unreachable ();
4421 return constant_boolean_node (result, type);
4424 /* Helper routine for make_range. Perform one step for it, return
4425 new expression if the loop should continue or NULL_TREE if it should
4426 stop. */
4428 tree
4429 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4430 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4431 bool *strict_overflow_p)
4433 tree arg0_type = TREE_TYPE (arg0);
4434 tree n_low, n_high, low = *p_low, high = *p_high;
4435 int in_p = *p_in_p, n_in_p;
4437 switch (code)
4439 case TRUTH_NOT_EXPR:
4440 /* We can only do something if the range is testing for zero. */
4441 if (low == NULL_TREE || high == NULL_TREE
4442 || ! integer_zerop (low) || ! integer_zerop (high))
4443 return NULL_TREE;
4444 *p_in_p = ! in_p;
4445 return arg0;
4447 case EQ_EXPR: case NE_EXPR:
4448 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4449 /* We can only do something if the range is testing for zero
4450 and if the second operand is an integer constant. Note that
4451 saying something is "in" the range we make is done by
4452 complementing IN_P since it will be set in the initial case of
4453 being not equal to zero; "out" is leaving it alone. */
4454 if (low == NULL_TREE || high == NULL_TREE
4455 || ! integer_zerop (low) || ! integer_zerop (high)
4456 || TREE_CODE (arg1) != INTEGER_CST)
4457 return NULL_TREE;
4459 switch (code)
4461 case NE_EXPR: /* - [c, c] */
4462 low = high = arg1;
4463 break;
4464 case EQ_EXPR: /* + [c, c] */
4465 in_p = ! in_p, low = high = arg1;
4466 break;
4467 case GT_EXPR: /* - [-, c] */
4468 low = 0, high = arg1;
4469 break;
4470 case GE_EXPR: /* + [c, -] */
4471 in_p = ! in_p, low = arg1, high = 0;
4472 break;
4473 case LT_EXPR: /* - [c, -] */
4474 low = arg1, high = 0;
4475 break;
4476 case LE_EXPR: /* + [-, c] */
4477 in_p = ! in_p, low = 0, high = arg1;
4478 break;
4479 default:
4480 gcc_unreachable ();
4483 /* If this is an unsigned comparison, we also know that EXP is
4484 greater than or equal to zero. We base the range tests we make
4485 on that fact, so we record it here so we can parse existing
4486 range tests. We test arg0_type since often the return type
4487 of, e.g. EQ_EXPR, is boolean. */
4488 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4490 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4491 in_p, low, high, 1,
4492 build_int_cst (arg0_type, 0),
4493 NULL_TREE))
4494 return NULL_TREE;
4496 in_p = n_in_p, low = n_low, high = n_high;
4498 /* If the high bound is missing, but we have a nonzero low
4499 bound, reverse the range so it goes from zero to the low bound
4500 minus 1. */
4501 if (high == 0 && low && ! integer_zerop (low))
4503 in_p = ! in_p;
4504 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4505 build_int_cst (TREE_TYPE (low), 1), 0);
4506 low = build_int_cst (arg0_type, 0);
4510 *p_low = low;
4511 *p_high = high;
4512 *p_in_p = in_p;
4513 return arg0;
4515 case NEGATE_EXPR:
4516 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4517 low and high are non-NULL, then normalize will DTRT. */
4518 if (!TYPE_UNSIGNED (arg0_type)
4519 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4521 if (low == NULL_TREE)
4522 low = TYPE_MIN_VALUE (arg0_type);
4523 if (high == NULL_TREE)
4524 high = TYPE_MAX_VALUE (arg0_type);
4527 /* (-x) IN [a,b] -> x in [-b, -a] */
4528 n_low = range_binop (MINUS_EXPR, exp_type,
4529 build_int_cst (exp_type, 0),
4530 0, high, 1);
4531 n_high = range_binop (MINUS_EXPR, exp_type,
4532 build_int_cst (exp_type, 0),
4533 0, low, 0);
4534 if (n_high != 0 && TREE_OVERFLOW (n_high))
4535 return NULL_TREE;
4536 goto normalize;
4538 case BIT_NOT_EXPR:
4539 /* ~ X -> -X - 1 */
4540 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4541 build_int_cst (exp_type, 1));
4543 case PLUS_EXPR:
4544 case MINUS_EXPR:
4545 if (TREE_CODE (arg1) != INTEGER_CST)
4546 return NULL_TREE;
4548 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4549 move a constant to the other side. */
4550 if (!TYPE_UNSIGNED (arg0_type)
4551 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4552 return NULL_TREE;
4554 /* If EXP is signed, any overflow in the computation is undefined,
4555 so we don't worry about it so long as our computations on
4556 the bounds don't overflow. For unsigned, overflow is defined
4557 and this is exactly the right thing. */
4558 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4559 arg0_type, low, 0, arg1, 0);
4560 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4561 arg0_type, high, 1, arg1, 0);
4562 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4563 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4564 return NULL_TREE;
4566 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4567 *strict_overflow_p = true;
4569 normalize:
4570 /* Check for an unsigned range which has wrapped around the maximum
4571 value thus making n_high < n_low, and normalize it. */
4572 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4574 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4575 build_int_cst (TREE_TYPE (n_high), 1), 0);
4576 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4577 build_int_cst (TREE_TYPE (n_low), 1), 0);
4579 /* If the range is of the form +/- [ x+1, x ], we won't
4580 be able to normalize it. But then, it represents the
4581 whole range or the empty set, so make it
4582 +/- [ -, - ]. */
4583 if (tree_int_cst_equal (n_low, low)
4584 && tree_int_cst_equal (n_high, high))
4585 low = high = 0;
4586 else
4587 in_p = ! in_p;
4589 else
4590 low = n_low, high = n_high;
4592 *p_low = low;
4593 *p_high = high;
4594 *p_in_p = in_p;
4595 return arg0;
4597 CASE_CONVERT:
4598 case NON_LVALUE_EXPR:
4599 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4600 return NULL_TREE;
4602 if (! INTEGRAL_TYPE_P (arg0_type)
4603 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4604 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4605 return NULL_TREE;
4607 n_low = low, n_high = high;
4609 if (n_low != 0)
4610 n_low = fold_convert_loc (loc, arg0_type, n_low);
4612 if (n_high != 0)
4613 n_high = fold_convert_loc (loc, arg0_type, n_high);
4615 /* If we're converting arg0 from an unsigned type to exp,
4616 a signed type, we will be doing the comparison as unsigned.
4617 The tests above have already verified that LOW and HIGH
4618 are both positive.
4620 So we have to ensure that we will handle large unsigned
4621 values the same way that the current signed bounds treat
4622 negative values. */
4624 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4626 tree high_positive;
4627 tree equiv_type;
4628 /* For fixed-point modes, we need to pass the saturating flag
4629 as the 2nd parameter. */
4630 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4631 equiv_type
4632 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4633 TYPE_SATURATING (arg0_type));
4634 else
4635 equiv_type
4636 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4638 /* A range without an upper bound is, naturally, unbounded.
4639 Since convert would have cropped a very large value, use
4640 the max value for the destination type. */
4641 high_positive
4642 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4643 : TYPE_MAX_VALUE (arg0_type);
4645 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4646 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4647 fold_convert_loc (loc, arg0_type,
4648 high_positive),
4649 build_int_cst (arg0_type, 1));
4651 /* If the low bound is specified, "and" the range with the
4652 range for which the original unsigned value will be
4653 positive. */
4654 if (low != 0)
4656 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4657 1, fold_convert_loc (loc, arg0_type,
4658 integer_zero_node),
4659 high_positive))
4660 return NULL_TREE;
4662 in_p = (n_in_p == in_p);
4664 else
4666 /* Otherwise, "or" the range with the range of the input
4667 that will be interpreted as negative. */
4668 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4669 1, fold_convert_loc (loc, arg0_type,
4670 integer_zero_node),
4671 high_positive))
4672 return NULL_TREE;
4674 in_p = (in_p != n_in_p);
4678 *p_low = n_low;
4679 *p_high = n_high;
4680 *p_in_p = in_p;
4681 return arg0;
4683 default:
4684 return NULL_TREE;
4688 /* Given EXP, a logical expression, set the range it is testing into
4689 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4690 actually being tested. *PLOW and *PHIGH will be made of the same
4691 type as the returned expression. If EXP is not a comparison, we
4692 will most likely not be returning a useful value and range. Set
4693 *STRICT_OVERFLOW_P to true if the return value is only valid
4694 because signed overflow is undefined; otherwise, do not change
4695 *STRICT_OVERFLOW_P. */
4697 tree
4698 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4699 bool *strict_overflow_p)
4701 enum tree_code code;
4702 tree arg0, arg1 = NULL_TREE;
4703 tree exp_type, nexp;
4704 int in_p;
4705 tree low, high;
4706 location_t loc = EXPR_LOCATION (exp);
4708 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4709 and see if we can refine the range. Some of the cases below may not
4710 happen, but it doesn't seem worth worrying about this. We "continue"
4711 the outer loop when we've changed something; otherwise we "break"
4712 the switch, which will "break" the while. */
4714 in_p = 0;
4715 low = high = build_int_cst (TREE_TYPE (exp), 0);
4717 while (1)
4719 code = TREE_CODE (exp);
4720 exp_type = TREE_TYPE (exp);
4721 arg0 = NULL_TREE;
4723 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4725 if (TREE_OPERAND_LENGTH (exp) > 0)
4726 arg0 = TREE_OPERAND (exp, 0);
4727 if (TREE_CODE_CLASS (code) == tcc_binary
4728 || TREE_CODE_CLASS (code) == tcc_comparison
4729 || (TREE_CODE_CLASS (code) == tcc_expression
4730 && TREE_OPERAND_LENGTH (exp) > 1))
4731 arg1 = TREE_OPERAND (exp, 1);
4733 if (arg0 == NULL_TREE)
4734 break;
4736 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4737 &high, &in_p, strict_overflow_p);
4738 if (nexp == NULL_TREE)
4739 break;
4740 exp = nexp;
4743 /* If EXP is a constant, we can evaluate whether this is true or false. */
4744 if (TREE_CODE (exp) == INTEGER_CST)
4746 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4747 exp, 0, low, 0))
4748 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4749 exp, 1, high, 1)));
4750 low = high = 0;
4751 exp = 0;
4754 *pin_p = in_p, *plow = low, *phigh = high;
4755 return exp;
4758 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
4759 a bitwise check i.e. when
4760 LOW == 0xXX...X00...0
4761 HIGH == 0xXX...X11...1
4762 Return corresponding mask in MASK and stem in VALUE. */
4764 static bool
4765 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
4766 tree *value)
4768 if (TREE_CODE (low) != INTEGER_CST
4769 || TREE_CODE (high) != INTEGER_CST)
4770 return false;
4772 unsigned prec = TYPE_PRECISION (type);
4773 wide_int lo = wi::to_wide (low, prec);
4774 wide_int hi = wi::to_wide (high, prec);
4776 wide_int end_mask = lo ^ hi;
4777 if ((end_mask & (end_mask + 1)) != 0
4778 || (lo & end_mask) != 0)
4779 return false;
4781 wide_int stem_mask = ~end_mask;
4782 wide_int stem = lo & stem_mask;
4783 if (stem != (hi & stem_mask))
4784 return false;
4786 *mask = wide_int_to_tree (type, stem_mask);
4787 *value = wide_int_to_tree (type, stem);
4789 return true;
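/* Editorial example, not in the original source: in an 8-bit unsigned
   type, LOW = 0x20 and HIGH = 0x3f differ only in the five low bits:
   end_mask = 0x1f, stem_mask = 0xe0 and stem = 0x20, so the range
   check  0x20 <= x && x <= 0x3f  becomes  (x & 0xe0) == 0x20.
   LOW = 0x21, HIGH = 0x3f is rejected because 0x21 ^ 0x3f = 0x1e is
   not a mask of contiguous low-order ones.  */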
4792 /* Helper routine for build_range_check and match.pd. Return the type to
4793 perform the check or NULL if it shouldn't be optimized. */
4795 tree
4796 range_check_type (tree etype)
4798 /* First make sure that arithmetic in this type is valid, then make sure
4799 that it wraps around. */
4800 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4801 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4802 TYPE_UNSIGNED (etype));
4804 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4806 tree utype, minv, maxv;
4808 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4809 for the type in question, as we rely on this here. */
4810 utype = unsigned_type_for (etype);
4811 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4812 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4813 build_int_cst (TREE_TYPE (maxv), 1), 1);
4814 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4816 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4817 minv, 1, maxv, 1)))
4818 etype = utype;
4819 else
4820 return NULL_TREE;
4822 return etype;
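/* Editorial example, not in the original source: for 32-bit int with
   the default signed-overflow rules, range_check_type returns
   unsigned int, where (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
   holds because 0x7fffffff + 1 wraps to 0x80000000; a signed range
   like [-10, 10] can then be shifted to [0, 20] without risking
   undefined overflow.  */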
4825 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4826 type, TYPE, return an expression to test if EXP is in (or out of, depending
4827 on IN_P) the range. Return 0 if the test couldn't be created. */
4829 tree
4830 build_range_check (location_t loc, tree type, tree exp, int in_p,
4831 tree low, tree high)
4833 tree etype = TREE_TYPE (exp), mask, value;
4835 /* Disable this optimization for function pointer expressions
4836 on targets that require function pointer canonicalization. */
4837 if (targetm.have_canonicalize_funcptr_for_compare ()
4838 && TREE_CODE (etype) == POINTER_TYPE
4839 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4840 return NULL_TREE;
4842 if (! in_p)
4844 value = build_range_check (loc, type, exp, 1, low, high);
4845 if (value != 0)
4846 return invert_truthvalue_loc (loc, value);
4848 return 0;
4851 if (low == 0 && high == 0)
4852 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4854 if (low == 0)
4855 return fold_build2_loc (loc, LE_EXPR, type, exp,
4856 fold_convert_loc (loc, etype, high));
4858 if (high == 0)
4859 return fold_build2_loc (loc, GE_EXPR, type, exp,
4860 fold_convert_loc (loc, etype, low));
4862 if (operand_equal_p (low, high, 0))
4863 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4864 fold_convert_loc (loc, etype, low));
4866 if (TREE_CODE (exp) == BIT_AND_EXPR
4867 && maskable_range_p (low, high, etype, &mask, &value))
4868 return fold_build2_loc (loc, EQ_EXPR, type,
4869 fold_build2_loc (loc, BIT_AND_EXPR, etype,
4870 exp, mask),
4871 value);
4873 if (integer_zerop (low))
4875 if (! TYPE_UNSIGNED (etype))
4877 etype = unsigned_type_for (etype);
4878 high = fold_convert_loc (loc, etype, high);
4879 exp = fold_convert_loc (loc, etype, exp);
4881 return build_range_check (loc, type, exp, 1, 0, high);
4884 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4885 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4887 int prec = TYPE_PRECISION (etype);
4889 if (wi::mask (prec - 1, false, prec) == high)
4891 if (TYPE_UNSIGNED (etype))
4893 tree signed_etype = signed_type_for (etype);
4894 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4895 etype
4896 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4897 else
4898 etype = signed_etype;
4899 exp = fold_convert_loc (loc, etype, exp);
4901 return fold_build2_loc (loc, GT_EXPR, type, exp,
4902 build_int_cst (etype, 0));
4906 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4907 This requires wrap-around arithmetic for the type of the expression. */
4908 etype = range_check_type (etype);
4909 if (etype == NULL_TREE)
4910 return NULL_TREE;
4912 if (POINTER_TYPE_P (etype))
4913 etype = unsigned_type_for (etype);
4915 high = fold_convert_loc (loc, etype, high);
4916 low = fold_convert_loc (loc, etype, low);
4917 exp = fold_convert_loc (loc, etype, exp);
4919 value = const_binop (MINUS_EXPR, high, low);
4921 if (value != 0 && !TREE_OVERFLOW (value))
4922 return build_range_check (loc, type,
4923 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4924 1, build_int_cst (etype, 0), value);
4926 return 0;
4929 /* Return the predecessor of VAL in its type, handling the infinite case. */
4931 static tree
4932 range_predecessor (tree val)
4934 tree type = TREE_TYPE (val);
4936 if (INTEGRAL_TYPE_P (type)
4937 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4938 return 0;
4939 else
4940 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4941 build_int_cst (TREE_TYPE (val), 1), 0);
4944 /* Return the successor of VAL in its type, handling the infinite case. */
4946 static tree
4947 range_successor (tree val)
4949 tree type = TREE_TYPE (val);
4951 if (INTEGRAL_TYPE_P (type)
4952 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4953 return 0;
4954 else
4955 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4956 build_int_cst (TREE_TYPE (val), 1), 0);
4959 /* Given two ranges, see if we can merge them into one. Return 1 if we
4960 can, 0 if we can't. Set the output range into the specified parameters. */
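/* Editorial examples, not in the original source: merging "+ [2, 5]"
   with "+ [4, 9]" (both tests must hold) yields "+ [4, 5]"; merging
   "+ [2, 5]" with "- [4, 9]" yields "+ [2, 3]".  Some combinations
   are not a single range, e.g. "- [2, 3]" with "- [6, 7]" excludes
   two non-adjacent islands, and the function returns 0.  */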
4962 bool
4963 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4964 tree high0, int in1_p, tree low1, tree high1)
4966 int no_overlap;
4967 int subset;
4968 int temp;
4969 tree tem;
4970 int in_p;
4971 tree low, high;
4972 int lowequal = ((low0 == 0 && low1 == 0)
4973 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4974 low0, 0, low1, 0)));
4975 int highequal = ((high0 == 0 && high1 == 0)
4976 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4977 high0, 1, high1, 1)));
4979 /* Make range 0 be the range that starts first, or ends last if they
4980 start at the same value. Swap them if it isn't. */
4981 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4982 low0, 0, low1, 0))
4983 || (lowequal
4984 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4985 high1, 1, high0, 1))))
4987 temp = in0_p, in0_p = in1_p, in1_p = temp;
4988 tem = low0, low0 = low1, low1 = tem;
4989 tem = high0, high0 = high1, high1 = tem;
4992 /* Now flag two cases, whether the ranges are disjoint or whether the
4993 second range is totally subsumed in the first. Note that the tests
4994 below are simplified by the ones above. */
4995 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4996 high0, 1, low1, 0));
4997 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4998 high1, 1, high0, 1));
5000 /* We now have four cases, depending on whether we are including or
5001 excluding the two ranges. */
5002 if (in0_p && in1_p)
5004 /* If they don't overlap, the result is false. If the second range
5005 is a subset it is the result. Otherwise, the range is from the start
5006 of the second to the end of the first. */
5007 if (no_overlap)
5008 in_p = 0, low = high = 0;
5009 else if (subset)
5010 in_p = 1, low = low1, high = high1;
5011 else
5012 in_p = 1, low = low1, high = high0;
5015 else if (in0_p && ! in1_p)
5017 /* If they don't overlap, the result is the first range. If they are
5018 equal, the result is false. If the second range is a subset of the
5019 first, and the ranges begin at the same place, we go from just after
5020 the end of the second range to the end of the first. If the second
5021 range is not a subset of the first, or if it is a subset and both
5022 ranges end at the same place, the range starts at the start of the
5023 first range and ends just before the second range.
5024 Otherwise, we can't describe this as a single range. */
5025 if (no_overlap)
5026 in_p = 1, low = low0, high = high0;
5027 else if (lowequal && highequal)
5028 in_p = 0, low = high = 0;
5029 else if (subset && lowequal)
5031 low = range_successor (high1);
5032 high = high0;
5033 in_p = 1;
5034 if (low == 0)
5036 /* We are in the weird situation where high0 > high1 but
5037 high1 has no successor. Punt. */
5038 return 0;
5041 else if (! subset || highequal)
5043 low = low0;
5044 high = range_predecessor (low1);
5045 in_p = 1;
5046 if (high == 0)
5048 /* low0 < low1 but low1 has no predecessor. Punt. */
5049 return 0;
5052 else
5053 return 0;
5056 else if (! in0_p && in1_p)
5058 /* If they don't overlap, the result is the second range. If the second
5059 is a subset of the first, the result is false. Otherwise,
5060 the range starts just after the first range and ends at the
5061 end of the second. */
5062 if (no_overlap)
5063 in_p = 1, low = low1, high = high1;
5064 else if (subset || highequal)
5065 in_p = 0, low = high = 0;
5066 else
5068 low = range_successor (high0);
5069 high = high1;
5070 in_p = 1;
5071 if (low == 0)
5073 /* high1 > high0 but high0 has no successor. Punt. */
5074 return 0;
5079 else
5081 /* The case where we are excluding both ranges. Here the complex case
5082 is if they don't overlap. In that case, the only time we have a
5083 range is if they are adjacent. If the second is a subset of the
5084 first, the result is the first. Otherwise, the range to exclude
5085 starts at the beginning of the first range and ends at the end of the
5086 second. */
5087 if (no_overlap)
5089 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5090 range_successor (high0),
5091 1, low1, 0)))
5092 in_p = 0, low = low0, high = high1;
5093 else
5095 /* Canonicalize - [min, x] into - [-, x]. */
5096 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5097 switch (TREE_CODE (TREE_TYPE (low0)))
5099 case ENUMERAL_TYPE:
5100 if (TYPE_PRECISION (TREE_TYPE (low0))
5101 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5102 break;
5103 /* FALLTHROUGH */
5104 case INTEGER_TYPE:
5105 if (tree_int_cst_equal (low0,
5106 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5107 low0 = 0;
5108 break;
5109 case POINTER_TYPE:
5110 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5111 && integer_zerop (low0))
5112 low0 = 0;
5113 break;
5114 default:
5115 break;
5118 /* Canonicalize - [x, max] into - [x, -]. */
5119 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5120 switch (TREE_CODE (TREE_TYPE (high1)))
5122 case ENUMERAL_TYPE:
5123 if (TYPE_PRECISION (TREE_TYPE (high1))
5124 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5125 break;
5126 /* FALLTHROUGH */
5127 case INTEGER_TYPE:
5128 if (tree_int_cst_equal (high1,
5129 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5130 high1 = 0;
5131 break;
5132 case POINTER_TYPE:
5133 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5134 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5135 high1, 1,
5136 build_int_cst (TREE_TYPE (high1), 1),
5137 1)))
5138 high1 = 0;
5139 break;
5140 default:
5141 break;
5144 /* The ranges might also be adjacent between the maximum and
5145 minimum values of the given type. For
5146 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5147 return + [x + 1, y - 1]. */
5148 if (low0 == 0 && high1 == 0)
5150 low = range_successor (high0);
5151 high = range_predecessor (low1);
5152 if (low == 0 || high == 0)
5153 return 0;
5155 in_p = 1;
5157 else
5158 return 0;
5161 else if (subset)
5162 in_p = 0, low = low0, high = high0;
5163 else
5164 in_p = 0, low = low0, high = high1;
5167 *pin_p = in_p, *plow = low, *phigh = high;
5168 return 1;
5172 /* Subroutine of fold, looking inside expressions of the form
5173 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5174 of the COND_EXPR. This function is being used also to optimize
5175 A op B ? C : A, by reversing the comparison first.
5177 Return a folded expression whose code is not a COND_EXPR
5178 anymore, or NULL_TREE if no folding opportunity is found. */
5180 static tree
5181 fold_cond_expr_with_comparison (location_t loc, tree type,
5182 tree arg0, tree arg1, tree arg2)
5184 enum tree_code comp_code = TREE_CODE (arg0);
5185 tree arg00 = TREE_OPERAND (arg0, 0);
5186 tree arg01 = TREE_OPERAND (arg0, 1);
5187 tree arg1_type = TREE_TYPE (arg1);
5188 tree tem;
5190 STRIP_NOPS (arg1);
5191 STRIP_NOPS (arg2);
5193 /* If we have A op 0 ? A : -A, consider applying the following
5194 transformations:
5196 A == 0? A : -A same as -A
5197 A != 0? A : -A same as A
5198 A >= 0? A : -A same as abs (A)
5199 A > 0? A : -A same as abs (A)
5200 A <= 0? A : -A same as -abs (A)
5201 A < 0? A : -A same as -abs (A)
5203 None of these transformations work for modes with signed
5204 zeros. If A is +/-0, the first two transformations will
5205 change the sign of the result (from +0 to -0, or vice
5206 versa). The last four will fix the sign of the result,
5207 even though the original expressions could be positive or
5208 negative, depending on the sign of A.
5210 Note that all these transformations are correct if A is
5211 NaN, since the two alternatives (A and -A) are also NaNs. */
5212 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5213 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5214 ? real_zerop (arg01)
5215 : integer_zerop (arg01))
5216 && ((TREE_CODE (arg2) == NEGATE_EXPR
5217 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5218 /* In the case that A is of the form X-Y, '-A' (arg2) may
5219 have already been folded to Y-X, check for that. */
5220 || (TREE_CODE (arg1) == MINUS_EXPR
5221 && TREE_CODE (arg2) == MINUS_EXPR
5222 && operand_equal_p (TREE_OPERAND (arg1, 0),
5223 TREE_OPERAND (arg2, 1), 0)
5224 && operand_equal_p (TREE_OPERAND (arg1, 1),
5225 TREE_OPERAND (arg2, 0), 0))))
5226 switch (comp_code)
5228 case EQ_EXPR:
5229 case UNEQ_EXPR:
5230 tem = fold_convert_loc (loc, arg1_type, arg1);
5231 return fold_convert_loc (loc, type, negate_expr (tem));
5232 case NE_EXPR:
5233 case LTGT_EXPR:
5234 return fold_convert_loc (loc, type, arg1);
5235 case UNGE_EXPR:
5236 case UNGT_EXPR:
5237 if (flag_trapping_math)
5238 break;
5239 /* Fall through. */
5240 case GE_EXPR:
5241 case GT_EXPR:
5242 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5243 break;
5244 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5245 return fold_convert_loc (loc, type, tem);
5246 case UNLE_EXPR:
5247 case UNLT_EXPR:
5248 if (flag_trapping_math)
5249 break;
5250 /* FALLTHRU */
5251 case LE_EXPR:
5252 case LT_EXPR:
5253 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5254 break;
5255 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5256 return negate_expr (fold_convert_loc (loc, type, tem));
5257 default:
5258 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5259 break;
5262 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5263 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5264 both transformations are correct when A is NaN: A != 0
5265 is then true, and A == 0 is false. */
5267 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5268 && integer_zerop (arg01) && integer_zerop (arg2))
5270 if (comp_code == NE_EXPR)
5271 return fold_convert_loc (loc, type, arg1);
5272 else if (comp_code == EQ_EXPR)
5273 return build_zero_cst (type);
5276 /* Try some transformations of A op B ? A : B.
5278 A == B? A : B same as B
5279 A != B? A : B same as A
5280 A >= B? A : B same as max (A, B)
5281 A > B? A : B same as max (B, A)
5282 A <= B? A : B same as min (A, B)
5283 A < B? A : B same as min (B, A)
5285 As above, these transformations don't work in the presence
5286 of signed zeros. For example, if A and B are zeros of
5287 opposite sign, the first two transformations will change
5288 the sign of the result. In the last four, the original
5289 expressions give different results for (A=+0, B=-0) and
5290 (A=-0, B=+0), but the transformed expressions do not.
5292 The first two transformations are correct if either A or B
5293 is a NaN. In the first transformation, the condition will
5294 be false, and B will indeed be chosen. In the case of the
5295 second transformation, the condition A != B will be true,
5296 and A will be chosen.
5298 The conversions to max() and min() are not correct if B is
5299 a number and A is not. The conditions in the original
5300 expressions will be false, so all four give B. The min()
5301 and max() versions would give a NaN instead. */
5302 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5303 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5304 /* Avoid these transformations if the COND_EXPR may be used
5305 as an lvalue in the C++ front-end. PR c++/19199. */
5306 && (in_gimple_form
5307 || VECTOR_TYPE_P (type)
5308 || (! lang_GNU_CXX ()
5309 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5310 || ! maybe_lvalue_p (arg1)
5311 || ! maybe_lvalue_p (arg2)))
5313 tree comp_op0 = arg00;
5314 tree comp_op1 = arg01;
5315 tree comp_type = TREE_TYPE (comp_op0);
5317 switch (comp_code)
5319 case EQ_EXPR:
5320 return fold_convert_loc (loc, type, arg2);
5321 case NE_EXPR:
5322 return fold_convert_loc (loc, type, arg1);
5323 case LE_EXPR:
5324 case LT_EXPR:
5325 case UNLE_EXPR:
5326 case UNLT_EXPR:
5327 /* In C++ a ?: expression can be an lvalue, so put the
5328 operand which will be used if they are equal first
5329 so that we can convert this back to the
5330 corresponding COND_EXPR. */
5331 if (!HONOR_NANS (arg1))
5333 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5334 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5335 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5336 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5337 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5338 comp_op1, comp_op0);
5339 return fold_convert_loc (loc, type, tem);
5341 break;
5342 case GE_EXPR:
5343 case GT_EXPR:
5344 case UNGE_EXPR:
5345 case UNGT_EXPR:
5346 if (!HONOR_NANS (arg1))
5348 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5349 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5350 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5351 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5352 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5353 comp_op1, comp_op0);
5354 return fold_convert_loc (loc, type, tem);
5356 break;
5357 case UNEQ_EXPR:
5358 if (!HONOR_NANS (arg1))
5359 return fold_convert_loc (loc, type, arg2);
5360 break;
5361 case LTGT_EXPR:
5362 if (!HONOR_NANS (arg1))
5363 return fold_convert_loc (loc, type, arg1);
5364 break;
5365 default:
5366 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5367 break;
5371 return NULL_TREE;
5376 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5377 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5378 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5379 false) >= 2)
5380 #endif
5382 /* EXP is some logical combination of boolean tests. See if we can
5383 merge it into some range test. Return the new tree if so. */
5385 static tree
5386 fold_range_test (location_t loc, enum tree_code code, tree type,
5387 tree op0, tree op1)
5389 int or_op = (code == TRUTH_ORIF_EXPR
5390 || code == TRUTH_OR_EXPR);
5391 int in0_p, in1_p, in_p;
5392 tree low0, low1, low, high0, high1, high;
5393 bool strict_overflow_p = false;
5394 tree tem, lhs, rhs;
5395 const char * const warnmsg = G_("assuming signed overflow does not occur "
5396 "when simplifying range test");
5398 if (!INTEGRAL_TYPE_P (type))
5399 return 0;
5401 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5402 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5404 /* If this is an OR operation, invert both sides; we will invert
5405 again at the end. */
5406 if (or_op)
5407 in0_p = ! in0_p, in1_p = ! in1_p;
5409 /* If both expressions are the same, if we can merge the ranges, and we
5410 can build the range test, return it or it inverted. If one of the
5411 ranges is always true or always false, consider it to be the same
5412 expression as the other. */
5413 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5414 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5415 in1_p, low1, high1)
5416 && 0 != (tem = (build_range_check (loc, type,
5417 lhs != 0 ? lhs
5418 : rhs != 0 ? rhs : integer_zero_node,
5419 in_p, low, high))))
5421 if (strict_overflow_p)
5422 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5423 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5426 /* On machines where the branch cost is expensive, if this is a
5427 short-circuited branch and the underlying object on both sides
5428 is the same, make a non-short-circuit operation. */
5429 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5430 && lhs != 0 && rhs != 0
5431 && (code == TRUTH_ANDIF_EXPR
5432 || code == TRUTH_ORIF_EXPR)
5433 && operand_equal_p (lhs, rhs, 0))
5435 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5436 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5437 which case we can't do this. */
5438 if (simple_operand_p (lhs))
5439 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5440 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5441 type, op0, op1);
5443 else if (!lang_hooks.decls.global_bindings_p ()
5444 && !CONTAINS_PLACEHOLDER_P (lhs))
5446 tree common = save_expr (lhs);
5448 if (0 != (lhs = build_range_check (loc, type, common,
5449 or_op ? ! in0_p : in0_p,
5450 low0, high0))
5451 && (0 != (rhs = build_range_check (loc, type, common,
5452 or_op ? ! in1_p : in1_p,
5453 low1, high1))))
5455 if (strict_overflow_p)
5456 fold_overflow_warning (warnmsg,
5457 WARN_STRICT_OVERFLOW_COMPARISON);
5458 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5459 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5460 type, lhs, rhs);
5465 return 0;
5468 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5469 bit value. Arrange things so the extra bits will be set to zero if and
5470 only if C is sign-extended to its full width. If MASK is nonzero,
5471 it is an INTEGER_CST that should be AND'ed with the extra bits. */
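/* Editorial worked example, not in the original source: with P = 4,
   UNSIGNEDP = 0, MASK = 0 and C = 0xb (low four bits 1011, field sign
   bit set), the sequence below isolates the sign bit, shifts it to
   the top, arithmetic-shifts it back down and XORs with C, yielding
   the sign extension 0xfffffffb for a 32-bit mode; for C = 0x3 the
   sign bit is clear and C comes back unchanged in value.  */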
5473 static tree
5474 unextend (tree c, int p, int unsignedp, tree mask)
5476 tree type = TREE_TYPE (c);
5477 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5478 tree temp;
5480 if (p == modesize || unsignedp)
5481 return c;
5483 /* We work by getting just the sign bit into the low-order bit, then
5484 into the high-order bit, then sign-extend. We then XOR that value
5485 with C. */
5486 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5488 /* We must use a signed type in order to get an arithmetic right shift.
5489 However, we must also avoid introducing accidental overflows, so that
5490 a subsequent call to integer_zerop will work. Hence we must
5491 do the type conversion here. At this point, the constant is either
5492 zero or one, and the conversion to a signed type can never overflow.
5493 We could get an overflow if this conversion is done anywhere else. */
5494 if (TYPE_UNSIGNED (type))
5495 temp = fold_convert (signed_type_for (type), temp);
5497 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5498 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5499 if (mask != 0)
5500 temp = const_binop (BIT_AND_EXPR, temp,
5501 fold_convert (TREE_TYPE (c), mask));
5502 /* If necessary, convert the type back to match the type of C. */
5503 if (TYPE_UNSIGNED (type))
5504 temp = fold_convert (type, temp);
5506 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5509 /* For an expression that has the form
5510 (A && B) || ~B
5512 (A || B) && ~B,
5513 we can drop one of the inner expressions and simplify to
5514 A || ~B
5516 A && ~B
5517 LOC is the location of the resulting expression. OP is the inner
5518 logical operation; the left-hand side in the examples above, while CMPOP
5519 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5520 removing a condition that guards another, as in
5521 (A != NULL && A->...) || A == NULL
5522 which we must not transform. If RHS_ONLY is true, only eliminate the
5523 right-most operand of the inner logical operation. */
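/* Editorial example, not in the original source: for
   (a > 0 && b > 2) || b <= 2, the inner arm b > 2 is the inverse of
   CMPOP b <= 2 and is dropped, giving a > 0 || b <= 2.  With
   RHS_ONLY set, (p != 0 && p->x > 1) || p == 0 is left alone, since
   dropping the left arm p != 0 would let p->x be evaluated without
   its guard.  */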
5525 static tree
5526 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5527 bool rhs_only)
5529 tree type = TREE_TYPE (cmpop);
5530 enum tree_code code = TREE_CODE (cmpop);
5531 enum tree_code truthop_code = TREE_CODE (op);
5532 tree lhs = TREE_OPERAND (op, 0);
5533 tree rhs = TREE_OPERAND (op, 1);
5534 tree orig_lhs = lhs, orig_rhs = rhs;
5535 enum tree_code rhs_code = TREE_CODE (rhs);
5536 enum tree_code lhs_code = TREE_CODE (lhs);
5537 enum tree_code inv_code;
5539 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5540 return NULL_TREE;
5542 if (TREE_CODE_CLASS (code) != tcc_comparison)
5543 return NULL_TREE;
5545 if (rhs_code == truthop_code)
5547 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5548 if (newrhs != NULL_TREE)
5550 rhs = newrhs;
5551 rhs_code = TREE_CODE (rhs);
5554 if (lhs_code == truthop_code && !rhs_only)
5556 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5557 if (newlhs != NULL_TREE)
5559 lhs = newlhs;
5560 lhs_code = TREE_CODE (lhs);
5564 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5565 if (inv_code == rhs_code
5566 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5567 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5568 return lhs;
5569 if (!rhs_only && inv_code == lhs_code
5570 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5571 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5572 return rhs;
5573 if (rhs != orig_rhs || lhs != orig_lhs)
5574 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5575 lhs, rhs);
5576 return NULL_TREE;
5579 /* Find ways of folding logical expressions of LHS and RHS:
5580 Try to merge two comparisons to the same innermost item.
5581 Look for range tests like "ch >= '0' && ch <= '9'".
5582 Look for combinations of simple terms on machines with expensive branches
5583 and evaluate the RHS unconditionally.
5585 For example, if we have p->a == 2 && p->b == 4 and we can make an
5586 object large enough to span both A and B, we can do this with a comparison
5587 against the object ANDed with the a mask.
5589 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5590 operations to do this with one comparison.
5592 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5593 function and the one above.
5595 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5596 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5598 TRUTH_TYPE is the type of the logical operation, and LHS and RHS are
5599 its two operands.
5601 We return the simplified tree or 0 if no optimization is possible. */
5603 static tree
5604 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5605 tree lhs, tree rhs)
5607 /* If this is the "or" of two comparisons, we can do something if
5608 the comparisons are NE_EXPR. If this is the "and", we can do something
5609 if the comparisons are EQ_EXPR. I.e.,
5610 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5612 WANTED_CODE is this operation code. For single bit fields, we can
5613 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5614 comparison for one-bit fields. */
5616 enum tree_code wanted_code;
5617 enum tree_code lcode, rcode;
5618 tree ll_arg, lr_arg, rl_arg, rr_arg;
5619 tree ll_inner, lr_inner, rl_inner, rr_inner;
5620 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5621 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5622 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5623 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5624 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5625 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5626 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5627 machine_mode lnmode, rnmode;
5628 tree ll_mask, lr_mask, rl_mask, rr_mask;
5629 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5630 tree l_const, r_const;
5631 tree lntype, rntype, result;
5632 HOST_WIDE_INT first_bit, end_bit;
5633 int volatilep;
5635 /* Start by getting the comparison codes. Fail if anything is volatile.
5636 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5637 it were surrounded with a NE_EXPR. */
5639 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5640 return 0;
5642 lcode = TREE_CODE (lhs);
5643 rcode = TREE_CODE (rhs);
5645 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5647 lhs = build2 (NE_EXPR, truth_type, lhs,
5648 build_int_cst (TREE_TYPE (lhs), 0));
5649 lcode = NE_EXPR;
5652 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5654 rhs = build2 (NE_EXPR, truth_type, rhs,
5655 build_int_cst (TREE_TYPE (rhs), 0));
5656 rcode = NE_EXPR;
5659 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5660 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5661 return 0;
5663 ll_arg = TREE_OPERAND (lhs, 0);
5664 lr_arg = TREE_OPERAND (lhs, 1);
5665 rl_arg = TREE_OPERAND (rhs, 0);
5666 rr_arg = TREE_OPERAND (rhs, 1);
5668 /* Simplify (x<y) || (x==y) into (x<=y) and related optimizations. */
5669 if (simple_operand_p (ll_arg)
5670 && simple_operand_p (lr_arg))
5672 if (operand_equal_p (ll_arg, rl_arg, 0)
5673 && operand_equal_p (lr_arg, rr_arg, 0))
5675 result = combine_comparisons (loc, code, lcode, rcode,
5676 truth_type, ll_arg, lr_arg);
5677 if (result)
5678 return result;
5680 else if (operand_equal_p (ll_arg, rr_arg, 0)
5681 && operand_equal_p (lr_arg, rl_arg, 0))
5683 result = combine_comparisons (loc, code, lcode,
5684 swap_tree_comparison (rcode),
5685 truth_type, ll_arg, lr_arg);
5686 if (result)
5687 return result;
5691 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5692 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5694 /* If the RHS can be evaluated unconditionally and its operands are
5695 simple, it wins to evaluate the RHS unconditionally on machines
5696 with expensive branches. In this case, this isn't a comparison
5697 that can be merged. */
5699 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5700 false) >= 2
5701 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5702 && simple_operand_p (rl_arg)
5703 && simple_operand_p (rr_arg))
5705 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5706 if (code == TRUTH_OR_EXPR
5707 && lcode == NE_EXPR && integer_zerop (lr_arg)
5708 && rcode == NE_EXPR && integer_zerop (rr_arg)
5709 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5710 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5711 return build2_loc (loc, NE_EXPR, truth_type,
5712 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5713 ll_arg, rl_arg),
5714 build_int_cst (TREE_TYPE (ll_arg), 0));
5716 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5717 if (code == TRUTH_AND_EXPR
5718 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5719 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5720 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5721 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5722 return build2_loc (loc, EQ_EXPR, truth_type,
5723 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5724 ll_arg, rl_arg),
5725 build_int_cst (TREE_TYPE (ll_arg), 0));
5728 /* See if the comparisons can be merged. Then get all the parameters for
5729 each side. */
5731 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5732 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5733 return 0;
5735 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5736 volatilep = 0;
5737 ll_inner = decode_field_reference (loc, &ll_arg,
5738 &ll_bitsize, &ll_bitpos, &ll_mode,
5739 &ll_unsignedp, &ll_reversep, &volatilep,
5740 &ll_mask, &ll_and_mask);
5741 lr_inner = decode_field_reference (loc, &lr_arg,
5742 &lr_bitsize, &lr_bitpos, &lr_mode,
5743 &lr_unsignedp, &lr_reversep, &volatilep,
5744 &lr_mask, &lr_and_mask);
5745 rl_inner = decode_field_reference (loc, &rl_arg,
5746 &rl_bitsize, &rl_bitpos, &rl_mode,
5747 &rl_unsignedp, &rl_reversep, &volatilep,
5748 &rl_mask, &rl_and_mask);
5749 rr_inner = decode_field_reference (loc, &rr_arg,
5750 &rr_bitsize, &rr_bitpos, &rr_mode,
5751 &rr_unsignedp, &rr_reversep, &volatilep,
5752 &rr_mask, &rr_and_mask);
5754 /* The inner operation on the lhs of each comparison must be the
5755 same if we are to be able to do anything. Then see if we have
5756 constants. If not, the same must be true for the rhs's. */
5758 if (volatilep
5759 || ll_reversep != rl_reversep
5760 || ll_inner == 0 || rl_inner == 0
5761 || ! operand_equal_p (ll_inner, rl_inner, 0))
5762 return 0;
5764 if (TREE_CODE (lr_arg) == INTEGER_CST
5765 && TREE_CODE (rr_arg) == INTEGER_CST)
5767 l_const = lr_arg, r_const = rr_arg;
5768 lr_reversep = ll_reversep;
5770 else if (lr_reversep != rr_reversep
5771 || lr_inner == 0 || rr_inner == 0
5772 || ! operand_equal_p (lr_inner, rr_inner, 0))
5773 return 0;
5774 else
5775 l_const = r_const = 0;
5777 /* If either comparison code is not correct for our logical operation,
5778 fail. However, we can convert a one-bit comparison against zero into
5779 the opposite comparison against that bit being set in the field. */
5781 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5782 if (lcode != wanted_code)
5784 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5786 /* Make the left operand unsigned, since we are only interested
5787 in the value of one bit. Otherwise we are doing the wrong
5788 thing below. */
5789 ll_unsignedp = 1;
5790 l_const = ll_mask;
5792 else
5793 return 0;
5796 /* This is analogous to the code for l_const above. */
5797 if (rcode != wanted_code)
5799 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5801 rl_unsignedp = 1;
5802 r_const = rl_mask;
5804 else
5805 return 0;
5808 /* See if we can find a mode that contains both fields being compared on
5809 the left. If we can't, fail. Otherwise, update all constants and masks
5810 to be relative to a field of that size. */
5811 first_bit = MIN (ll_bitpos, rl_bitpos);
5812 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5813 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5814 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5815 volatilep);
5816 if (lnmode == VOIDmode)
5817 return 0;
5819 lnbitsize = GET_MODE_BITSIZE (lnmode);
5820 lnbitpos = first_bit & ~ (lnbitsize - 1);
5821 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5822 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5824 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5826 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5827 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5830 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5831 size_int (xll_bitpos));
5832 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5833 size_int (xrl_bitpos));
5835 if (l_const)
5837 l_const = fold_convert_loc (loc, lntype, l_const);
5838 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5839 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5840 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5841 fold_build1_loc (loc, BIT_NOT_EXPR,
5842 lntype, ll_mask))))
5844 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5846 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5849 if (r_const)
5851 r_const = fold_convert_loc (loc, lntype, r_const);
5852 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5853 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5854 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5855 fold_build1_loc (loc, BIT_NOT_EXPR,
5856 lntype, rl_mask))))
5858 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5860 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5864 /* If the right sides are not constant, do the same for them. Also,
5865 disallow this optimization if a size or signedness mismatch occurs
5866 between the left and right sides. */
5867 if (l_const == 0)
5869 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5870 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5871 /* Make sure the two fields on the right
5872 correspond to the left without being swapped. */
5873 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5874 return 0;
5876 first_bit = MIN (lr_bitpos, rr_bitpos);
5877 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5878 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5879 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5880 volatilep);
5881 if (rnmode == VOIDmode)
5882 return 0;
5884 rnbitsize = GET_MODE_BITSIZE (rnmode);
5885 rnbitpos = first_bit & ~ (rnbitsize - 1);
5886 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5887 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5889 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5891 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5892 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5895 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5896 rntype, lr_mask),
5897 size_int (xlr_bitpos));
5898 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5899 rntype, rr_mask),
5900 size_int (xrr_bitpos));
5902 /* Make a mask that corresponds to both fields being compared.
5903 Do this for both items being compared. If the operands are the
5904 same size and the bits being compared are in the same position
5905 then we can do this by masking both and comparing the masked
5906 results. */
5907 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5908 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5909 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5911 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
5912 lntype, lnbitsize, lnbitpos,
5913 ll_unsignedp || rl_unsignedp, ll_reversep);
5914 if (! all_ones_mask_p (ll_mask, lnbitsize))
5915 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5917 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
5918 rntype, rnbitsize, rnbitpos,
5919 lr_unsignedp || rr_unsignedp, lr_reversep);
5920 if (! all_ones_mask_p (lr_mask, rnbitsize))
5921 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5923 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5926 /* There is still another way we can do something: If both pairs of
5927 fields being compared are adjacent, we may be able to make a wider
5928 field containing them both.
5930 Note that we still must mask the lhs/rhs expressions. Furthermore,
5931 the mask must be shifted to account for the shift done by
5932 make_bit_field_ref. */
5933 if ((ll_bitsize + ll_bitpos == rl_bitpos
5934 && lr_bitsize + lr_bitpos == rr_bitpos)
5935 || (ll_bitpos == rl_bitpos + rl_bitsize
5936 && lr_bitpos == rr_bitpos + rr_bitsize))
5938 tree type;
5940 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
5941 ll_bitsize + rl_bitsize,
5942 MIN (ll_bitpos, rl_bitpos),
5943 ll_unsignedp, ll_reversep);
5944 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
5945 lr_bitsize + rr_bitsize,
5946 MIN (lr_bitpos, rr_bitpos),
5947 lr_unsignedp, lr_reversep);
5949 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5950 size_int (MIN (xll_bitpos, xrl_bitpos)));
5951 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5952 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5954 /* Convert to the smaller type before masking out unwanted bits. */
5955 type = lntype;
5956 if (lntype != rntype)
5958 if (lnbitsize > rnbitsize)
5960 lhs = fold_convert_loc (loc, rntype, lhs);
5961 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5962 type = rntype;
5964 else if (lnbitsize < rnbitsize)
5966 rhs = fold_convert_loc (loc, lntype, rhs);
5967 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5968 type = lntype;
5972 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5973 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5975 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5976 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5978 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5981 return 0;
5984 /* Handle the case of comparisons with constants. If there is something in
5985 common between the masks, those bits of the constants must be the same.
5986 If not, the condition is always false. Test for this to avoid generating
5987 incorrect code below. */
5988 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5989 if (! integer_zerop (result)
5990 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5991 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5993 if (wanted_code == NE_EXPR)
5995 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5996 return constant_boolean_node (true, truth_type);
5998 else
6000 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6001 return constant_boolean_node (false, truth_type);
6005 /* Construct the expression we will return. First get the component
6006 reference we will make. Unless the mask is all ones the width of
6007 that field, perform the mask operation. Then compare with the
6008 merged constant. */
6009 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6010 lntype, lnbitsize, lnbitpos,
6011 ll_unsignedp || rl_unsignedp, ll_reversep);
6013 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6014 if (! all_ones_mask_p (ll_mask, lnbitsize))
6015 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6017 return build2_loc (loc, wanted_code, truth_type, result,
6018 const_binop (BIT_IOR_EXPR, l_const, r_const));
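/* A source-level sketch of one of the folds above (the function below
   is hypothetical and not part of this file).  When branches are costly
   enough, (a != 0) || (b != 0) is rewritten to evaluate b
   unconditionally; adjacent bitfield tests such as p->a == 2 &&
   p->b == 4 can likewise merge into a single masked compare.  */

static int
example_ior_fold (unsigned a, unsigned b)
{
  /* Same truth value as (a != 0) || (b != 0), with no branch.  */
  return (a | b) != 0;
}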
6021 /* T is an integer expression that is being multiplied or divided by, or
6022 taken modulo, a constant C (CODE says which operation, and which kind of
6023 division or modulus). See if we can eliminate that operation by folding it with
6024 other operations already in T. WIDE_TYPE, if non-null, is a type that
6025 should be used for the computation if wider than our type.
6027 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6028 (X * 2) + (Y * 4). We must, however, be assured that either the original
6029 expression would not overflow or that overflow is undefined for the type
6030 in the language in question.
6032 If we return a non-null expression, it is an equivalent form of the
6033 original computation, but need not be in the original type.
6035 We set *STRICT_OVERFLOW_P to true if the return value depends on
6036 signed overflow being undefined. Otherwise we do not change
6037 *STRICT_OVERFLOW_P. */
6039 static tree
6040 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6041 bool *strict_overflow_p)
6043 /* To avoid exponential search depth, refuse to allow recursion past
6044 three levels. Beyond that (1) it's highly unlikely that we'll find
6045 something interesting and (2) we've probably processed it before
6046 when we built the inner expression. */
6048 static int depth;
6049 tree ret;
6051 if (depth > 3)
6052 return NULL;
6054 depth++;
6055 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6056 depth--;
6058 return ret;
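/* Worked instance of the transformation described above (a sketch with
   hypothetical identifiers).  Dividing (x * 8) + (y * 16) by 4 scales
   each multiplier instead, which is only valid when the original sum
   cannot overflow or when signed overflow is undefined; the folder
   reports the latter dependency through *STRICT_OVERFLOW_P.  */

static long
example_extract_muldiv (long x, long y)
{
  /* ((x * 8) + (y * 16)) / 4 simplifies to:  */
  return x * 2 + y * 4;
}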
6061 static tree
6062 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6063 bool *strict_overflow_p)
6065 tree type = TREE_TYPE (t);
6066 enum tree_code tcode = TREE_CODE (t);
6067 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6068 > GET_MODE_SIZE (TYPE_MODE (type)))
6069 ? wide_type : type);
6070 tree t1, t2;
6071 int same_p = tcode == code;
6072 tree op0 = NULL_TREE, op1 = NULL_TREE;
6073 bool sub_strict_overflow_p;
6075 /* Don't deal with constants of zero here; they confuse the code below. */
6076 if (integer_zerop (c))
6077 return NULL_TREE;
6079 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6080 op0 = TREE_OPERAND (t, 0);
6082 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6083 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6085 /* Note that we need not handle conditional operations here since fold
6086 already handles those cases. So just do arithmetic here. */
6087 switch (tcode)
6089 case INTEGER_CST:
6090 /* For a constant, we can always simplify if we are a multiply
6091 or (for divide and modulus) if it is a multiple of our constant. */
6092 if (code == MULT_EXPR
6093 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6095 tree tem = const_binop (code, fold_convert (ctype, t),
6096 fold_convert (ctype, c));
6097 /* If the multiplication overflowed, we lost information on it.
6098 See PR68142 and PR69845. */
6099 if (TREE_OVERFLOW (tem))
6100 return NULL_TREE;
6101 return tem;
6103 break;
6105 CASE_CONVERT: case NON_LVALUE_EXPR:
6106 /* If op0 is an expression ... */
6107 if ((COMPARISON_CLASS_P (op0)
6108 || UNARY_CLASS_P (op0)
6109 || BINARY_CLASS_P (op0)
6110 || VL_EXP_CLASS_P (op0)
6111 || EXPRESSION_CLASS_P (op0))
6112 /* ... and has wrapping overflow, and its type is smaller
6113 than ctype, then we cannot pass through as widening. */
6114 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6115 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6116 && (TYPE_PRECISION (ctype)
6117 > TYPE_PRECISION (TREE_TYPE (op0))))
6118 /* ... or this is a truncation (t is narrower than op0),
6119 then we cannot pass through this narrowing. */
6120 || (TYPE_PRECISION (type)
6121 < TYPE_PRECISION (TREE_TYPE (op0)))
6122 /* ... or signedness changes for division or modulus,
6123 then we cannot pass through this conversion. */
6124 || (code != MULT_EXPR
6125 && (TYPE_UNSIGNED (ctype)
6126 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6127 /* ... or the inner type has undefined overflow while the
6128 type converted to has not, so we cannot do the operation in
6129 the inner type, as that would introduce undefined overflow. */
6130 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6131 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6132 && !TYPE_OVERFLOW_UNDEFINED (type))))
6133 break;
6135 /* Pass the constant down and see if we can make a simplification. If
6136 we can, replace this expression with the inner simplification for
6137 possible later conversion to our or some other type. */
6138 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6139 && TREE_CODE (t2) == INTEGER_CST
6140 && !TREE_OVERFLOW (t2)
6141 && (0 != (t1 = extract_muldiv (op0, t2, code,
6142 code == MULT_EXPR
6143 ? ctype : NULL_TREE,
6144 strict_overflow_p))))
6145 return t1;
6146 break;
6148 case ABS_EXPR:
6149 /* If widening the type changes it from signed to unsigned, then we
6150 must avoid building ABS_EXPR itself as unsigned. */
6151 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6153 tree cstype = (*signed_type_for) (ctype);
6154 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6155 != 0)
6157 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6158 return fold_convert (ctype, t1);
6160 break;
6162 /* If the constant is negative, we cannot simplify this. */
6163 if (tree_int_cst_sgn (c) == -1)
6164 break;
6165 /* FALLTHROUGH */
6166 case NEGATE_EXPR:
6167 /* For division and modulus, type can't be unsigned, as e.g.
6168 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6169 For signed types, even with wrapping overflow, this is fine. */
6170 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6171 break;
6172 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6173 != 0)
6174 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6175 break;
6177 case MIN_EXPR: case MAX_EXPR:
6178 /* If widening the type changes the signedness, then we can't perform
6179 this optimization as that changes the result. */
6180 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6181 break;
6183 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6184 sub_strict_overflow_p = false;
6185 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6186 &sub_strict_overflow_p)) != 0
6187 && (t2 = extract_muldiv (op1, c, code, wide_type,
6188 &sub_strict_overflow_p)) != 0)
6190 if (tree_int_cst_sgn (c) < 0)
6191 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6192 if (sub_strict_overflow_p)
6193 *strict_overflow_p = true;
6194 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6195 fold_convert (ctype, t2));
6197 break;
6199 case LSHIFT_EXPR: case RSHIFT_EXPR:
6200 /* If the second operand is constant, this is a multiplication
6201 or floor division, by a power of two, so we can treat it that
6202 way unless the multiplier or divisor overflows. Signed
6203 left-shift overflow is implementation-defined rather than
6204 undefined in C90, so do not convert signed left shift into
6205 multiplication. */
6206 if (TREE_CODE (op1) == INTEGER_CST
6207 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6208 /* const_binop may not detect overflow correctly,
6209 so check for it explicitly here. */
6210 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6211 && 0 != (t1 = fold_convert (ctype,
6212 const_binop (LSHIFT_EXPR,
6213 size_one_node,
6214 op1)))
6215 && !TREE_OVERFLOW (t1))
6216 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6217 ? MULT_EXPR : FLOOR_DIV_EXPR,
6218 ctype,
6219 fold_convert (ctype, op0),
6220 t1),
6221 c, code, wide_type, strict_overflow_p);
6222 break;
6224 case PLUS_EXPR: case MINUS_EXPR:
6225 /* See if we can eliminate the operation on both sides. If we can, we
6226 can return a new PLUS or MINUS. If we can't, the only remaining
6227 cases where we can do anything are if the second operand is a
6228 constant. */
6229 sub_strict_overflow_p = false;
6230 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6231 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6232 if (t1 != 0 && t2 != 0
6233 && TYPE_OVERFLOW_WRAPS (ctype)
6234 && (code == MULT_EXPR
6235 /* If not multiplication, we can only do this if both operands
6236 are divisible by c. */
6237 || (multiple_of_p (ctype, op0, c)
6238 && multiple_of_p (ctype, op1, c))))
6240 if (sub_strict_overflow_p)
6241 *strict_overflow_p = true;
6242 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6243 fold_convert (ctype, t2));
6246 /* If this was a subtraction, negate OP1 and set it to be an addition.
6247 This simplifies the logic below. */
6248 if (tcode == MINUS_EXPR)
6250 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6251 /* If OP1 was not easily negatable, the constant may be OP0. */
6252 if (TREE_CODE (op0) == INTEGER_CST)
6254 std::swap (op0, op1);
6255 std::swap (t1, t2);
6259 if (TREE_CODE (op1) != INTEGER_CST)
6260 break;
6262 /* If either OP1 or C are negative, this optimization is not safe for
6263 some of the division and remainder types while for others we need
6264 to change the code. */
6265 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6267 if (code == CEIL_DIV_EXPR)
6268 code = FLOOR_DIV_EXPR;
6269 else if (code == FLOOR_DIV_EXPR)
6270 code = CEIL_DIV_EXPR;
6271 else if (code != MULT_EXPR
6272 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6273 break;
6276 /* If it's a multiply or a division/modulus operation of a multiple
6277 of our constant, do the operation and verify it doesn't overflow. */
6278 if (code == MULT_EXPR
6279 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6281 op1 = const_binop (code, fold_convert (ctype, op1),
6282 fold_convert (ctype, c));
6283 /* We allow the constant to overflow with wrapping semantics. */
6284 if (op1 == 0
6285 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6286 break;
6288 else
6289 break;
6291 /* If we have an unsigned type, we cannot widen the operation since it
6292 will change the result if the original computation overflowed. */
6293 if (TYPE_UNSIGNED (ctype) && ctype != type)
6294 break;
6296 /* The last case is if we are a multiply. In that case, we can
6297 apply the distributive law to commute the multiply and addition
6298 if the multiplication of the constants doesn't overflow
6299 and overflow is defined. With undefined overflow
6300 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6301 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6302 return fold_build2 (tcode, ctype,
6303 fold_build2 (code, ctype,
6304 fold_convert (ctype, op0),
6305 fold_convert (ctype, c)),
6306 op1);
6308 break;
6310 case MULT_EXPR:
6311 /* We have a special case here if we are doing something like
6312 (C * 8) % 4 since we know that's zero. */
6313 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6314 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6315 /* If the multiplication can overflow we cannot optimize this. */
6316 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6317 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6318 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6320 *strict_overflow_p = true;
6321 return omit_one_operand (type, integer_zero_node, op0);
6324 /* ... fall through ... */
6326 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6327 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6328 /* If we can extract our operation from the LHS, do so and return a
6329 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6330 do something only if the second operand is a constant. */
6331 if (same_p
6332 && TYPE_OVERFLOW_WRAPS (ctype)
6333 && (t1 = extract_muldiv (op0, c, code, wide_type,
6334 strict_overflow_p)) != 0)
6335 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6336 fold_convert (ctype, op1));
6337 else if (tcode == MULT_EXPR && code == MULT_EXPR
6338 && TYPE_OVERFLOW_WRAPS (ctype)
6339 && (t1 = extract_muldiv (op1, c, code, wide_type,
6340 strict_overflow_p)) != 0)
6341 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6342 fold_convert (ctype, t1));
6343 else if (TREE_CODE (op1) != INTEGER_CST)
6344 return 0;
6346 /* If these are the same operation types, we can associate them
6347 assuming no overflow. */
6348 if (tcode == code)
6350 bool overflow_p = false;
6351 bool overflow_mul_p;
6352 signop sign = TYPE_SIGN (ctype);
6353 unsigned prec = TYPE_PRECISION (ctype);
6354 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6355 wi::to_wide (c, prec),
6356 sign, &overflow_mul_p);
6357 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6358 if (overflow_mul_p
6359 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6360 overflow_p = true;
6361 if (!overflow_p)
6362 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6363 wide_int_to_tree (ctype, mul));
6366 /* If these operations "cancel" each other, we have the main
6367 optimizations of this pass, which occur when either constant is a
6368 multiple of the other, in which case we replace this with an
6369 operation of either CODE or TCODE.
6371 If we have an unsigned type, we cannot do this since it will change
6372 the result if the original computation overflowed. */
6373 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6374 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6375 || (tcode == MULT_EXPR
6376 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6377 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6378 && code != MULT_EXPR)))
6380 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6382 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6383 *strict_overflow_p = true;
6384 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6385 fold_convert (ctype,
6386 const_binop (TRUNC_DIV_EXPR,
6387 op1, c)));
6389 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6391 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6392 *strict_overflow_p = true;
6393 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6394 fold_convert (ctype,
6395 const_binop (TRUNC_DIV_EXPR,
6396 c, op1)));
6399 break;
6401 default:
6402 break;
6405 return 0;
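/* One more instance, from the MULT_EXPR case above (hypothetical
   function, not part of this file): a modulus whose divisor divides
   the multiplier folds to zero.  */

static int
example_mult_mod (int c)
{
  /* (c * 8) % 4 folds to 0 because 8 is a multiple of 4; the fold is
     recorded as depending on signed overflow being undefined.  */
  return (c * 8) % 4;
}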
6408 /* Return a node which has the indicated constant VALUE (either 0 or
6409 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6410 and is of the indicated TYPE. */
6412 tree
6413 constant_boolean_node (bool value, tree type)
6415 if (type == integer_type_node)
6416 return value ? integer_one_node : integer_zero_node;
6417 else if (type == boolean_type_node)
6418 return value ? boolean_true_node : boolean_false_node;
6419 else if (TREE_CODE (type) == VECTOR_TYPE)
6420 return build_vector_from_val (type,
6421 build_int_cst (TREE_TYPE (type),
6422 value ? -1 : 0));
6423 else
6424 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6428 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6429 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6430 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6431 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6432 COND is the first argument to CODE; otherwise (as in the example
6433 given here), it is the second argument. TYPE is the type of the
6434 original expression. Return NULL_TREE if no simplification is
6435 possible. */
6437 static tree
6438 fold_binary_op_with_conditional_arg (location_t loc,
6439 enum tree_code code,
6440 tree type, tree op0, tree op1,
6441 tree cond, tree arg, int cond_first_p)
6443 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6444 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6445 tree test, true_value, false_value;
6446 tree lhs = NULL_TREE;
6447 tree rhs = NULL_TREE;
6448 enum tree_code cond_code = COND_EXPR;
6450 if (TREE_CODE (cond) == COND_EXPR
6451 || TREE_CODE (cond) == VEC_COND_EXPR)
6453 test = TREE_OPERAND (cond, 0);
6454 true_value = TREE_OPERAND (cond, 1);
6455 false_value = TREE_OPERAND (cond, 2);
6456 /* If an arm is a throw expression (its type is void), it does
6457 not make sense to try to perform a logical or arithmetic
6458 operation involving it. */
6459 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6460 lhs = true_value;
6461 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6462 rhs = false_value;
6464 else if (!(TREE_CODE (type) != VECTOR_TYPE
6465 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6467 tree testtype = TREE_TYPE (cond);
6468 test = cond;
6469 true_value = constant_boolean_node (true, testtype);
6470 false_value = constant_boolean_node (false, testtype);
6472 else
6473 /* Detect the case of mixing vector and scalar types - bail out. */
6474 return NULL_TREE;
6476 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6477 cond_code = VEC_COND_EXPR;
6479 /* This transformation is only worthwhile if we don't have to wrap ARG
6480 in a SAVE_EXPR and the operation can be simplified without recursing
6481 on at least one of the branches once it's pushed inside the COND_EXPR. */
6482 if (!TREE_CONSTANT (arg)
6483 && (TREE_SIDE_EFFECTS (arg)
6484 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6485 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6486 return NULL_TREE;
6488 arg = fold_convert_loc (loc, arg_type, arg);
6489 if (lhs == 0)
6491 true_value = fold_convert_loc (loc, cond_type, true_value);
6492 if (cond_first_p)
6493 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6494 else
6495 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6497 if (rhs == 0)
6499 false_value = fold_convert_loc (loc, cond_type, false_value);
6500 if (cond_first_p)
6501 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6502 else
6503 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6506 /* Check that we have simplified at least one of the branches. */
6507 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6508 return NULL_TREE;
6510 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
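/* Source-level shape of the transformation above (a hedged sketch;
   identifiers are hypothetical).  `a + (b ? x : y)' is distributed
   into the arms, which only pays off when at least one resulting arm
   folds further, e.g. when an arm is constant.  */

static int
example_binop_with_cond (int a, int b)
{
  /* a + (b ? 1 : 0) distributes to b ? (a + 1) : (a + 0), and the
     second arm then folds to just a.  */
  return b ? a + 1 : a;
}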
6514 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6516 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6517 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6518 ADDEND is the same as X.
6520 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6521 and finite. The problematic cases are when X is zero, and its mode
6522 has signed zeros. In the case of rounding towards -infinity,
6523 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6524 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6526 bool
6527 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6529 if (!real_zerop (addend))
6530 return false;
6532 /* Don't allow the fold with -fsignaling-nans. */
6533 if (HONOR_SNANS (element_mode (type)))
6534 return false;
6536 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6537 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6538 return true;
6540 /* In a vector or complex, we would need to check the sign of all zeros. */
6541 if (TREE_CODE (addend) != REAL_CST)
6542 return false;
6544 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6545 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6546 negate = !negate;
6548 /* The mode has signed zeros, and we have to honor their sign.
6549 In this situation, there is only one case we can return true for.
6550 X - 0 is the same as X unless rounding towards -infinity is
6551 supported. */
6552 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
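/* Numeric illustration of the rule above (a sketch; the function is
   hypothetical).  With signed zeros honored and no sign-dependent
   rounding, x - 0.0 is always x, but x + 0.0 is not: for x == -0.0,
   -0.0 + 0.0 yields +0.0.  */

static double
example_real_zero_addition (double x)
{
  /* Foldable to x under the conditions checked above.  */
  return x - 0.0;
}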
6555 /* Subroutine of match.pd that optimizes comparisons of a division by
6556 a nonzero integer constant against an integer constant, i.e.
6557 X/C1 op C2.
6559 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6560 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
6562 enum tree_code
6563 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
6564 tree *hi, bool *neg_overflow)
6566 tree prod, tmp, type = TREE_TYPE (c1);
6567 signop sign = TYPE_SIGN (type);
6568 bool overflow;
6570 /* We have to do this the hard way to detect unsigned overflow.
6571 prod = int_const_binop (MULT_EXPR, c1, c2); */
6572 wide_int val = wi::mul (c1, c2, sign, &overflow);
6573 prod = force_fit_type (type, val, -1, overflow);
6574 *neg_overflow = false;
6576 if (sign == UNSIGNED)
6578 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6579 *lo = prod;
6581 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6582 val = wi::add (prod, tmp, sign, &overflow);
6583 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
6585 else if (tree_int_cst_sgn (c1) >= 0)
6587 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6588 switch (tree_int_cst_sgn (c2))
6590 case -1:
6591 *neg_overflow = true;
6592 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
6593 *hi = prod;
6594 break;
6596 case 0:
6597 *lo = fold_negate_const (tmp, type);
6598 *hi = tmp;
6599 break;
6601 case 1:
6602 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
6603 *lo = prod;
6604 break;
6606 default:
6607 gcc_unreachable ();
6610 else
6612 /* A negative divisor reverses the relational operators. */
6613 code = swap_tree_comparison (code);
6615 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
6616 switch (tree_int_cst_sgn (c2))
6618 case -1:
6619 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
6620 *lo = prod;
6621 break;
6623 case 0:
6624 *hi = fold_negate_const (tmp, type);
6625 *lo = tmp;
6626 break;
6628 case 1:
6629 *neg_overflow = true;
6630 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
6631 *hi = prod;
6632 break;
6634 default:
6635 gcc_unreachable ();
6639 if (code != EQ_EXPR && code != NE_EXPR)
6640 return code;
6642 if (TREE_OVERFLOW (*lo)
6643 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
6644 *lo = NULL_TREE;
6645 if (TREE_OVERFLOW (*hi)
6646 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
6647 *hi = NULL_TREE;
6649 return code;
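/* Worked example for the range computation above (hypothetical
   function).  For unsigned x, x / 4 == 3 holds exactly when x is in
   [*LO, *HI] = [12, 15], so the comparison folds to a range test with
   no division.  */

static int
example_div_compare (unsigned x)
{
  /* Same truth value as x / 4 == 3.  */
  return x >= 12 && x <= 15;
}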
6653 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6654 equality/inequality test, then return a simplified form of the test
6655 using a sign test. Otherwise return NULL. TYPE is the desired
6656 result type. */
6658 static tree
6659 fold_single_bit_test_into_sign_test (location_t loc,
6660 enum tree_code code, tree arg0, tree arg1,
6661 tree result_type)
6663 /* If this is testing a single bit, we can optimize the test. */
6664 if ((code == NE_EXPR || code == EQ_EXPR)
6665 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6666 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6668 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6669 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6670 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6672 if (arg00 != NULL_TREE
6673 /* This is only a win if casting to a signed type is cheap,
6674 i.e. when arg00's type is not a partial mode. */
6675 && TYPE_PRECISION (TREE_TYPE (arg00))
6676 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6678 tree stype = signed_type_for (TREE_TYPE (arg00));
6679 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6680 result_type,
6681 fold_convert_loc (loc, stype, arg00),
6682 build_int_cst (stype, 0));
6686 return NULL_TREE;
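/* Shape of the rewrite above (a sketch assuming 32-bit int, with a
   hypothetical function name): a test of the sign bit becomes an
   ordinary signed comparison with zero.  */

static int
example_sign_bit_test (int a)
{
  /* (a & 0x80000000) != 0 tests the sign bit, so it folds to:  */
  return a < 0;
}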
6689 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6690 equality/inequality test, then return a simplified form of
6691 the test using shifts and logical operations. Otherwise return
6692 NULL. TYPE is the desired result type. */
6694 tree
6695 fold_single_bit_test (location_t loc, enum tree_code code,
6696 tree arg0, tree arg1, tree result_type)
6698 /* If this is testing a single bit, we can optimize the test. */
6699 if ((code == NE_EXPR || code == EQ_EXPR)
6700 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6701 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6703 tree inner = TREE_OPERAND (arg0, 0);
6704 tree type = TREE_TYPE (arg0);
6705 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6706 machine_mode operand_mode = TYPE_MODE (type);
6707 int ops_unsigned;
6708 tree signed_type, unsigned_type, intermediate_type;
6709 tree tem, one;
6711 /* First, see if we can fold the single bit test into a sign-bit
6712 test. */
6713 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6714 result_type);
6715 if (tem)
6716 return tem;
6718 /* Otherwise we have (A & C) != 0 where C is a single bit,
6719 convert that into ((A >> C2) & 1), where C2 = log2(C).
6720 Similarly for (A & C) == 0. */
6722 /* If INNER is a right shift of a constant and it plus BITNUM does
6723 not overflow, adjust BITNUM and INNER. */
6724 if (TREE_CODE (inner) == RSHIFT_EXPR
6725 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6726 && bitnum < TYPE_PRECISION (type)
6727 && wi::ltu_p (TREE_OPERAND (inner, 1),
6728 TYPE_PRECISION (type) - bitnum))
6730 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6731 inner = TREE_OPERAND (inner, 0);
6734 /* If we are going to be able to omit the AND below, we must do our
6735 operations as unsigned. If we must use the AND, we have a choice.
6736 Normally unsigned is faster, but for some machines signed is. */
6737 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
6738 && !flag_syntax_only) ? 0 : 1;
6740 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6741 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6742 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6743 inner = fold_convert_loc (loc, intermediate_type, inner);
6745 if (bitnum != 0)
6746 inner = build2 (RSHIFT_EXPR, intermediate_type,
6747 inner, size_int (bitnum));
6749 one = build_int_cst (intermediate_type, 1);
6751 if (code == EQ_EXPR)
6752 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6754 /* Put the AND last so it can combine with more things. */
6755 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6757 /* Make sure to return the proper type. */
6758 inner = fold_convert_loc (loc, result_type, inner);
6760 return inner;
6762 return NULL_TREE;
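/* Shape of the shift form above (hypothetical function).  Testing bit
   3 of A becomes a shift that moves the bit into position 0 followed
   by a final AND, which is emitted last so it can combine with users.  */

static unsigned
example_single_bit_test (unsigned a)
{
  /* Same value as (a & 8) != 0.  */
  return (a >> 3) & 1;
}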
6765 /* Test whether it is preferable to swap two operands, ARG0 and
6766 ARG1, for example because ARG0 is an integer constant and ARG1
6767 isn't. */
6769 bool
6770 tree_swap_operands_p (const_tree arg0, const_tree arg1)
6772 if (CONSTANT_CLASS_P (arg1))
6773 return 0;
6774 if (CONSTANT_CLASS_P (arg0))
6775 return 1;
6777 STRIP_NOPS (arg0);
6778 STRIP_NOPS (arg1);
6780 if (TREE_CONSTANT (arg1))
6781 return 0;
6782 if (TREE_CONSTANT (arg0))
6783 return 1;
6785 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6786 for commutative and comparison operators. Ensuring a canonical
6787 form allows the optimizers to find additional redundancies without
6788 having to explicitly check for both orderings. */
6789 if (TREE_CODE (arg0) == SSA_NAME
6790 && TREE_CODE (arg1) == SSA_NAME
6791 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6792 return 1;
6794 /* Put SSA_NAMEs last. */
6795 if (TREE_CODE (arg1) == SSA_NAME)
6796 return 0;
6797 if (TREE_CODE (arg0) == SSA_NAME)
6798 return 1;
6800 /* Put variables last. */
6801 if (DECL_P (arg1))
6802 return 0;
6803 if (DECL_P (arg0))
6804 return 1;
6806 return 0;
6810 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6811 means A >= Y && A != MAX, but in this case we know that
6812 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6814 static tree
6815 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6817 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6819 if (TREE_CODE (bound) == LT_EXPR)
6820 a = TREE_OPERAND (bound, 0);
6821 else if (TREE_CODE (bound) == GT_EXPR)
6822 a = TREE_OPERAND (bound, 1);
6823 else
6824 return NULL_TREE;
6826 typea = TREE_TYPE (a);
6827 if (!INTEGRAL_TYPE_P (typea)
6828 && !POINTER_TYPE_P (typea))
6829 return NULL_TREE;
6831 if (TREE_CODE (ineq) == LT_EXPR)
6833 a1 = TREE_OPERAND (ineq, 1);
6834 y = TREE_OPERAND (ineq, 0);
6836 else if (TREE_CODE (ineq) == GT_EXPR)
6838 a1 = TREE_OPERAND (ineq, 0);
6839 y = TREE_OPERAND (ineq, 1);
6841 else
6842 return NULL_TREE;
6844 if (TREE_TYPE (a1) != typea)
6845 return NULL_TREE;
6847 if (POINTER_TYPE_P (typea))
6849 /* Convert the pointer types to integers before taking the difference. */
6850 tree ta = fold_convert_loc (loc, ssizetype, a);
6851 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6852 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6854 else
6855 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6857 if (!diff || !integer_onep (diff))
6858 return NULL_TREE;
6860 return fold_build2_loc (loc, GE_EXPR, type, a, y);
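/* Worked instance of the fold above (hypothetical function).  Under
   the bound a < x, the operand a cannot be the maximum value of its
   type, so a + 1 > y can lose the increment.  */

static int
example_nonsharp_ineq (int a, int x, int y)
{
  /* Same truth value as (a < x) && (a + 1 > y), without the + 1 that
     could otherwise wrap at the type's maximum.  */
  return a < x && a >= y;
}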
6863 /* Fold a sum or difference of at least one multiplication.
6864 Returns the folded tree or NULL if no simplification could be made. */
6866 static tree
6867 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6868 tree arg0, tree arg1)
6870 tree arg00, arg01, arg10, arg11;
6871 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6873 /* (A * C) +- (B * C) -> (A+-B) * C.
6874 (A * C) +- A -> A * (C+-1).
6875 We are most concerned about the case where C is a constant,
6876 but other combinations show up during loop reduction. Since
6877 it is not difficult, try all four possibilities. */
6879 if (TREE_CODE (arg0) == MULT_EXPR)
6881 arg00 = TREE_OPERAND (arg0, 0);
6882 arg01 = TREE_OPERAND (arg0, 1);
6884 else if (TREE_CODE (arg0) == INTEGER_CST)
6886 arg00 = build_one_cst (type);
6887 arg01 = arg0;
6889 else
6891 /* We cannot generate constant 1 for fract. */
6892 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6893 return NULL_TREE;
6894 arg00 = arg0;
6895 arg01 = build_one_cst (type);
6897 if (TREE_CODE (arg1) == MULT_EXPR)
6899 arg10 = TREE_OPERAND (arg1, 0);
6900 arg11 = TREE_OPERAND (arg1, 1);
6902 else if (TREE_CODE (arg1) == INTEGER_CST)
6904 arg10 = build_one_cst (type);
6905 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6906 the purpose of this canonicalization. */
6907 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6908 && negate_expr_p (arg1)
6909 && code == PLUS_EXPR)
6911 arg11 = negate_expr (arg1);
6912 code = MINUS_EXPR;
6914 else
6915 arg11 = arg1;
6917 else
6919 /* We cannot generate constant 1 for fract. */
6920 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6921 return NULL_TREE;
6922 arg10 = arg1;
6923 arg11 = build_one_cst (type);
6925 same = NULL_TREE;
6927 /* Prefer factoring a common non-constant. */
6928 if (operand_equal_p (arg00, arg10, 0))
6929 same = arg00, alt0 = arg01, alt1 = arg11;
6930 else if (operand_equal_p (arg01, arg11, 0))
6931 same = arg01, alt0 = arg00, alt1 = arg10;
6932 else if (operand_equal_p (arg00, arg11, 0))
6933 same = arg00, alt0 = arg01, alt1 = arg10;
6934 else if (operand_equal_p (arg01, arg10, 0))
6935 same = arg01, alt0 = arg00, alt1 = arg11;
6937 /* No identical multiplicands; see if we can find a common
6938 power-of-two factor in non-power-of-two multiplies. This
6939 can help in multi-dimensional array access. */
6940 else if (tree_fits_shwi_p (arg01)
6941 && tree_fits_shwi_p (arg11))
6943 HOST_WIDE_INT int01, int11, tmp;
6944 bool swap = false;
6945 tree maybe_same;
6946 int01 = tree_to_shwi (arg01);
6947 int11 = tree_to_shwi (arg11);
6949 /* Move min of absolute values to int11. */
6950 if (absu_hwi (int01) < absu_hwi (int11))
6952 tmp = int01, int01 = int11, int11 = tmp;
6953 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6954 maybe_same = arg01;
6955 swap = true;
6957 else
6958 maybe_same = arg11;
6960 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6961 /* The remainder should not be a constant, otherwise we
6962 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
6963 increase the number of multiplications necessary. */
6964 && TREE_CODE (arg10) != INTEGER_CST)
6966 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6967 build_int_cst (TREE_TYPE (arg00),
6968 int01 / int11));
6969 alt1 = arg10;
6970 same = maybe_same;
6971 if (swap)
6972 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6976 if (!same)
6977 return NULL_TREE;
6979 if (! INTEGRAL_TYPE_P (type)
6980 || TYPE_OVERFLOW_WRAPS (type)
6981 /* We are neither factoring zero nor minus one. */
6982 || TREE_CODE (same) == INTEGER_CST)
6983 return fold_build2_loc (loc, MULT_EXPR, type,
6984 fold_build2_loc (loc, code, type,
6985 fold_convert_loc (loc, type, alt0),
6986 fold_convert_loc (loc, type, alt1)),
6987 fold_convert_loc (loc, type, same));
6989 /* Same may be zero and thus the operation 'code' may overflow. Likewise
6990 same may be minus one and thus the multiplication may overflow. Perform
6991 the operations in an unsigned type. */
6992 tree utype = unsigned_type_for (type);
6993 tree tem = fold_build2_loc (loc, code, utype,
6994 fold_convert_loc (loc, utype, alt0),
6995 fold_convert_loc (loc, utype, alt1));
6996 /* If the sum evaluated to a constant that is not -INF, the
6997 multiplication cannot overflow. */
6998 if (TREE_CODE (tem) == INTEGER_CST
6999 && ! wi::eq_p (tem, wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7000 return fold_build2_loc (loc, MULT_EXPR, type,
7001 fold_convert (type, tem), same);
7003 return fold_convert_loc (loc, type,
7004 fold_build2_loc (loc, MULT_EXPR, utype, tem,
7005 fold_convert_loc (loc, utype, same)));
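/* Source-level shape of the factoring above (hypothetical function).
   a * c + b * c factors into (a + b) * c.  The unsigned type makes
   the fold safe even when a + b wraps, which is why the code above
   switches to an unsigned type for signed arguments whose overflow
   is undefined.  */

static unsigned
example_plusminus_mult (unsigned a, unsigned b, unsigned c)
{
  return (a + b) * c;
}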
7008 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7009 specified by EXPR into the buffer PTR of length LEN bytes.
7010 Return the number of bytes placed in the buffer, or zero
7011 upon failure. */
7013 static int
7014 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7016 tree type = TREE_TYPE (expr);
7017 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7018 int byte, offset, word, words;
7019 unsigned char value;
7021 if ((off == -1 && total_bytes > len)
7022 || off >= total_bytes)
7023 return 0;
7024 if (off == -1)
7025 off = 0;
7026 words = total_bytes / UNITS_PER_WORD;
7028 for (byte = 0; byte < total_bytes; byte++)
7030 int bitpos = byte * BITS_PER_UNIT;
7031 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7032 number of bytes. */
7033 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7035 if (total_bytes > UNITS_PER_WORD)
7037 word = byte / UNITS_PER_WORD;
7038 if (WORDS_BIG_ENDIAN)
7039 word = (words - 1) - word;
7040 offset = word * UNITS_PER_WORD;
7041 if (BYTES_BIG_ENDIAN)
7042 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7043 else
7044 offset += byte % UNITS_PER_WORD;
7046 else
7047 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7048 if (offset >= off
7049 && offset - off < len)
7050 ptr[offset - off] = value;
7052 return MIN (len, total_bytes - off);
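/* Simplified model of the loop above for the common case of a
   little-endian host with no per-word swapping (hypothetical helper,
   not used by this file).  Each byte of the value lands at its own
   index in the buffer, least significant byte first.  */

static int
example_encode_le_uint (unsigned int v, unsigned char *ptr, int len)
{
  int i, n = (int) sizeof v;
  if (n > len)
    return 0;
  for (i = 0; i < n; i++)
    ptr[i] = (unsigned char) (v >> (i * 8));	/* LSB first.  */
  return n;
}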
7056 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7057 specified by EXPR into the buffer PTR of length LEN bytes.
7058 Return the number of bytes placed in the buffer, or zero
7059 upon failure. */
7061 static int
7062 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7064 tree type = TREE_TYPE (expr);
7065 machine_mode mode = TYPE_MODE (type);
7066 int total_bytes = GET_MODE_SIZE (mode);
7067 FIXED_VALUE_TYPE value;
7068 tree i_value, i_type;
7070 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7071 return 0;
7073 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7075 if (NULL_TREE == i_type
7076 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7077 return 0;
7079 value = TREE_FIXED_CST (expr);
7080 i_value = double_int_to_tree (i_type, value.data);
7082 return native_encode_int (i_value, ptr, len, off);
7086 /* Subroutine of native_encode_expr. Encode the REAL_CST
7087 specified by EXPR into the buffer PTR of length LEN bytes.
7088 Return the number of bytes placed in the buffer, or zero
7089 upon failure. */
7091 static int
7092 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7094 tree type = TREE_TYPE (expr);
7095 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7096 int byte, offset, word, words, bitpos;
7097 unsigned char value;
7099 /* There are always 32 bits in each long, no matter the size of
7100 the host's long. We handle floating point representations with
7101 up to 192 bits. */
7102 long tmp[6];
7104 if ((off == -1 && total_bytes > len)
7105 || off >= total_bytes)
7106 return 0;
7107 if (off == -1)
7108 off = 0;
7109 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7111 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7113 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7114 bitpos += BITS_PER_UNIT)
7116 byte = (bitpos / BITS_PER_UNIT) & 3;
7117 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7119 if (UNITS_PER_WORD < 4)
7121 word = byte / UNITS_PER_WORD;
7122 if (WORDS_BIG_ENDIAN)
7123 word = (words - 1) - word;
7124 offset = word * UNITS_PER_WORD;
7125 if (BYTES_BIG_ENDIAN)
7126 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7127 else
7128 offset += byte % UNITS_PER_WORD;
7130 else
7132 offset = byte;
7133 if (BYTES_BIG_ENDIAN)
7135 /* Reverse bytes within each long, or within the entire float
7136 if it's smaller than a long (for HFmode). */
7137 offset = MIN (3, total_bytes - 1) - offset;
7138 gcc_assert (offset >= 0);
7141 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7142 if (offset >= off
7143 && offset - off < len)
7144 ptr[offset - off] = value;
7146 return MIN (len, total_bytes - off);
7149 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7150 specified by EXPR into the buffer PTR of length LEN bytes.
7151 Return the number of bytes placed in the buffer, or zero
7152 upon failure. */
7154 static int
7155 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7157 int rsize, isize;
7158 tree part;
7160 part = TREE_REALPART (expr);
7161 rsize = native_encode_expr (part, ptr, len, off);
7162 if (off == -1
7163 && rsize == 0)
7164 return 0;
7165 part = TREE_IMAGPART (expr);
7166 if (off != -1)
7167 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7168 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7169 if (off == -1
7170 && isize != rsize)
7171 return 0;
7172 return rsize + isize;
7176 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7177 specified by EXPR into the buffer PTR of length LEN bytes.
7178 Return the number of bytes placed in the buffer, or zero
7179 upon failure. */
7181 static int
7182 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7184 unsigned i, count;
7185 int size, offset;
7186 tree itype, elem;
7188 offset = 0;
7189 count = VECTOR_CST_NELTS (expr);
7190 itype = TREE_TYPE (TREE_TYPE (expr));
7191 size = GET_MODE_SIZE (TYPE_MODE (itype));
7192 for (i = 0; i < count; i++)
7194 if (off >= size)
7196 off -= size;
7197 continue;
7199 elem = VECTOR_CST_ELT (expr, i);
7200 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7201 if ((off == -1 && res != size)
7202 || res == 0)
7203 return 0;
7204 offset += res;
7205 if (offset >= len)
7206 return offset;
7207 if (off != -1)
7208 off = 0;
7210 return offset;
7214 /* Subroutine of native_encode_expr. Encode the STRING_CST
7215 specified by EXPR into the buffer PTR of length LEN bytes.
7216 Return the number of bytes placed in the buffer, or zero
7217 upon failure. */
7219 static int
7220 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7222 tree type = TREE_TYPE (expr);
7223 HOST_WIDE_INT total_bytes;
7225 if (TREE_CODE (type) != ARRAY_TYPE
7226 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7227 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7228 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7229 return 0;
7230 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7231 if ((off == -1 && total_bytes > len)
7232 || off >= total_bytes)
7233 return 0;
7234 if (off == -1)
7235 off = 0;
7236 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7238 int written = 0;
7239 if (off < TREE_STRING_LENGTH (expr))
7241 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7242 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7244 memset (ptr + written, 0,
7245 MIN (total_bytes - written, len - written));
7247 else
7248 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7249 return MIN (total_bytes - off, len);
7253 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7254 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified
7255 by EXPR into the buffer PTR of length LEN bytes. If OFF is not -1 then start
7256 the encoding at byte offset OFF and encode at most LEN bytes.
7257 Return the number of bytes placed in the buffer, or zero upon failure. */
7259 int
7260 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7262 /* We don't support starting at a negative offset, and -1 is special. */
7263 if (off < -1)
7264 return 0;
7266 switch (TREE_CODE (expr))
7268 case INTEGER_CST:
7269 return native_encode_int (expr, ptr, len, off);
7271 case REAL_CST:
7272 return native_encode_real (expr, ptr, len, off);
7274 case FIXED_CST:
7275 return native_encode_fixed (expr, ptr, len, off);
7277 case COMPLEX_CST:
7278 return native_encode_complex (expr, ptr, len, off);
7280 case VECTOR_CST:
7281 return native_encode_vector (expr, ptr, len, off);
7283 case STRING_CST:
7284 return native_encode_string (expr, ptr, len, off);
7286 default:
7287 return 0;
7292 /* Subroutine of native_interpret_expr. Interpret the contents of
7293 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7294 If the buffer cannot be interpreted, return NULL_TREE. */
7296 static tree
7297 native_interpret_int (tree type, const unsigned char *ptr, int len)
7299 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7301 if (total_bytes > len
7302 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7303 return NULL_TREE;
7305 wide_int result = wi::from_buffer (ptr, total_bytes);
7307 return wide_int_to_tree (type, result);
7311 /* Subroutine of native_interpret_expr. Interpret the contents of
7312 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7313 If the buffer cannot be interpreted, return NULL_TREE. */
7315 static tree
7316 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7318 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7319 double_int result;
7320 FIXED_VALUE_TYPE fixed_value;
7322 if (total_bytes > len
7323 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7324 return NULL_TREE;
7326 result = double_int::from_buffer (ptr, total_bytes);
7327 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7329 return build_fixed (type, fixed_value);
7333 /* Subroutine of native_interpret_expr. Interpret the contents of
7334 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7335 If the buffer cannot be interpreted, return NULL_TREE. */
7337 static tree
7338 native_interpret_real (tree type, const unsigned char *ptr, int len)
7340 machine_mode mode = TYPE_MODE (type);
7341 int total_bytes = GET_MODE_SIZE (mode);
7342 unsigned char value;
7343 /* There are always 32 bits in each long, no matter the size of
7344 the host's long. We handle floating point representations with
7345 up to 192 bits. */
7346 REAL_VALUE_TYPE r;
7347 long tmp[6];
7349 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7350 if (total_bytes > len || total_bytes > 24)
7351 return NULL_TREE;
7352 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7354 memset (tmp, 0, sizeof (tmp));
7355 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7356 bitpos += BITS_PER_UNIT)
7358 /* Both OFFSET and BYTE index within a long;
7359 bitpos indexes the whole float. */
7360 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7361 if (UNITS_PER_WORD < 4)
7363 int word = byte / UNITS_PER_WORD;
7364 if (WORDS_BIG_ENDIAN)
7365 word = (words - 1) - word;
7366 offset = word * UNITS_PER_WORD;
7367 if (BYTES_BIG_ENDIAN)
7368 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7369 else
7370 offset += byte % UNITS_PER_WORD;
7372 else
7374 offset = byte;
7375 if (BYTES_BIG_ENDIAN)
7377 /* Reverse bytes within each long, or within the entire float
7378 if it's smaller than a long (for HFmode). */
7379 offset = MIN (3, total_bytes - 1) - offset;
7380 gcc_assert (offset >= 0);
7383 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7385 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7388 real_from_target (&r, tmp, mode);
7389 return build_real (type, r);
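/* Worked example (illustrative): on a little-endian target with
   UNITS_PER_WORD >= 4, a 4-byte SFmode value takes the `else' branch
   with OFFSET == BYTE on every iteration, so tmp[0] is assembled
   LSB-first straight from PTR[0..3].  On a big-endian target the
   MIN (3, total_bytes - 1) adjustment instead reverses the byte
   order within the single long.  */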
7393 /* Subroutine of native_interpret_expr. Interpret the contents of
7394 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7395 If the buffer cannot be interpreted, return NULL_TREE. */
7397 static tree
7398 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7400 tree etype, rpart, ipart;
7401 int size;
7403 etype = TREE_TYPE (type);
7404 size = GET_MODE_SIZE (TYPE_MODE (etype));
7405 if (size * 2 > len)
7406 return NULL_TREE;
7407 rpart = native_interpret_expr (etype, ptr, size);
7408 if (!rpart)
7409 return NULL_TREE;
7410 ipart = native_interpret_expr (etype, ptr+size, size);
7411 if (!ipart)
7412 return NULL_TREE;
7413 return build_complex (type, rpart, ipart);
7417 /* Subroutine of native_interpret_expr. Interpret the contents of
7418 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7419 If the buffer cannot be interpreted, return NULL_TREE. */
7421 static tree
7422 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7424 tree etype, elem;
7425 int i, size, count;
7426 tree *elements;
7428 etype = TREE_TYPE (type);
7429 size = GET_MODE_SIZE (TYPE_MODE (etype));
7430 count = TYPE_VECTOR_SUBPARTS (type);
7431 if (size * count > len)
7432 return NULL_TREE;
7434 elements = XALLOCAVEC (tree, count);
7435 for (i = count - 1; i >= 0; i--)
7437 elem = native_interpret_expr (etype, ptr+(i*size), size);
7438 if (!elem)
7439 return NULL_TREE;
7440 elements[i] = elem;
7442 return build_vector (type, elements);
7446 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7447 the buffer PTR of length LEN as a constant of type TYPE. For
7448 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7449 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7450 return NULL_TREE. */
7452 tree
7453 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7455 switch (TREE_CODE (type))
7457 case INTEGER_TYPE:
7458 case ENUMERAL_TYPE:
7459 case BOOLEAN_TYPE:
7460 case POINTER_TYPE:
7461 case REFERENCE_TYPE:
7462 return native_interpret_int (type, ptr, len);
7464 case REAL_TYPE:
7465 return native_interpret_real (type, ptr, len);
7467 case FIXED_POINT_TYPE:
7468 return native_interpret_fixed (type, ptr, len);
7470 case COMPLEX_TYPE:
7471 return native_interpret_complex (type, ptr, len);
7473 case VECTOR_TYPE:
7474 return native_interpret_vector (type, ptr, len);
7476 default:
7477 return NULL_TREE;
7481 /* Returns true if we can interpret the contents of a native encoding
7482 as TYPE. */
7484 static bool
7485 can_native_interpret_type_p (tree type)
7487 switch (TREE_CODE (type))
7489 case INTEGER_TYPE:
7490 case ENUMERAL_TYPE:
7491 case BOOLEAN_TYPE:
7492 case POINTER_TYPE:
7493 case REFERENCE_TYPE:
7494 case FIXED_POINT_TYPE:
7495 case REAL_TYPE:
7496 case COMPLEX_TYPE:
7497 case VECTOR_TYPE:
7498 return true;
7499 default:
7500 return false;
7504 /* Return true iff a constant of type TYPE is accepted by
7505 native_encode_expr. */
7507 bool
7508 can_native_encode_type_p (tree type)
7510 switch (TREE_CODE (type))
7512 case INTEGER_TYPE:
7513 case REAL_TYPE:
7514 case FIXED_POINT_TYPE:
7515 case COMPLEX_TYPE:
7516 case VECTOR_TYPE:
7517 case POINTER_TYPE:
7518 return true;
7519 default:
7520 return false;
7524 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7525 TYPE at compile-time. If we're unable to perform the conversion
7526 return NULL_TREE. */
7528 static tree
7529 fold_view_convert_expr (tree type, tree expr)
7531 /* We support up to 512-bit values (for V8DFmode). */
7532 unsigned char buffer[64];
7533 int len;
7535 /* Check that the host and target are sane. */
7536 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7537 return NULL_TREE;
7539 len = native_encode_expr (expr, buffer, sizeof (buffer));
7540 if (len == 0)
7541 return NULL_TREE;
7543 return native_interpret_expr (type, buffer, len);
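/* Illustrative example (assumes a little-endian target and IEEE
   single precision): folding VIEW_CONVERT_EXPR <int> (1.0f) encodes
   the REAL_CST as the bytes { 0x00, 0x00, 0x80, 0x3f } and
   reinterprets them as an INTEGER_CST, yielding 0x3f800000.  */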
7546 /* Build an expression for the address of T. Folds away INDIRECT_REF
7547 to avoid confusing the gimplify process. */
7549 tree
7550 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7552 /* The size of the object is not relevant when talking about its address. */
7553 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7554 t = TREE_OPERAND (t, 0);
7556 if (TREE_CODE (t) == INDIRECT_REF)
7558 t = TREE_OPERAND (t, 0);
7560 if (TREE_TYPE (t) != ptrtype)
7561 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7563 else if (TREE_CODE (t) == MEM_REF
7564 && integer_zerop (TREE_OPERAND (t, 1)))
7565 return TREE_OPERAND (t, 0);
7566 else if (TREE_CODE (t) == MEM_REF
7567 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7568 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7569 TREE_OPERAND (t, 0),
7570 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7571 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7573 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7575 if (TREE_TYPE (t) != ptrtype)
7576 t = fold_convert_loc (loc, ptrtype, t);
7578 else
7579 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7581 return t;
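/* Illustrative examples of the cases above (not from the original
   source): &*p folds to p (with a cast to PTRTYPE if needed);
   &MEM_REF[p, 0] folds to p; and &MEM_REF[(T *)16, 4] folds to
   16 p+ 4, a POINTER_PLUS_EXPR of the two constants.  */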
7584 /* Build an expression for the address of T. */
7586 tree
7587 build_fold_addr_expr_loc (location_t loc, tree t)
7589 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7591 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7594 /* Fold a unary expression of code CODE and type TYPE with operand
7595 OP0. Return the folded expression if folding is successful.
7596 Otherwise, return NULL_TREE. */
7598 tree
7599 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7601 tree tem;
7602 tree arg0;
7603 enum tree_code_class kind = TREE_CODE_CLASS (code);
7605 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7606 && TREE_CODE_LENGTH (code) == 1);
7608 arg0 = op0;
7609 if (arg0)
7611 if (CONVERT_EXPR_CODE_P (code)
7612 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7614 /* Don't use STRIP_NOPS, because signedness of argument type
7615 matters. */
7616 STRIP_SIGN_NOPS (arg0);
7618 else
7620 /* Strip any conversions that don't change the mode. This
7621 is safe for every expression, except for a comparison
7622 expression because its signedness is derived from its
7623 operands.
7625 Note that this is done as an internal manipulation within
7626 the constant folder, in order to find the simplest
7627 representation of the arguments so that their form can be
7628 studied. In any case, the appropriate type conversions
7629 should be put back in the tree that will get out of the
7630 constant folder. */
7631 STRIP_NOPS (arg0);
7634 if (CONSTANT_CLASS_P (arg0))
7636 tree tem = const_unop (code, type, arg0);
7637 if (tem)
7639 if (TREE_TYPE (tem) != type)
7640 tem = fold_convert_loc (loc, type, tem);
7641 return tem;
7646 tem = generic_simplify (loc, code, type, op0);
7647 if (tem)
7648 return tem;
7650 if (TREE_CODE_CLASS (code) == tcc_unary)
7652 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7653 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7654 fold_build1_loc (loc, code, type,
7655 fold_convert_loc (loc, TREE_TYPE (op0),
7656 TREE_OPERAND (arg0, 1))));
7657 else if (TREE_CODE (arg0) == COND_EXPR)
7659 tree arg01 = TREE_OPERAND (arg0, 1);
7660 tree arg02 = TREE_OPERAND (arg0, 2);
7661 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7662 arg01 = fold_build1_loc (loc, code, type,
7663 fold_convert_loc (loc,
7664 TREE_TYPE (op0), arg01));
7665 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7666 arg02 = fold_build1_loc (loc, code, type,
7667 fold_convert_loc (loc,
7668 TREE_TYPE (op0), arg02));
7669 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7670 arg01, arg02);
7672 /* If this was a conversion, and all we did was to move it
7673 inside the COND_EXPR, bring it back out. But leave it if
7674 it is a conversion from integer to integer and the
7675 result precision is no wider than a word since such a
7676 conversion is cheap and may be optimized away by combine,
7677 while it couldn't if it were outside the COND_EXPR. Then return
7678 so we don't get into an infinite recursion loop taking the
7679 conversion out and then back in. */
7681 if ((CONVERT_EXPR_CODE_P (code)
7682 || code == NON_LVALUE_EXPR)
7683 && TREE_CODE (tem) == COND_EXPR
7684 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7685 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7686 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7687 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7688 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7689 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7690 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7691 && (INTEGRAL_TYPE_P
7692 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7693 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7694 || flag_syntax_only))
7695 tem = build1_loc (loc, code, type,
7696 build3 (COND_EXPR,
7697 TREE_TYPE (TREE_OPERAND
7698 (TREE_OPERAND (tem, 1), 0)),
7699 TREE_OPERAND (tem, 0),
7700 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7701 TREE_OPERAND (TREE_OPERAND (tem, 2),
7702 0)));
7703 return tem;
7707 switch (code)
7709 case NON_LVALUE_EXPR:
7710 if (!maybe_lvalue_p (op0))
7711 return fold_convert_loc (loc, type, op0);
7712 return NULL_TREE;
7714 CASE_CONVERT:
7715 case FLOAT_EXPR:
7716 case FIX_TRUNC_EXPR:
7717 if (COMPARISON_CLASS_P (op0))
7719 /* If we have (type) (a CMP b) and type is an integral type, return
7720 new expression involving the new type. Canonicalize
7721 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7722 non-integral type.
7723 Do not fold the result as that would not simplify further; also,
7724 folding again would result in infinite recursion. */
7725 if (TREE_CODE (type) == BOOLEAN_TYPE)
7726 return build2_loc (loc, TREE_CODE (op0), type,
7727 TREE_OPERAND (op0, 0),
7728 TREE_OPERAND (op0, 1));
7729 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7730 && TREE_CODE (type) != VECTOR_TYPE)
7731 return build3_loc (loc, COND_EXPR, type, op0,
7732 constant_boolean_node (true, type),
7733 constant_boolean_node (false, type));
7736 /* Handle (T *)&A.B.C for A being of type T and B and C
7737 living at offset zero. This occurs frequently in
7738 C++ upcasting and then accessing the base. */
7739 if (TREE_CODE (op0) == ADDR_EXPR
7740 && POINTER_TYPE_P (type)
7741 && handled_component_p (TREE_OPERAND (op0, 0)))
7743 HOST_WIDE_INT bitsize, bitpos;
7744 tree offset;
7745 machine_mode mode;
7746 int unsignedp, reversep, volatilep;
7747 tree base
7748 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7749 &offset, &mode, &unsignedp, &reversep,
7750 &volatilep);
7751 /* If the reference was to a (constant) zero offset, we can use
7752 the address of the base if it has the same base type
7753 as the result type and the pointer type is unqualified. */
7754 if (! offset && bitpos == 0
7755 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7756 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7757 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7758 return fold_convert_loc (loc, type,
7759 build_fold_addr_expr_loc (loc, base));
7762 if (TREE_CODE (op0) == MODIFY_EXPR
7763 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7764 /* Detect assigning a bitfield. */
7765 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7766 && DECL_BIT_FIELD
7767 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7769 /* Don't leave an assignment inside a conversion
7770 unless assigning a bitfield. */
7771 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7772 /* First do the assignment, then return converted constant. */
7773 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7774 TREE_NO_WARNING (tem) = 1;
7775 TREE_USED (tem) = 1;
7776 return tem;
7779 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7780 constants (if x has signed type, the sign bit cannot be set
7781 in c). This folds extension into the BIT_AND_EXPR.
7782 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7783 very likely don't have maximal range for their precision and this
7784 transformation effectively doesn't preserve non-maximal ranges. */
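/* Illustrative example (assumed): for signed char C, the cast
   (unsigned int) (C & 0x7f) becomes (unsigned int) C & 0x7f; since
   0x7f leaves the sign bit of C clear, masking before the widening
   equals masking after it.  */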
7785 if (TREE_CODE (type) == INTEGER_TYPE
7786 && TREE_CODE (op0) == BIT_AND_EXPR
7787 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7789 tree and_expr = op0;
7790 tree and0 = TREE_OPERAND (and_expr, 0);
7791 tree and1 = TREE_OPERAND (and_expr, 1);
7792 int change = 0;
7794 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7795 || (TYPE_PRECISION (type)
7796 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7797 change = 1;
7798 else if (TYPE_PRECISION (TREE_TYPE (and1))
7799 <= HOST_BITS_PER_WIDE_INT
7800 && tree_fits_uhwi_p (and1))
7802 unsigned HOST_WIDE_INT cst;
7804 cst = tree_to_uhwi (and1);
7805 cst &= HOST_WIDE_INT_M1U
7806 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7807 change = (cst == 0);
7808 if (change
7809 && !flag_syntax_only
7810 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
7811 == ZERO_EXTEND))
7813 tree uns = unsigned_type_for (TREE_TYPE (and0));
7814 and0 = fold_convert_loc (loc, uns, and0);
7815 and1 = fold_convert_loc (loc, uns, and1);
7818 if (change)
7820 tem = force_fit_type (type, wi::to_widest (and1), 0,
7821 TREE_OVERFLOW (and1));
7822 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7823 fold_convert_loc (loc, type, and0), tem);
7827 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7828 cast (T1)X will fold away. We assume that this happens when X itself
7829 is a cast. */
7830 if (POINTER_TYPE_P (type)
7831 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7832 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7834 tree arg00 = TREE_OPERAND (arg0, 0);
7835 tree arg01 = TREE_OPERAND (arg0, 1);
7837 return fold_build_pointer_plus_loc
7838 (loc, fold_convert_loc (loc, type, arg00), arg01);
7841 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7842 of the same precision, and X is an integer type not narrower than
7843 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7844 if (INTEGRAL_TYPE_P (type)
7845 && TREE_CODE (op0) == BIT_NOT_EXPR
7846 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7847 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7848 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7850 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7851 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7852 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7853 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7854 fold_convert_loc (loc, type, tem));
7857 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7858 type of X and Y (integer types only). */
7859 if (INTEGRAL_TYPE_P (type)
7860 && TREE_CODE (op0) == MULT_EXPR
7861 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7862 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7864 /* Be careful not to introduce new overflows. */
7865 tree mult_type;
7866 if (TYPE_OVERFLOW_WRAPS (type))
7867 mult_type = type;
7868 else
7869 mult_type = unsigned_type_for (type);
7871 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7873 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7874 fold_convert_loc (loc, mult_type,
7875 TREE_OPERAND (op0, 0)),
7876 fold_convert_loc (loc, mult_type,
7877 TREE_OPERAND (op0, 1)));
7878 return fold_convert_loc (loc, type, tem);
7882 return NULL_TREE;
7884 case VIEW_CONVERT_EXPR:
7885 if (TREE_CODE (op0) == MEM_REF)
7887 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
7888 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
7889 tem = fold_build2_loc (loc, MEM_REF, type,
7890 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7891 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7892 return tem;
7895 return NULL_TREE;
7897 case NEGATE_EXPR:
7898 tem = fold_negate_expr (loc, arg0);
7899 if (tem)
7900 return fold_convert_loc (loc, type, tem);
7901 return NULL_TREE;
7903 case ABS_EXPR:
7904 /* Convert fabs((double)float) into (double)fabsf(float). */
7905 if (TREE_CODE (arg0) == NOP_EXPR
7906 && TREE_CODE (type) == REAL_TYPE)
7908 tree targ0 = strip_float_extensions (arg0);
7909 if (targ0 != arg0)
7910 return fold_convert_loc (loc, type,
7911 fold_build1_loc (loc, ABS_EXPR,
7912 TREE_TYPE (targ0),
7913 targ0));
7915 return NULL_TREE;
7917 case BIT_NOT_EXPR:
7918 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7919 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7920 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7921 fold_convert_loc (loc, type,
7922 TREE_OPERAND (arg0, 0)))))
7923 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7924 fold_convert_loc (loc, type,
7925 TREE_OPERAND (arg0, 1)));
7926 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7927 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7928 fold_convert_loc (loc, type,
7929 TREE_OPERAND (arg0, 1)))))
7930 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7931 fold_convert_loc (loc, type,
7932 TREE_OPERAND (arg0, 0)), tem);
7934 return NULL_TREE;
7936 case TRUTH_NOT_EXPR:
7937 /* Note that the operand of this must be an int
7938 and its values must be 0 or 1.
7939 ("true" is a fixed value perhaps depending on the language,
7940 but we don't handle values other than 1 correctly yet.) */
7941 tem = fold_truth_not_expr (loc, arg0);
7942 if (!tem)
7943 return NULL_TREE;
7944 return fold_convert_loc (loc, type, tem);
7946 case INDIRECT_REF:
7947 /* Fold *&X to X if X is an lvalue. */
7948 if (TREE_CODE (op0) == ADDR_EXPR)
7950 tree op00 = TREE_OPERAND (op0, 0);
7951 if ((VAR_P (op00)
7952 || TREE_CODE (op00) == PARM_DECL
7953 || TREE_CODE (op00) == RESULT_DECL)
7954 && !TREE_READONLY (op00))
7955 return op00;
7957 return NULL_TREE;
7959 default:
7960 return NULL_TREE;
7961 } /* switch (code) */
7965 /* If the operation was a conversion do _not_ mark a resulting constant
7966 with TREE_OVERFLOW if the original constant was not. These conversions
7967 have implementation defined behavior and retaining the TREE_OVERFLOW
7968 flag here would confuse later passes such as VRP. */
7969 tree
7970 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7971 tree type, tree op0)
7973 tree res = fold_unary_loc (loc, code, type, op0);
7974 if (res
7975 && TREE_CODE (res) == INTEGER_CST
7976 && TREE_CODE (op0) == INTEGER_CST
7977 && CONVERT_EXPR_CODE_P (code))
7978 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7980 return res;
7983 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7984 operands OP0 and OP1. LOC is the location of the resulting expression.
7985 ARG0 and ARG1 are the NOP-stripped (STRIP_NOPS) results of OP0 and OP1.
7986 Return the folded expression if folding is successful. Otherwise,
7987 return NULL_TREE. */
7988 static tree
7989 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7990 tree arg0, tree arg1, tree op0, tree op1)
7992 tree tem;
7994 /* We only do these simplifications if we are optimizing. */
7995 if (!optimize)
7996 return NULL_TREE;
7998 /* Check for things like (A || B) && (A || C). We can convert this
7999 to A || (B && C). Note that either operator can be any of the four
8000 truth and/or operations and the transformation will still be
8001 valid. Also note that we only care about order for the
8002 ANDIF and ORIF operators. If B contains side effects, this
8003 might change the truth-value of A. */
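/* Illustrative example (not in the original source):
   (a || b) && (a || c) becomes a || (b && c).  The side-effect check
   matters because the original form evaluates "a" twice; if "b"
   could modify "a", the second evaluation might differ from the
   first.  */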
8004 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8005 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8006 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8007 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8008 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8009 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8011 tree a00 = TREE_OPERAND (arg0, 0);
8012 tree a01 = TREE_OPERAND (arg0, 1);
8013 tree a10 = TREE_OPERAND (arg1, 0);
8014 tree a11 = TREE_OPERAND (arg1, 1);
8015 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8016 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8017 && (code == TRUTH_AND_EXPR
8018 || code == TRUTH_OR_EXPR));
8020 if (operand_equal_p (a00, a10, 0))
8021 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8022 fold_build2_loc (loc, code, type, a01, a11));
8023 else if (commutative && operand_equal_p (a00, a11, 0))
8024 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8025 fold_build2_loc (loc, code, type, a01, a10));
8026 else if (commutative && operand_equal_p (a01, a10, 0))
8027 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8028 fold_build2_loc (loc, code, type, a00, a11));
8030 /* This case is tricky because we must either have commutative
8031 operators or else A10 must not have side-effects. */
8033 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8034 && operand_equal_p (a01, a11, 0))
8035 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8036 fold_build2_loc (loc, code, type, a00, a10),
8037 a01);
8040 /* See if we can build a range comparison. */
8041 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8042 return tem;
8044 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8045 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8047 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8048 if (tem)
8049 return fold_build2_loc (loc, code, type, tem, arg1);
8052 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8053 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8055 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8056 if (tem)
8057 return fold_build2_loc (loc, code, type, arg0, tem);
8060 /* Check for the possibility of merging component references. If our
8061 lhs is another similar operation, try to merge its rhs with our
8062 rhs. Then try to merge our lhs and rhs. */
8063 if (TREE_CODE (arg0) == code
8064 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8065 TREE_OPERAND (arg0, 1), arg1)))
8066 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8068 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8069 return tem;
8071 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8072 && (code == TRUTH_AND_EXPR
8073 || code == TRUTH_ANDIF_EXPR
8074 || code == TRUTH_OR_EXPR
8075 || code == TRUTH_ORIF_EXPR))
8077 enum tree_code ncode, icode;
8079 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8080 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8081 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8083 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8084 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8085 We don't want to pack more than two leaves into a non-IF AND/OR
8086 expression.
8087 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8088 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8089 If the inner right-hand side of the left-hand operand has
8090 side-effects, or isn't simple, then we can't add to it,
8091 as otherwise we might destroy the if-sequence. */
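/* Illustrative example: when LOGICAL_OP_NON_SHORT_CIRCUIT holds and
   both "b" and "c" are simple and trap-free, ((a && b) && c) is
   rebuilt as TRUTH_ANDIF_EXPR (a, TRUTH_AND_EXPR (b, c)), keeping at
   most two leaves under the non-short-circuit operator.  */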
8092 if (TREE_CODE (arg0) == icode
8093 && simple_operand_p_2 (arg1)
8094 /* Needed for sequence points to handle trappings, and
8095 side-effects. */
8096 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8098 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8099 arg1);
8100 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8101 tem);
8103 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8104 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8105 else if (TREE_CODE (arg1) == icode
8106 && simple_operand_p_2 (arg0)
8107 /* Needed for sequence points to handle trappings, and
8108 side-effects. */
8109 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8111 tem = fold_build2_loc (loc, ncode, type,
8112 arg0, TREE_OPERAND (arg1, 0));
8113 return fold_build2_loc (loc, icode, type, tem,
8114 TREE_OPERAND (arg1, 1));
8116 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8117 into (A OR B).
8118 For sequence point consistency, we need to check for trapping,
8119 and side-effects. */
8120 else if (code == icode && simple_operand_p_2 (arg0)
8121 && simple_operand_p_2 (arg1))
8122 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8125 return NULL_TREE;
8128 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8129 by changing CODE to reduce the magnitude of constants involved in
8130 ARG0 of the comparison.
8131 Returns a canonicalized comparison tree if a simplification was
8132 possible, otherwise returns NULL_TREE.
8133 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8134 valid if signed overflow is undefined. */
8136 static tree
8137 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8138 tree arg0, tree arg1,
8139 bool *strict_overflow_p)
8141 enum tree_code code0 = TREE_CODE (arg0);
8142 tree t, cst0 = NULL_TREE;
8143 int sgn0;
8145 /* Match A +- CST code arg1. We can change this only if overflow
8146 is undefined. */
8147 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8148 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8149 /* In principle pointers also have undefined overflow behavior,
8150 but that causes problems elsewhere. */
8151 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8152 && (code0 == MINUS_EXPR
8153 || code0 == PLUS_EXPR)
8154 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8155 return NULL_TREE;
8157 /* Identify the constant in arg0 and its sign. */
8158 cst0 = TREE_OPERAND (arg0, 1);
8159 sgn0 = tree_int_cst_sgn (cst0);
8161 /* Overflowed constants and zero will cause problems. */
8162 if (integer_zerop (cst0)
8163 || TREE_OVERFLOW (cst0))
8164 return NULL_TREE;
8166 /* See if we can reduce the magnitude of the constant in
8167 arg0 by changing the comparison code. */
8168 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8169 if (code == LT_EXPR
8170 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8171 code = LE_EXPR;
8172 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8173 else if (code == GT_EXPR
8174 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8175 code = GE_EXPR;
8176 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8177 else if (code == LE_EXPR
8178 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8179 code = LT_EXPR;
8180 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8181 else if (code == GE_EXPR
8182 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8183 code = GT_EXPR;
8184 else
8185 return NULL_TREE;
8186 *strict_overflow_p = true;
8188 /* Now build the constant reduced in magnitude. But not if that
8189 would produce one outside of its type's range. */
8190 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8191 && ((sgn0 == 1
8192 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8193 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8194 || (sgn0 == -1
8195 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8196 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8197 return NULL_TREE;
8199 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8200 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8201 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8202 t = fold_convert (TREE_TYPE (arg1), t);
8204 return fold_build2_loc (loc, code, type, t, arg1);
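/* Illustrative example (assumes signed overflow is undefined for the
   type): x + 2 > y is canonicalized to x + 1 >= y, reducing the
   magnitude of the constant; *STRICT_OVERFLOW_P is set so the caller
   can emit a -Wstrict-overflow warning.  */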
8207 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8208 overflow further. Try to decrease the magnitude of constants involved
8209 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8210 and put sole constants at the second argument position.
8211 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8213 static tree
8214 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8215 tree arg0, tree arg1)
8217 tree t;
8218 bool strict_overflow_p;
8219 const char * const warnmsg = G_("assuming signed overflow does not occur "
8220 "when reducing constant in comparison");
8222 /* Try canonicalization by simplifying arg0. */
8223 strict_overflow_p = false;
8224 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8225 &strict_overflow_p);
8226 if (t)
8228 if (strict_overflow_p)
8229 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8230 return t;
8233 /* Try canonicalization by simplifying arg1 using the swapped
8234 comparison. */
8235 code = swap_tree_comparison (code);
8236 strict_overflow_p = false;
8237 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8238 &strict_overflow_p);
8239 if (t && strict_overflow_p)
8240 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8241 return t;
8244 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8245 space. This is used to avoid issuing overflow warnings for
8246 expressions like &p->x which cannot wrap. */
8248 static bool
8249 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8251 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8252 return true;
8254 if (bitpos < 0)
8255 return true;
8257 wide_int wi_offset;
8258 int precision = TYPE_PRECISION (TREE_TYPE (base));
8259 if (offset == NULL_TREE)
8260 wi_offset = wi::zero (precision);
8261 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8262 return true;
8263 else
8264 wi_offset = offset;
8266 bool overflow;
8267 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8268 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8269 if (overflow)
8270 return true;
8272 if (!wi::fits_uhwi_p (total))
8273 return true;
8275 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8276 if (size <= 0)
8277 return true;
8279 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8280 array. */
8281 if (TREE_CODE (base) == ADDR_EXPR)
8283 HOST_WIDE_INT base_size;
8285 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8286 if (base_size > 0 && size < base_size)
8287 size = base_size;
8290 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8293 /* Return a positive integer when the symbol DECL is known to have
8294 a nonzero address, zero when it's known not to (e.g., it's a weak
8295 symbol), and a negative integer when the symbol is not yet in the
8296 symbol table and so whether or not its address is zero is unknown.
8297 For function-local objects, always return a positive integer. */
8298 static int
8299 maybe_nonzero_address (tree decl)
8301 if (DECL_P (decl) && decl_in_symtab_p (decl))
8302 if (struct symtab_node *symbol = symtab_node::get_create (decl))
8303 return symbol->nonzero_address ();
8305 /* Function local objects are never NULL. */
8306 if (DECL_P (decl)
8307 && (DECL_CONTEXT (decl)
8308 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8309 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8310 return 1;
8312 return -1;
8315 /* Subroutine of fold_binary. This routine performs all of the
8316 transformations that are common to the equality/inequality
8317 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8318 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8319 fold_binary itself should use fold_binary instead. Fold a comparison with
8320 tree code CODE and type TYPE with operands OP0 and OP1. Return
8321 the folded comparison or NULL_TREE. */
8323 static tree
8324 fold_comparison (location_t loc, enum tree_code code, tree type,
8325 tree op0, tree op1)
8327 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8328 tree arg0, arg1, tem;
8330 arg0 = op0;
8331 arg1 = op1;
8333 STRIP_SIGN_NOPS (arg0);
8334 STRIP_SIGN_NOPS (arg1);
8336 /* For comparisons of pointers we can decompose it to a compile time
8337 comparison of the base objects and the offsets into the object.
8338 This requires at least one operand being an ADDR_EXPR or a
8339 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
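/* Illustrative example (assumed layout, 4-byte int): for
   struct S { int a; int b; } s; the comparison &s.b > &s.a
   decomposes to the common base "s" with bitpos0 == 32 and
   bitpos1 == 0, so the code below folds it to constant true.  */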
8340 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8341 && (TREE_CODE (arg0) == ADDR_EXPR
8342 || TREE_CODE (arg1) == ADDR_EXPR
8343 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8344 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8346 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8347 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8348 machine_mode mode;
8349 int volatilep, reversep, unsignedp;
8350 bool indirect_base0 = false, indirect_base1 = false;
8352 /* Get base and offset for the access. Strip ADDR_EXPR for
8353 get_inner_reference, but put it back by stripping INDIRECT_REF
8354 off the base object if possible. indirect_baseN will be true
8355 if baseN is not an address but refers to the object itself. */
8356 base0 = arg0;
8357 if (TREE_CODE (arg0) == ADDR_EXPR)
8359 base0
8360 = get_inner_reference (TREE_OPERAND (arg0, 0),
8361 &bitsize, &bitpos0, &offset0, &mode,
8362 &unsignedp, &reversep, &volatilep);
8363 if (TREE_CODE (base0) == INDIRECT_REF)
8364 base0 = TREE_OPERAND (base0, 0);
8365 else
8366 indirect_base0 = true;
8368 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8370 base0 = TREE_OPERAND (arg0, 0);
8371 STRIP_SIGN_NOPS (base0);
8372 if (TREE_CODE (base0) == ADDR_EXPR)
8374 base0
8375 = get_inner_reference (TREE_OPERAND (base0, 0),
8376 &bitsize, &bitpos0, &offset0, &mode,
8377 &unsignedp, &reversep, &volatilep);
8378 if (TREE_CODE (base0) == INDIRECT_REF)
8379 base0 = TREE_OPERAND (base0, 0);
8380 else
8381 indirect_base0 = true;
8383 if (offset0 == NULL_TREE || integer_zerop (offset0))
8384 offset0 = TREE_OPERAND (arg0, 1);
8385 else
8386 offset0 = size_binop (PLUS_EXPR, offset0,
8387 TREE_OPERAND (arg0, 1));
8388 if (TREE_CODE (offset0) == INTEGER_CST)
8390 offset_int tem = wi::sext (wi::to_offset (offset0),
8391 TYPE_PRECISION (sizetype));
8392 tem <<= LOG2_BITS_PER_UNIT;
8393 tem += bitpos0;
8394 if (wi::fits_shwi_p (tem))
8396 bitpos0 = tem.to_shwi ();
8397 offset0 = NULL_TREE;
8402 base1 = arg1;
8403 if (TREE_CODE (arg1) == ADDR_EXPR)
8405 base1
8406 = get_inner_reference (TREE_OPERAND (arg1, 0),
8407 &bitsize, &bitpos1, &offset1, &mode,
8408 &unsignedp, &reversep, &volatilep);
8409 if (TREE_CODE (base1) == INDIRECT_REF)
8410 base1 = TREE_OPERAND (base1, 0);
8411 else
8412 indirect_base1 = true;
8414 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8416 base1 = TREE_OPERAND (arg1, 0);
8417 STRIP_SIGN_NOPS (base1);
8418 if (TREE_CODE (base1) == ADDR_EXPR)
8420 base1
8421 = get_inner_reference (TREE_OPERAND (base1, 0),
8422 &bitsize, &bitpos1, &offset1, &mode,
8423 &unsignedp, &reversep, &volatilep);
8424 if (TREE_CODE (base1) == INDIRECT_REF)
8425 base1 = TREE_OPERAND (base1, 0);
8426 else
8427 indirect_base1 = true;
8429 if (offset1 == NULL_TREE || integer_zerop (offset1))
8430 offset1 = TREE_OPERAND (arg1, 1);
8431 else
8432 offset1 = size_binop (PLUS_EXPR, offset1,
8433 TREE_OPERAND (arg1, 1));
8434 if (TREE_CODE (offset1) == INTEGER_CST)
8436 offset_int tem = wi::sext (wi::to_offset (offset1),
8437 TYPE_PRECISION (sizetype));
8438 tem <<= LOG2_BITS_PER_UNIT;
8439 tem += bitpos1;
8440 if (wi::fits_shwi_p (tem))
8442 bitpos1 = tem.to_shwi ();
8443 offset1 = NULL_TREE;
8448 /* If we have equivalent bases we might be able to simplify. */
8449 if (indirect_base0 == indirect_base1
8450 && operand_equal_p (base0, base1,
8451 indirect_base0 ? OEP_ADDRESS_OF : 0))
8453 /* We can fold this expression to a constant if the non-constant
8454 offset parts are equal. */
8455 if (offset0 == offset1
8456 || (offset0 && offset1
8457 && operand_equal_p (offset0, offset1, 0)))
8459 if (!equality_code
8460 && bitpos0 != bitpos1
8461 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8462 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8463 fold_overflow_warning (("assuming pointer wraparound does not "
8464 "occur when comparing P +- C1 with "
8465 "P +- C2"),
8466 WARN_STRICT_OVERFLOW_CONDITIONAL);
8468 switch (code)
8470 case EQ_EXPR:
8471 return constant_boolean_node (bitpos0 == bitpos1, type);
8472 case NE_EXPR:
8473 return constant_boolean_node (bitpos0 != bitpos1, type);
8474 case LT_EXPR:
8475 return constant_boolean_node (bitpos0 < bitpos1, type);
8476 case LE_EXPR:
8477 return constant_boolean_node (bitpos0 <= bitpos1, type);
8478 case GE_EXPR:
8479 return constant_boolean_node (bitpos0 >= bitpos1, type);
8480 case GT_EXPR:
8481 return constant_boolean_node (bitpos0 > bitpos1, type);
8482 default:;
8485 /* We can simplify the comparison to a comparison of the variable
8486 offset parts if the constant offset parts are equal.
8487 Be careful to use signed sizetype here because otherwise we
8488 mess with array offsets in the wrong way. This is possible
8489 because pointer arithmetic is restricted to remain within an
8490 object and overflow on pointer differences is undefined as of
8491 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8492 else if (bitpos0 == bitpos1)
8494 /* By converting to signed sizetype we cover middle-end pointer
8495 arithmetic which operates on unsigned pointer types of size
8496 type size and ARRAY_REF offsets which are properly sign or
8497 zero extended from their type in case it is narrower than
8498 sizetype. */
8499 if (offset0 == NULL_TREE)
8500 offset0 = build_int_cst (ssizetype, 0);
8501 else
8502 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8503 if (offset1 == NULL_TREE)
8504 offset1 = build_int_cst (ssizetype, 0);
8505 else
8506 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8508 if (!equality_code
8509 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8510 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8511 fold_overflow_warning (("assuming pointer wraparound does not "
8512 "occur when comparing P +- C1 with "
8513 "P +- C2"),
8514 WARN_STRICT_OVERFLOW_COMPARISON);
8516 return fold_build2_loc (loc, code, type, offset0, offset1);
8519 /* For equal offsets we can simplify to a comparison of the
8520 base addresses. */
8521 else if (bitpos0 == bitpos1
8522 && (indirect_base0
8523 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8524 && (indirect_base1
8525 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8526 && ((offset0 == offset1)
8527 || (offset0 && offset1
8528 && operand_equal_p (offset0, offset1, 0))))
8530 if (indirect_base0)
8531 base0 = build_fold_addr_expr_loc (loc, base0);
8532 if (indirect_base1)
8533 base1 = build_fold_addr_expr_loc (loc, base1);
8534 return fold_build2_loc (loc, code, type, base0, base1);
8536 /* Comparison between an ordinary (non-weak) symbol and a null
8537 pointer can be eliminated since such symbols must have a non
8538 null address. In C, relational expressions between pointers
8539 to objects and null pointers are undefined. The results
8540 below follow the C++ rules with the additional property that
8541 every object pointer compares greater than a null pointer. */
8543 else if (((DECL_P (base0)
8544 && maybe_nonzero_address (base0) > 0
8545 /* Avoid folding references to struct members at offset 0 to
8546 prevent tests like '&ptr->firstmember == 0' from getting
8547 eliminated. When ptr is null, although the -> expression
8548 is strictly speaking invalid, GCC retains it as a matter
8549 of QoI. See PR c/44555. */
8550 && (offset0 == NULL_TREE && bitpos0 != 0))
8551 || CONSTANT_CLASS_P (base0))
8552 && indirect_base0
8553 /* The caller guarantees that when one of the arguments is
8554 constant (i.e., null in this case) it is second. */
8555 && integer_zerop (arg1))
8557 switch (code)
8559 case EQ_EXPR:
8560 case LE_EXPR:
8561 case LT_EXPR:
8562 return constant_boolean_node (false, type);
8563 case GE_EXPR:
8564 case GT_EXPR:
8565 case NE_EXPR:
8566 return constant_boolean_node (true, type);
8567 default:
8568 gcc_unreachable ();
8573 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8574 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8575 the resulting offset is smaller in absolute value than the
8576 original one and has the same sign. */
8577 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8578 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8579 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8580 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8581 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8582 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8583 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8584 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8586 tree const1 = TREE_OPERAND (arg0, 1);
8587 tree const2 = TREE_OPERAND (arg1, 1);
8588 tree variable1 = TREE_OPERAND (arg0, 0);
8589 tree variable2 = TREE_OPERAND (arg1, 0);
8590 tree cst;
8591 const char * const warnmsg = G_("assuming signed overflow does not "
8592 "occur when combining constants around "
8593 "a comparison");
8595 /* Put the constant on the side where it doesn't overflow and is
8596 of lower absolute value and of the same sign as before. */
8597 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8598 ? MINUS_EXPR : PLUS_EXPR,
8599 const2, const1);
8600 if (!TREE_OVERFLOW (cst)
8601 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8602 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8604 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8605 return fold_build2_loc (loc, code, type,
8606 variable1,
8607 fold_build2_loc (loc, TREE_CODE (arg1),
8608 TREE_TYPE (arg1),
8609 variable2, cst));
8612 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8613 ? MINUS_EXPR : PLUS_EXPR,
8614 const1, const2);
8615 if (!TREE_OVERFLOW (cst)
8616 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8617 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8619 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8620 return fold_build2_loc (loc, code, type,
8621 fold_build2_loc (loc, TREE_CODE (arg0),
8622 TREE_TYPE (arg0),
8623 variable1, cst),
8624 variable2);
8628 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8629 if (tem)
8630 return tem;
8632 /* If we are comparing an expression that just has comparisons
8633 of two integer values, arithmetic expressions of those comparisons,
8634 and constants, we can simplify it. There are only three cases
8635 to check: the two values can either be equal, the first can be
8636 greater, or the second can be greater. Fold the expression for
8637 those three values. Since each value must be 0 or 1, we have
8638 eight possibilities, each of which corresponds to the constant 0
8639 or 1 or one of the six possible comparisons.
8641 This handles common cases like (a > b) == 0 but also handles
8642 expressions like ((x > y) - (y > x)) > 0, which supposedly
8643 occur in macroized code. */
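/* Illustrative example: for (a > b) == 0 the three substitutions
   below give high_result == 0, equal_result == 1 and low_result == 1,
   i.e. mask value 3, so the whole expression is rewritten as
   a <= b.  */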
8645 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8647 tree cval1 = 0, cval2 = 0;
8648 int save_p = 0;
8650 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8651 /* Don't handle degenerate cases here; they should already
8652 have been handled anyway. */
8653 && cval1 != 0 && cval2 != 0
8654 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8655 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8656 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8657 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8658 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8659 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8660 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8662 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8663 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8665 /* We can't just pass T to eval_subst in case cval1 or cval2
8666 was the same as ARG1. */
8668 tree high_result
8669 = fold_build2_loc (loc, code, type,
8670 eval_subst (loc, arg0, cval1, maxval,
8671 cval2, minval),
8672 arg1);
8673 tree equal_result
8674 = fold_build2_loc (loc, code, type,
8675 eval_subst (loc, arg0, cval1, maxval,
8676 cval2, maxval),
8677 arg1);
8678 tree low_result
8679 = fold_build2_loc (loc, code, type,
8680 eval_subst (loc, arg0, cval1, minval,
8681 cval2, maxval),
8682 arg1);
8684 /* All three of these results should be 0 or 1. Confirm they are.
8685 Then use those values to select the proper code to use. */
8687 if (TREE_CODE (high_result) == INTEGER_CST
8688 && TREE_CODE (equal_result) == INTEGER_CST
8689 && TREE_CODE (low_result) == INTEGER_CST)
8691 /* Make a 3-bit mask with the high-order bit being the
8692 value for `>', the next for '=', and the low for '<'. */
8693 switch ((integer_onep (high_result) * 4)
8694 + (integer_onep (equal_result) * 2)
8695 + integer_onep (low_result))
8697 case 0:
8698 /* Always false. */
8699 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8700 case 1:
8701 code = LT_EXPR;
8702 break;
8703 case 2:
8704 code = EQ_EXPR;
8705 break;
8706 case 3:
8707 code = LE_EXPR;
8708 break;
8709 case 4:
8710 code = GT_EXPR;
8711 break;
8712 case 5:
8713 code = NE_EXPR;
8714 break;
8715 case 6:
8716 code = GE_EXPR;
8717 break;
8718 case 7:
8719 /* Always true. */
8720 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8723 if (save_p)
8725 tem = save_expr (build2 (code, type, cval1, cval2));
8726 protected_set_expr_location (tem, loc);
8727 return tem;
8729 return fold_build2_loc (loc, code, type, cval1, cval2);
8734 return NULL_TREE;
8738 /* Subroutine of fold_binary. Optimize complex multiplications of the
8739 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8740 argument EXPR represents the expression "z" of type TYPE. */
8742 static tree
8743 fold_mult_zconjz (location_t loc, tree type, tree expr)
8745 tree itype = TREE_TYPE (type);
8746 tree rpart, ipart, tem;
8748 if (TREE_CODE (expr) == COMPLEX_EXPR)
8750 rpart = TREE_OPERAND (expr, 0);
8751 ipart = TREE_OPERAND (expr, 1);
8753 else if (TREE_CODE (expr) == COMPLEX_CST)
8755 rpart = TREE_REALPART (expr);
8756 ipart = TREE_IMAGPART (expr);
8758 else
8760 expr = save_expr (expr);
8761 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8762 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8765 rpart = save_expr (rpart);
8766 ipart = save_expr (ipart);
8767 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8768 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8769 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8770 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8771 build_zero_cst (itype));
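/* Worked example: with z = a + b*i,
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b + 0*i,
   which is exactly the COMPLEX_EXPR of the two squared parts and a
   zero imaginary part built above.  */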
8775 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8776 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8778 static bool
8779 vec_cst_ctor_to_array (tree arg, tree *elts)
8781 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8783 if (TREE_CODE (arg) == VECTOR_CST)
8785 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8786 elts[i] = VECTOR_CST_ELT (arg, i);
8788 else if (TREE_CODE (arg) == CONSTRUCTOR)
8790 constructor_elt *elt;
8792 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8793 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8794 return false;
8795 else
8796 elts[i] = elt->value;
8798 else
8799 return false;
8800 for (; i < nelts; i++)
8801 elts[i]
8802 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8803 return true;
8806 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8807 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8808 NULL_TREE otherwise. */
8810 static tree
8811 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8813 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8814 tree *elts;
8815 bool need_ctor = false;
8817 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8818 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8819 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8820 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8821 return NULL_TREE;
8823 elts = XALLOCAVEC (tree, nelts * 3);
8824 if (!vec_cst_ctor_to_array (arg0, elts)
8825 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8826 return NULL_TREE;
8828 for (i = 0; i < nelts; i++)
8830 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8831 need_ctor = true;
8832 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8835 if (need_ctor)
8837 vec<constructor_elt, va_gc> *v;
8838 vec_alloc (v, nelts);
8839 for (i = 0; i < nelts; i++)
8840 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8841 return build_constructor (type, v);
8843 else
8844 return build_vector (type, &elts[2 * nelts]);
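/* Illustrative example (assumed values): with nelts == 4,
   arg0 == { 0, 1, 2, 3 }, arg1 == { 4, 5, 6, 7 } and
   sel == { 0, 5, 2, 7 }, SEL indexes the concatenation of the two
   element arrays and yields { 0, 5, 2, 7 }; all elements being
   constants, a VECTOR_CST rather than a CONSTRUCTOR is returned.  */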
8847 /* Try to fold a pointer difference of type TYPE between two address
8848 array references AREF0 and AREF1 using location LOC. Return a
8849 simplified expression for the difference or NULL_TREE. */
8851 static tree
8852 fold_addr_of_array_ref_difference (location_t loc, tree type,
8853 tree aref0, tree aref1)
8855 tree base0 = TREE_OPERAND (aref0, 0);
8856 tree base1 = TREE_OPERAND (aref1, 0);
8857 tree base_offset = build_int_cst (type, 0);
8859 /* If the bases are array references as well, recurse. If the bases
8860 are pointer indirections compute the difference of the pointers.
8861 If the bases are equal, we are set. */
8862 if ((TREE_CODE (base0) == ARRAY_REF
8863 && TREE_CODE (base1) == ARRAY_REF
8864 && (base_offset
8865 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8866 || (INDIRECT_REF_P (base0)
8867 && INDIRECT_REF_P (base1)
8868 && (base_offset
8869 = fold_binary_loc (loc, MINUS_EXPR, type,
8870 fold_convert (type, TREE_OPERAND (base0, 0)),
8871 fold_convert (type,
8872 TREE_OPERAND (base1, 0)))))
8873 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8875 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8876 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8877 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8878 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
8879 return fold_build2_loc (loc, PLUS_EXPR, type,
8880 base_offset,
8881 fold_build2_loc (loc, MULT_EXPR, type,
8882 diff, esz));
8884 return NULL_TREE;
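/* Illustrative example (assumes 4-byte int): for int a[10], the
   difference &a[i] - &a[j] reaches here as two ARRAY_REFs with equal
   bases and folds to 0 + (i - j) * 4, i.e. the index difference
   times the element size.  */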
8887 /* If the real or vector real constant CST of type TYPE has an exact
8888 inverse, return it, else return NULL. */
8890 tree
8891 exact_inverse (tree type, tree cst)
8893 REAL_VALUE_TYPE r;
8894 tree unit_type, *elts;
8895 machine_mode mode;
8896 unsigned vec_nelts, i;
8898 switch (TREE_CODE (cst))
8900 case REAL_CST:
8901 r = TREE_REAL_CST (cst);
8903 if (exact_real_inverse (TYPE_MODE (type), &r))
8904 return build_real (type, r);
8906 return NULL_TREE;
8908 case VECTOR_CST:
8909 vec_nelts = VECTOR_CST_NELTS (cst);
8910 elts = XALLOCAVEC (tree, vec_nelts);
8911 unit_type = TREE_TYPE (type);
8912 mode = TYPE_MODE (unit_type);
8914 for (i = 0; i < vec_nelts; i++)
8916 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8917 if (!exact_real_inverse (mode, &r))
8918 return NULL_TREE;
8919 elts[i] = build_real (unit_type, r);
8922 return build_vector (type, elts);
8924 default:
8925 return NULL_TREE;
8929 /* Mask out the tz least significant bits of X of type TYPE where
8930 tz is the number of trailing zeroes in Y. */
8931 static wide_int
8932 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8934 int tz = wi::ctz (y);
8935 if (tz > 0)
8936 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8937 return x;
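/* Illustrative example: for Y == 24 (binary 11000, so wi::ctz == 3),
   the low three bits of X are cleared: X == 23 (10111) yields
   16 (10000).  */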
8940 /* Return true when T is an address and is known to be nonzero.
8941 For floating point we further ensure that T is not denormal.
8942 Similar logic is present in nonzero_address in rtlanal.h.
8944 If the return value is based on the assumption that signed overflow
8945 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8946 change *STRICT_OVERFLOW_P. */
8948 static bool
8949 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8951 tree type = TREE_TYPE (t);
8952 enum tree_code code;
8954 /* Doing something useful for floating point would need more work. */
8955 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8956 return false;
8958 code = TREE_CODE (t);
8959 switch (TREE_CODE_CLASS (code))
8961 case tcc_unary:
8962 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8963 strict_overflow_p);
8964 case tcc_binary:
8965 case tcc_comparison:
8966 return tree_binary_nonzero_warnv_p (code, type,
8967 TREE_OPERAND (t, 0),
8968 TREE_OPERAND (t, 1),
8969 strict_overflow_p);
8970 case tcc_constant:
8971 case tcc_declaration:
8972 case tcc_reference:
8973 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8975 default:
8976 break;
8979 switch (code)
8981 case TRUTH_NOT_EXPR:
8982 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8983 strict_overflow_p);
8985 case TRUTH_AND_EXPR:
8986 case TRUTH_OR_EXPR:
8987 case TRUTH_XOR_EXPR:
8988 return tree_binary_nonzero_warnv_p (code, type,
8989 TREE_OPERAND (t, 0),
8990 TREE_OPERAND (t, 1),
8991 strict_overflow_p);
8993 case COND_EXPR:
8994 case CONSTRUCTOR:
8995 case OBJ_TYPE_REF:
8996 case ASSERT_EXPR:
8997 case ADDR_EXPR:
8998 case WITH_SIZE_EXPR:
8999 case SSA_NAME:
9000 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9002 case COMPOUND_EXPR:
9003 case MODIFY_EXPR:
9004 case BIND_EXPR:
9005 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9006 strict_overflow_p);
9008 case SAVE_EXPR:
9009 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9010 strict_overflow_p);
9012 case CALL_EXPR:
9014 tree fndecl = get_callee_fndecl (t);
9015 if (!fndecl) return false;
9016 if (flag_delete_null_pointer_checks && !flag_check_new
9017 && DECL_IS_OPERATOR_NEW (fndecl)
9018 && !TREE_NOTHROW (fndecl))
9019 return true;
9020 if (flag_delete_null_pointer_checks
9021 && lookup_attribute ("returns_nonnull",
9022 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9023 return true;
9024 return alloca_call_p (t);
9027 default:
9028 break;
9030 return false;
9033 /* Return true when T is an address and is known to be nonzero.
9034 Handle warnings about undefined signed overflow. */
9036 bool
9037 tree_expr_nonzero_p (tree t)
9039 bool ret, strict_overflow_p;
9041 strict_overflow_p = false;
9042 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9043 if (strict_overflow_p)
9044 fold_overflow_warning (("assuming signed overflow does not occur when "
9045 "determining that expression is always "
9046 "non-zero"),
9047 WARN_STRICT_OVERFLOW_MISC);
9048 return ret;
9051 /* Return true if T is known not to be equal to an integer W. */
9053 bool
9054 expr_not_equal_to (tree t, const wide_int &w)
9056 wide_int min, max, nz;
9057 value_range_type rtype;
9058 switch (TREE_CODE (t))
9060 case INTEGER_CST:
9061 return wi::ne_p (t, w);
9063 case SSA_NAME:
9064 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9065 return false;
9066 rtype = get_range_info (t, &min, &max);
9067 if (rtype == VR_RANGE)
9069 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9070 return true;
9071 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9072 return true;
9074 else if (rtype == VR_ANTI_RANGE
9075 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9076 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9077 return true;
9078 /* If T has some known zero bits and W has any of those bits set,
9079 then T is known not to be equal to W. */
9080 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9081 TYPE_PRECISION (TREE_TYPE (t))), 0))
9082 return true;
9083 return false;
9085 default:
9086 return false;
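/* Sketch with illustrative values: for an SSA name X whose recorded
   range is VR_RANGE [1, 5],

     expr_not_equal_to (X, 7) -> true   (7 exceeds the maximum)
     expr_not_equal_to (X, 3) -> false  (3 lies inside the range)

   and through the nonzero-bits test, an X whose nonzero bits are 0x3
   can never equal 4.  */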
9090 /* Fold a binary expression of code CODE and type TYPE with operands
9091 OP0 and OP1. LOC is the location of the resulting expression.
9092 Return the folded expression if folding is successful. Otherwise,
9093 return NULL_TREE. */
9095 tree
9096 fold_binary_loc (location_t loc,
9097 enum tree_code code, tree type, tree op0, tree op1)
9099 enum tree_code_class kind = TREE_CODE_CLASS (code);
9100 tree arg0, arg1, tem;
9101 tree t1 = NULL_TREE;
9102 bool strict_overflow_p;
9103 unsigned int prec;
9105 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9106 && TREE_CODE_LENGTH (code) == 2
9107 && op0 != NULL_TREE
9108 && op1 != NULL_TREE);
9110 arg0 = op0;
9111 arg1 = op1;
9113 /* Strip any conversions that don't change the mode. This is
9114 safe for every expression, except for a comparison expression
9115 because its signedness is derived from its operands. So, in
9116 the latter case, only strip conversions that don't change the
9117 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9118 preserved.
9120 Note that this is done as an internal manipulation within the
9121 constant folder, in order to find the simplest representation
9122 of the arguments so that their form can be studied. In any
9123 case, the appropriate type conversions should be put back in
9124 the tree that will get out of the constant folder. */
9126 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9128 STRIP_SIGN_NOPS (arg0);
9129 STRIP_SIGN_NOPS (arg1);
9131 else
9133 STRIP_NOPS (arg0);
9134 STRIP_NOPS (arg1);
9137 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9138 constant but we can't do arithmetic on them. */
9139 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9141 tem = const_binop (code, type, arg0, arg1);
9142 if (tem != NULL_TREE)
9144 if (TREE_TYPE (tem) != type)
9145 tem = fold_convert_loc (loc, type, tem);
9146 return tem;
9150 /* If this is a commutative operation, and ARG0 is a constant, move it
9151 to ARG1 to reduce the number of tests below. */
9152 if (commutative_tree_code (code)
9153 && tree_swap_operands_p (arg0, arg1))
9154 return fold_build2_loc (loc, code, type, op1, op0);
9156 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9157 to ARG1 to reduce the number of tests below. */
9158 if (kind == tcc_comparison
9159 && tree_swap_operands_p (arg0, arg1))
9160 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9162 tem = generic_simplify (loc, code, type, op0, op1);
9163 if (tem)
9164 return tem;
9166 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9168 First check for cases where an arithmetic operation is applied to a
9169 compound, conditional, or comparison operation. Push the arithmetic
9170 operation inside the compound or conditional to see if any folding
9171 can then be done. Convert comparison to conditional for this purpose.
9172 This also optimizes non-constant cases that used to be done in
9173 expand_expr.
9175 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9176 where one of the operands is a comparison and the other is a comparison, a
9177 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9178 code below would make the expression more complex. Change it to a
9179 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9180 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9182 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9183 || code == EQ_EXPR || code == NE_EXPR)
9184 && TREE_CODE (type) != VECTOR_TYPE
9185 && ((truth_value_p (TREE_CODE (arg0))
9186 && (truth_value_p (TREE_CODE (arg1))
9187 || (TREE_CODE (arg1) == BIT_AND_EXPR
9188 && integer_onep (TREE_OPERAND (arg1, 1)))))
9189 || (truth_value_p (TREE_CODE (arg1))
9190 && (truth_value_p (TREE_CODE (arg0))
9191 || (TREE_CODE (arg0) == BIT_AND_EXPR
9192 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9194 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9195 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9196 : TRUTH_XOR_EXPR,
9197 boolean_type_node,
9198 fold_convert_loc (loc, boolean_type_node, arg0),
9199 fold_convert_loc (loc, boolean_type_node, arg1));
9201 if (code == EQ_EXPR)
9202 tem = invert_truthvalue_loc (loc, tem);
9204 return fold_convert_loc (loc, type, tem);
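/* For example (a sketch of the effect, not extra folding code): with
   int-typed comparisons,

     (a < b) & (c < d)    becomes   (a < b) && (c < d)
     (a < b) == (c < d)   becomes   !((a < b) ^ (c < d))

   so the truth-value folders further down get TRUTH_* codes to work
   with instead of bitwise ones.  */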
9207 if (TREE_CODE_CLASS (code) == tcc_binary
9208 || TREE_CODE_CLASS (code) == tcc_comparison)
9210 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9212 tem = fold_build2_loc (loc, code, type,
9213 fold_convert_loc (loc, TREE_TYPE (op0),
9214 TREE_OPERAND (arg0, 1)), op1);
9215 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9216 tem);
9218 if (TREE_CODE (arg1) == COMPOUND_EXPR)
9220 tem = fold_build2_loc (loc, code, type, op0,
9221 fold_convert_loc (loc, TREE_TYPE (op1),
9222 TREE_OPERAND (arg1, 1)));
9223 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9224 tem);
9227 if (TREE_CODE (arg0) == COND_EXPR
9228 || TREE_CODE (arg0) == VEC_COND_EXPR
9229 || COMPARISON_CLASS_P (arg0))
9231 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9232 arg0, arg1,
9233 /*cond_first_p=*/1);
9234 if (tem != NULL_TREE)
9235 return tem;
9238 if (TREE_CODE (arg1) == COND_EXPR
9239 || TREE_CODE (arg1) == VEC_COND_EXPR
9240 || COMPARISON_CLASS_P (arg1))
9242 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9243 arg1, arg0,
9244 /*cond_first_p=*/0);
9245 if (tem != NULL_TREE)
9246 return tem;
9250 switch (code)
9252 case MEM_REF:
9253 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9254 if (TREE_CODE (arg0) == ADDR_EXPR
9255 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9257 tree iref = TREE_OPERAND (arg0, 0);
9258 return fold_build2 (MEM_REF, type,
9259 TREE_OPERAND (iref, 0),
9260 int_const_binop (PLUS_EXPR, arg1,
9261 TREE_OPERAND (iref, 1)));
9264 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9265 if (TREE_CODE (arg0) == ADDR_EXPR
9266 && handled_component_p (TREE_OPERAND (arg0, 0)))
9268 tree base;
9269 HOST_WIDE_INT coffset;
9270 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9271 &coffset);
9272 if (!base)
9273 return NULL_TREE;
9274 return fold_build2 (MEM_REF, type,
9275 build_fold_addr_expr (base),
9276 int_const_binop (PLUS_EXPR, arg1,
9277 size_int (coffset)));
9280 return NULL_TREE;
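/* Concrete instances (illustrative offsets): with int *p and a
   struct a with field b,

     MEM[&MEM[p, 4], 8]   folds to   MEM[p, 12]
     MEM[&a.b, 8]         folds to   MEM[&a, offsetof (a, b) + 8]

   collapsing nested address/offset pairs into one MEM_REF.  */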
9282 case POINTER_PLUS_EXPR:
9283 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9284 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9285 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9286 return fold_convert_loc (loc, type,
9287 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9288 fold_convert_loc (loc, sizetype,
9289 arg1),
9290 fold_convert_loc (loc, sizetype,
9291 arg0)));
9293 return NULL_TREE;
9295 case PLUS_EXPR:
9296 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9298 /* X + (X / CST) * -CST is X % CST. */
9299 if (TREE_CODE (arg1) == MULT_EXPR
9300 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9301 && operand_equal_p (arg0,
9302 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9304 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9305 tree cst1 = TREE_OPERAND (arg1, 1);
9306 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9307 cst1, cst0);
9308 if (sum && integer_zerop (sum))
9309 return fold_convert_loc (loc, type,
9310 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9311 TREE_TYPE (arg0), arg0,
9312 cst0));
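/* E.g. (a sketch; the two constants must cancel exactly):

     x + (x / 16) * -16   becomes   x % 16

   since cst1 + cst0 == -16 + 16 == 0.  */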
9316 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9317 one. Make sure the type is not saturating and has the signedness of
9318 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9319 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9320 if ((TREE_CODE (arg0) == MULT_EXPR
9321 || TREE_CODE (arg1) == MULT_EXPR)
9322 && !TYPE_SATURATING (type)
9323 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9324 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9325 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9327 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9328 if (tem)
9329 return tem;
9332 if (! FLOAT_TYPE_P (type))
9334 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9335 (plus (plus (mult) (mult)) (foo)) so that we can
9336 take advantage of the factoring cases below. */
9337 if (ANY_INTEGRAL_TYPE_P (type)
9338 && TYPE_OVERFLOW_WRAPS (type)
9339 && (((TREE_CODE (arg0) == PLUS_EXPR
9340 || TREE_CODE (arg0) == MINUS_EXPR)
9341 && TREE_CODE (arg1) == MULT_EXPR)
9342 || ((TREE_CODE (arg1) == PLUS_EXPR
9343 || TREE_CODE (arg1) == MINUS_EXPR)
9344 && TREE_CODE (arg0) == MULT_EXPR)))
9346 tree parg0, parg1, parg, marg;
9347 enum tree_code pcode;
9349 if (TREE_CODE (arg1) == MULT_EXPR)
9350 parg = arg0, marg = arg1;
9351 else
9352 parg = arg1, marg = arg0;
9353 pcode = TREE_CODE (parg);
9354 parg0 = TREE_OPERAND (parg, 0);
9355 parg1 = TREE_OPERAND (parg, 1);
9356 STRIP_NOPS (parg0);
9357 STRIP_NOPS (parg1);
9359 if (TREE_CODE (parg0) == MULT_EXPR
9360 && TREE_CODE (parg1) != MULT_EXPR)
9361 return fold_build2_loc (loc, pcode, type,
9362 fold_build2_loc (loc, PLUS_EXPR, type,
9363 fold_convert_loc (loc, type,
9364 parg0),
9365 fold_convert_loc (loc, type,
9366 marg)),
9367 fold_convert_loc (loc, type, parg1));
9368 if (TREE_CODE (parg0) != MULT_EXPR
9369 && TREE_CODE (parg1) == MULT_EXPR)
9370 return
9371 fold_build2_loc (loc, PLUS_EXPR, type,
9372 fold_convert_loc (loc, type, parg0),
9373 fold_build2_loc (loc, pcode, type,
9374 fold_convert_loc (loc, type, marg),
9375 fold_convert_loc (loc, type,
9376 parg1)));
9379 else
9381 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9382 to __complex__ ( x, y ). This is not the same for SNaNs or
9383 if signed zeros are involved. */
9384 if (!HONOR_SNANS (element_mode (arg0))
9385 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9386 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9388 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9389 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9390 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9391 bool arg0rz = false, arg0iz = false;
9392 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9393 || (arg0i && (arg0iz = real_zerop (arg0i))))
9395 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9396 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9397 if (arg0rz && arg1i && real_zerop (arg1i))
9399 tree rp = arg1r ? arg1r
9400 : build1 (REALPART_EXPR, rtype, arg1);
9401 tree ip = arg0i ? arg0i
9402 : build1 (IMAGPART_EXPR, rtype, arg0);
9403 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9405 else if (arg0iz && arg1r && real_zerop (arg1r))
9407 tree rp = arg0r ? arg0r
9408 : build1 (REALPART_EXPR, rtype, arg0);
9409 tree ip = arg1i ? arg1i
9410 : build1 (IMAGPART_EXPR, rtype, arg1);
9411 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
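/* A source-level sketch (assumes no signaling NaNs and ignorable
   signed zeros, e.g. under -ffast-math), using GNU imaginary
   constants:

     _Complex double z = x;          -- imaginary part 0.0
     _Complex double w = y * 1.0i;   -- real part 0.0
     return z + w;                   -- folds to COMPLEX_EXPR <x, y>

   so the two real additions disappear.  */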
9416 if (flag_unsafe_math_optimizations
9417 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9418 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9419 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9420 return tem;
9422 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9423 We associate floats only if the user has specified
9424 -fassociative-math. */
9425 if (flag_associative_math
9426 && TREE_CODE (arg1) == PLUS_EXPR
9427 && TREE_CODE (arg0) != MULT_EXPR)
9429 tree tree10 = TREE_OPERAND (arg1, 0);
9430 tree tree11 = TREE_OPERAND (arg1, 1);
9431 if (TREE_CODE (tree11) == MULT_EXPR
9432 && TREE_CODE (tree10) == MULT_EXPR)
9434 tree tree0;
9435 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9436 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9439 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9440 We associate floats only if the user has specified
9441 -fassociative-math. */
9442 if (flag_associative_math
9443 && TREE_CODE (arg0) == PLUS_EXPR
9444 && TREE_CODE (arg1) != MULT_EXPR)
9446 tree tree00 = TREE_OPERAND (arg0, 0);
9447 tree tree01 = TREE_OPERAND (arg0, 1);
9448 if (TREE_CODE (tree01) == MULT_EXPR
9449 && TREE_CODE (tree00) == MULT_EXPR)
9451 tree tree0;
9452 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9453 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9458 bit_rotate:
9459 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9460 is a rotate of A by C1 bits. */
9461 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9462 is a rotate of A by B bits. */
9464 enum tree_code code0, code1;
9465 tree rtype;
9466 code0 = TREE_CODE (arg0);
9467 code1 = TREE_CODE (arg1);
9468 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9469 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9470 && operand_equal_p (TREE_OPERAND (arg0, 0),
9471 TREE_OPERAND (arg1, 0), 0)
9472 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9473 TYPE_UNSIGNED (rtype))
9474 /* Only create rotates in complete modes. Other cases are not
9475 expanded properly. */
9476 && (element_precision (rtype)
9477 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9479 tree tree01, tree11;
9480 enum tree_code code01, code11;
9482 tree01 = TREE_OPERAND (arg0, 1);
9483 tree11 = TREE_OPERAND (arg1, 1);
9484 STRIP_NOPS (tree01);
9485 STRIP_NOPS (tree11);
9486 code01 = TREE_CODE (tree01);
9487 code11 = TREE_CODE (tree11);
9488 if (code01 == INTEGER_CST
9489 && code11 == INTEGER_CST
9490 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9491 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9493 tem = build2_loc (loc, LROTATE_EXPR,
9494 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9495 TREE_OPERAND (arg0, 0),
9496 code0 == LSHIFT_EXPR
9497 ? TREE_OPERAND (arg0, 1)
9498 : TREE_OPERAND (arg1, 1));
9499 return fold_convert_loc (loc, type, tem);
9501 else if (code11 == MINUS_EXPR)
9503 tree tree110, tree111;
9504 tree110 = TREE_OPERAND (tree11, 0);
9505 tree111 = TREE_OPERAND (tree11, 1);
9506 STRIP_NOPS (tree110);
9507 STRIP_NOPS (tree111);
9508 if (TREE_CODE (tree110) == INTEGER_CST
9509 && 0 == compare_tree_int (tree110,
9510 element_precision
9511 (TREE_TYPE (TREE_OPERAND
9512 (arg0, 0))))
9513 && operand_equal_p (tree01, tree111, 0))
9514 return
9515 fold_convert_loc (loc, type,
9516 build2 ((code0 == LSHIFT_EXPR
9517 ? LROTATE_EXPR
9518 : RROTATE_EXPR),
9519 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9520 TREE_OPERAND (arg0, 0),
9521 TREE_OPERAND (arg0, 1)));
9523 else if (code01 == MINUS_EXPR)
9525 tree tree010, tree011;
9526 tree010 = TREE_OPERAND (tree01, 0);
9527 tree011 = TREE_OPERAND (tree01, 1);
9528 STRIP_NOPS (tree010);
9529 STRIP_NOPS (tree011);
9530 if (TREE_CODE (tree010) == INTEGER_CST
9531 && 0 == compare_tree_int (tree010,
9532 element_precision
9533 (TREE_TYPE (TREE_OPERAND
9534 (arg0, 0))))
9535 && operand_equal_p (tree11, tree011, 0))
9536 return fold_convert_loc
9537 (loc, type,
9538 build2 ((code0 != LSHIFT_EXPR
9539 ? LROTATE_EXPR
9540 : RROTATE_EXPR),
9541 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9542 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
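/* The classic source idiom this recognizes (sketch, assuming a
   32-bit unsigned int):

     unsigned rotl (unsigned x, unsigned n)
     {
       return (x << n) | (x >> (32 - n));
     }

   Both the constant-count form (C1 + C2 == 32) and the variable-count
   form above fold to a single LROTATE_EXPR/RROTATE_EXPR, which
   targets with rotate instructions expand as one insn.  */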
9547 associate:
9548 /* In most languages, we can't associate operations on floats through
9549 parentheses. Rather than remember where the parentheses were, we
9550 don't associate floats at all, unless the user has specified
9551 -fassociative-math.
9552 And, we need to make sure type is not saturating. */
9554 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9555 && !TYPE_SATURATING (type))
9557 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
9558 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
9559 tree atype = type;
9560 bool ok = true;
9562 /* Split both trees into variables, constants, and literals. Then
9563 associate each group together, the constants with literals,
9564 then the result with variables. This increases the chances of
9565 literals being recombined later and of generating relocatable
9566 expressions for the sum of a constant and literal. */
9567 var0 = split_tree (arg0, type, code,
9568 &minus_var0, &con0, &minus_con0,
9569 &lit0, &minus_lit0, 0);
9570 var1 = split_tree (arg1, type, code,
9571 &minus_var1, &con1, &minus_con1,
9572 &lit1, &minus_lit1, code == MINUS_EXPR);
9574 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9575 if (code == MINUS_EXPR)
9576 code = PLUS_EXPR;
9578 /* With undefined overflow prefer doing association in a type
9579 which wraps on overflow, if that is one of the operand types. */
9580 if (POINTER_TYPE_P (type)
9581 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9583 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9584 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9585 atype = TREE_TYPE (arg0);
9586 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9587 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9588 atype = TREE_TYPE (arg1);
9589 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9592 /* With undefined overflow we can only associate constants with one
9593 variable, and constants whose association doesn't overflow. */
9594 if (POINTER_TYPE_P (atype)
9595 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9597 if ((var0 && var1) || (minus_var0 && minus_var1))
9599 /* ??? If split_tree would handle NEGATE_EXPR we could
9600 simply reject these cases and the allowed cases would
9601 be the var0/minus_var1 ones. */
9602 tree tmp0 = var0 ? var0 : minus_var0;
9603 tree tmp1 = var1 ? var1 : minus_var1;
9604 bool one_neg = false;
9606 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9608 tmp0 = TREE_OPERAND (tmp0, 0);
9609 one_neg = !one_neg;
9611 if (CONVERT_EXPR_P (tmp0)
9612 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9613 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9614 <= TYPE_PRECISION (atype)))
9615 tmp0 = TREE_OPERAND (tmp0, 0);
9616 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9618 tmp1 = TREE_OPERAND (tmp1, 0);
9619 one_neg = !one_neg;
9621 if (CONVERT_EXPR_P (tmp1)
9622 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9623 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9624 <= TYPE_PRECISION (atype)))
9625 tmp1 = TREE_OPERAND (tmp1, 0);
9626 /* The only case we can still associate with two variables
9627 is if they cancel out. */
9628 if (!one_neg
9629 || !operand_equal_p (tmp0, tmp1, 0))
9630 ok = false;
9632 else if ((var0 && minus_var1
9633 && ! operand_equal_p (var0, minus_var1, 0))
9634 || (minus_var0 && var1
9635 && ! operand_equal_p (minus_var0, var1, 0)))
9636 ok = false;
9639 /* Only do something if we found more than two objects. Otherwise,
9640 nothing has changed and we risk infinite recursion. */
9641 if (ok
9642 && (2 < ((var0 != 0) + (var1 != 0)
9643 + (minus_var0 != 0) + (minus_var1 != 0)
9644 + (con0 != 0) + (con1 != 0)
9645 + (minus_con0 != 0) + (minus_con1 != 0)
9646 + (lit0 != 0) + (lit1 != 0)
9647 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9649 var0 = associate_trees (loc, var0, var1, code, atype);
9650 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
9651 code, atype);
9652 con0 = associate_trees (loc, con0, con1, code, atype);
9653 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
9654 code, atype);
9655 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9656 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9657 code, atype);
9659 if (minus_var0 && var0)
9661 var0 = associate_trees (loc, var0, minus_var0,
9662 MINUS_EXPR, atype);
9663 minus_var0 = 0;
9665 if (minus_con0 && con0)
9667 con0 = associate_trees (loc, con0, minus_con0,
9668 MINUS_EXPR, atype);
9669 minus_con0 = 0;
9672 /* Preserve the MINUS_EXPR if the negative part of the literal is
9673 greater than the positive part. Otherwise, the multiplicative
9674 folding code (i.e. extract_muldiv) may be fooled when
9675 unsigned constants are subtracted, as in the following
9676 example: ((X*2 + 4) - 8U)/2. */
9677 if (minus_lit0 && lit0)
9679 if (TREE_CODE (lit0) == INTEGER_CST
9680 && TREE_CODE (minus_lit0) == INTEGER_CST
9681 && tree_int_cst_lt (lit0, minus_lit0)
9682 /* But avoid ending up with only negated parts. */
9683 && (var0 || con0))
9685 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9686 MINUS_EXPR, atype);
9687 lit0 = 0;
9689 else
9691 lit0 = associate_trees (loc, lit0, minus_lit0,
9692 MINUS_EXPR, atype);
9693 minus_lit0 = 0;
9697 /* Don't introduce overflows through reassociation. */
9698 if ((lit0 && TREE_OVERFLOW_P (lit0))
9699 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
9700 return NULL_TREE;
9702 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
9703 con0 = associate_trees (loc, con0, lit0, code, atype);
9704 lit0 = 0;
9705 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
9706 code, atype);
9707 minus_lit0 = 0;
9709 /* Eliminate minus_con0. */
9710 if (minus_con0)
9712 if (con0)
9713 con0 = associate_trees (loc, con0, minus_con0,
9714 MINUS_EXPR, atype);
9715 else if (var0)
9716 var0 = associate_trees (loc, var0, minus_con0,
9717 MINUS_EXPR, atype);
9718 else
9719 gcc_unreachable ();
9720 minus_con0 = 0;
9723 /* Eliminate minus_var0. */
9724 if (minus_var0)
9726 if (con0)
9727 con0 = associate_trees (loc, con0, minus_var0,
9728 MINUS_EXPR, atype);
9729 else
9730 gcc_unreachable ();
9731 minus_var0 = 0;
9734 return
9735 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9736 code, atype));
9740 return NULL_TREE;
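/* End-to-end sketch of the association above (illustrative values):

     (x + 3) + (y + 7)

   splits into variables {x, y} and literals {3, 7}; the literals are
   combined first, yielding (x + y) + 10 and exposing further
   constant-folding opportunities.  */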
9742 case MINUS_EXPR:
9743 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9744 if (TREE_CODE (arg0) == NEGATE_EXPR
9745 && negate_expr_p (op1))
9746 return fold_build2_loc (loc, MINUS_EXPR, type,
9747 negate_expr (op1),
9748 fold_convert_loc (loc, type,
9749 TREE_OPERAND (arg0, 0)));
9751 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9752 __complex__ ( x, -y ). This is not the same for SNaNs or if
9753 signed zeros are involved. */
9754 if (!HONOR_SNANS (element_mode (arg0))
9755 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9756 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9758 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9759 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9760 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9761 bool arg0rz = false, arg0iz = false;
9762 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9763 || (arg0i && (arg0iz = real_zerop (arg0i))))
9765 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9766 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9767 if (arg0rz && arg1i && real_zerop (arg1i))
9769 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9770 arg1r ? arg1r
9771 : build1 (REALPART_EXPR, rtype, arg1));
9772 tree ip = arg0i ? arg0i
9773 : build1 (IMAGPART_EXPR, rtype, arg0);
9774 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9776 else if (arg0iz && arg1r && real_zerop (arg1r))
9778 tree rp = arg0r ? arg0r
9779 : build1 (REALPART_EXPR, rtype, arg0);
9780 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9781 arg1i ? arg1i
9782 : build1 (IMAGPART_EXPR, rtype, arg1));
9783 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9788 /* A - B -> A + (-B) if B is easily negatable. */
9789 if (negate_expr_p (op1)
9790 && ! TYPE_OVERFLOW_SANITIZED (type)
9791 && ((FLOAT_TYPE_P (type)
9792 /* Avoid this transformation if B is a positive REAL_CST. */
9793 && (TREE_CODE (op1) != REAL_CST
9794 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9795 || INTEGRAL_TYPE_P (type)))
9796 return fold_build2_loc (loc, PLUS_EXPR, type,
9797 fold_convert_loc (loc, type, arg0),
9798 negate_expr (op1));
9800 /* Fold &a[i] - &a[j] to i-j. */
9801 if (TREE_CODE (arg0) == ADDR_EXPR
9802 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9803 && TREE_CODE (arg1) == ADDR_EXPR
9804 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9806 tree tem = fold_addr_of_array_ref_difference (loc, type,
9807 TREE_OPERAND (arg0, 0),
9808 TREE_OPERAND (arg1, 0));
9809 if (tem)
9810 return tem;
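/* Sketch: for int a[N],

     &a[i] - &a[j]

   folds to the scaled index difference (i - j) * sizeof (int), so
   neither element address needs to be computed.  */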
9813 if (FLOAT_TYPE_P (type)
9814 && flag_unsafe_math_optimizations
9815 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9816 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9817 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9818 return tem;
9820 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9821 one. Make sure the type is not saturating and has the signedness of
9822 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9823 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9824 if ((TREE_CODE (arg0) == MULT_EXPR
9825 || TREE_CODE (arg1) == MULT_EXPR)
9826 && !TYPE_SATURATING (type)
9827 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9828 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9829 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9831 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9832 if (tem)
9833 return tem;
9836 goto associate;
9838 case MULT_EXPR:
9839 if (! FLOAT_TYPE_P (type))
9841 /* Transform x * -C into -x * C if x is easily negatable. */
9842 if (TREE_CODE (op1) == INTEGER_CST
9843 && tree_int_cst_sgn (op1) == -1
9844 && negate_expr_p (op0)
9845 && negate_expr_p (op1)
9846 && (tem = negate_expr (op1)) != op1
9847 && ! TREE_OVERFLOW (tem))
9848 return fold_build2_loc (loc, MULT_EXPR, type,
9849 fold_convert_loc (loc, type,
9850 negate_expr (op0)), tem);
9852 strict_overflow_p = false;
9853 if (TREE_CODE (arg1) == INTEGER_CST
9854 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9855 &strict_overflow_p)))
9857 if (strict_overflow_p)
9858 fold_overflow_warning (("assuming signed overflow does not "
9859 "occur when simplifying "
9860 "multiplication"),
9861 WARN_STRICT_OVERFLOW_MISC);
9862 return fold_convert_loc (loc, type, tem);
9865 /* Optimize z * conj(z) for integer complex numbers. */
9866 if (TREE_CODE (arg0) == CONJ_EXPR
9867 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9868 return fold_mult_zconjz (loc, type, arg1);
9869 if (TREE_CODE (arg1) == CONJ_EXPR
9870 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9871 return fold_mult_zconjz (loc, type, arg0);
9873 else
9875 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9876 This is not the same for NaNs or if signed zeros are
9877 involved. */
9878 if (!HONOR_NANS (arg0)
9879 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9880 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9881 && TREE_CODE (arg1) == COMPLEX_CST
9882 && real_zerop (TREE_REALPART (arg1)))
9884 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9885 if (real_onep (TREE_IMAGPART (arg1)))
9886 return
9887 fold_build2_loc (loc, COMPLEX_EXPR, type,
9888 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9889 rtype, arg0)),
9890 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9891 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9892 return
9893 fold_build2_loc (loc, COMPLEX_EXPR, type,
9894 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9895 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9896 rtype, arg0)));
9899 /* Optimize z * conj(z) for floating point complex numbers.
9900 Guarded by flag_unsafe_math_optimizations as non-finite
9901 imaginary components don't produce scalar results. */
9902 if (flag_unsafe_math_optimizations
9903 && TREE_CODE (arg0) == CONJ_EXPR
9904 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9905 return fold_mult_zconjz (loc, type, arg1);
9906 if (flag_unsafe_math_optimizations
9907 && TREE_CODE (arg1) == CONJ_EXPR
9908 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9909 return fold_mult_zconjz (loc, type, arg0);
9911 goto associate;
9913 case BIT_IOR_EXPR:
9914 /* Canonicalize (X & C1) | C2. */
9915 if (TREE_CODE (arg0) == BIT_AND_EXPR
9916 && TREE_CODE (arg1) == INTEGER_CST
9917 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9919 int width = TYPE_PRECISION (type), w;
9920 wide_int c1 = TREE_OPERAND (arg0, 1);
9921 wide_int c2 = arg1;
9923 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9924 if ((c1 & c2) == c1)
9925 return omit_one_operand_loc (loc, type, arg1,
9926 TREE_OPERAND (arg0, 0));
9928 wide_int msk = wi::mask (width, false,
9929 TYPE_PRECISION (TREE_TYPE (arg1)));
9931 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9932 if (msk.and_not (c1 | c2) == 0)
9934 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9935 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9938 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9939 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9940 mode which allows further optimizations. */
9941 c1 &= msk;
9942 c2 &= msk;
9943 wide_int c3 = c1.and_not (c2);
9944 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9946 wide_int mask = wi::mask (w, false,
9947 TYPE_PRECISION (type));
9948 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
9950 c3 = mask;
9951 break;
9955 if (c3 != c1)
9957 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9958 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
9959 wide_int_to_tree (type, c3));
9960 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9964 /* See if this can be simplified into a rotate first. If that
9965 is unsuccessful, continue in the association code. */
9966 goto bit_rotate;
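/* Worked examples of the (X & C1) | C2 canonicalization for a
   32-bit int x (illustrative constants):

     (x & 0x03) | 0x0f  ->  0x0f                ((C1 & C2) == C1)
     (x & 0x0f) | 0x03  ->  (x & 0x0c) | 0x03   (C1 := C1 & ~C2)
     (x & 0xff) | 0x0f  ->  unchanged, since shrinking C1 to 0xf0
                            would destroy the byte mask 0xff that
                            later optimizations can exploit.  */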
9968 case BIT_XOR_EXPR:
9969 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9970 if (TREE_CODE (arg0) == BIT_AND_EXPR
9971 && INTEGRAL_TYPE_P (type)
9972 && integer_onep (TREE_OPERAND (arg0, 1))
9973 && integer_onep (arg1))
9974 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9975 build_zero_cst (TREE_TYPE (arg0)));
9977 /* See if this can be simplified into a rotate first. If that
9978 is unsuccessful, continue in the association code. */
9979 goto bit_rotate;
9981 case BIT_AND_EXPR:
9982 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9983 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9984 && INTEGRAL_TYPE_P (type)
9985 && integer_onep (TREE_OPERAND (arg0, 1))
9986 && integer_onep (arg1))
9988 tree tem2;
9989 tem = TREE_OPERAND (arg0, 0);
9990 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9991 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9992 tem, tem2);
9993 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9994 build_zero_cst (TREE_TYPE (tem)));
9996 /* Fold ~X & 1 as (X & 1) == 0. */
9997 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9998 && INTEGRAL_TYPE_P (type)
9999 && integer_onep (arg1))
10001 tree tem2;
10002 tem = TREE_OPERAND (arg0, 0);
10003 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10004 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10005 tem, tem2);
10006 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10007 build_zero_cst (TREE_TYPE (tem)));
10009 /* Fold !X & 1 as X == 0. */
10010 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10011 && integer_onep (arg1))
10013 tem = TREE_OPERAND (arg0, 0);
10014 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10015 build_zero_cst (TREE_TYPE (tem)));
10018 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10019 multiple of 1 << CST. */
10020 if (TREE_CODE (arg1) == INTEGER_CST)
10022 wide_int cst1 = arg1;
10023 wide_int ncst1 = -cst1;
10024 if ((cst1 & ncst1) == ncst1
10025 && multiple_of_p (type, arg0,
10026 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10027 return fold_convert_loc (loc, type, arg0);
10030 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10031 bits from CST2. */
10032 if (TREE_CODE (arg1) == INTEGER_CST
10033 && TREE_CODE (arg0) == MULT_EXPR
10034 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10036 wide_int warg1 = arg1;
10037 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10039 if (masked == 0)
10040 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10041 arg0, arg1);
10042 else if (masked != warg1)
10044 /* Avoid the transform if arg1 is a mask of some
10045 mode which allows further optimizations. */
10046 int pop = wi::popcount (warg1);
10047 if (!(pop >= BITS_PER_UNIT
10048 && pow2p_hwi (pop)
10049 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10050 return fold_build2_loc (loc, code, type, op0,
10051 wide_int_to_tree (type, masked));
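/* Sketch with illustrative constants:

     (x * 8) & 3   ->  0              (the product has >= 3 trailing
                                       zero bits)
     (x * 4) & 7   ->  (x * 4) & 4    (known-zero low bits dropped
                                       from CST2)

   mask_with_tz clears from CST2 the bits that the multiplier's
   trailing zeros force to zero in the product.  */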
10055 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10056 ((A & N) + B) & M -> (A + B) & M
10057 Similarly if (N & M) == 0,
10058 ((A | N) + B) & M -> (A + B) & M
10059 and for - instead of + (or unary - instead of +)
10060 and/or ^ instead of |.
10061 If B is constant and (B & M) == 0, fold into A & M. */
10062 if (TREE_CODE (arg1) == INTEGER_CST)
10064 wide_int cst1 = arg1;
10065 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10066 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10067 && (TREE_CODE (arg0) == PLUS_EXPR
10068 || TREE_CODE (arg0) == MINUS_EXPR
10069 || TREE_CODE (arg0) == NEGATE_EXPR)
10070 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10071 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10073 tree pmop[2];
10074 int which = 0;
10075 wide_int cst0;
10077 /* Now we know that arg0 is (C + D) or (C - D) or
10078 -C and arg1 (M) is == (1LL << cst) - 1.
10079 Store C into PMOP[0] and D into PMOP[1]. */
10080 pmop[0] = TREE_OPERAND (arg0, 0);
10081 pmop[1] = NULL;
10082 if (TREE_CODE (arg0) != NEGATE_EXPR)
10084 pmop[1] = TREE_OPERAND (arg0, 1);
10085 which = 1;
10088 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10089 which = -1;
10091 for (; which >= 0; which--)
10092 switch (TREE_CODE (pmop[which]))
10094 case BIT_AND_EXPR:
10095 case BIT_IOR_EXPR:
10096 case BIT_XOR_EXPR:
10097 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10098 != INTEGER_CST)
10099 break;
10100 cst0 = TREE_OPERAND (pmop[which], 1);
10101 cst0 &= cst1;
10102 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10104 if (cst0 != cst1)
10105 break;
10107 else if (cst0 != 0)
10108 break;
10109 /* If C or D is of the form (A & N) where
10110 (N & M) == M, or of the form (A | N) or
10111 (A ^ N) where (N & M) == 0, replace it with A. */
10112 pmop[which] = TREE_OPERAND (pmop[which], 0);
10113 break;
10114 case INTEGER_CST:
10115 /* If C or D is an N where (N & M) == 0, it can be
10116 omitted (assumed 0). */
10117 if ((TREE_CODE (arg0) == PLUS_EXPR
10118 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10119 && (cst1 & pmop[which]) == 0)
10120 pmop[which] = NULL;
10121 break;
10122 default:
10123 break;
10126 /* Only build anything new if we optimized one or both arguments
10127 above. */
10128 if (pmop[0] != TREE_OPERAND (arg0, 0)
10129 || (TREE_CODE (arg0) != NEGATE_EXPR
10130 && pmop[1] != TREE_OPERAND (arg0, 1)))
10132 tree utype = TREE_TYPE (arg0);
10133 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10135 /* Perform the operations in a type that has defined
10136 overflow behavior. */
10137 utype = unsigned_type_for (TREE_TYPE (arg0));
10138 if (pmop[0] != NULL)
10139 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10140 if (pmop[1] != NULL)
10141 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10144 if (TREE_CODE (arg0) == NEGATE_EXPR)
10145 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10146 else if (TREE_CODE (arg0) == PLUS_EXPR)
10148 if (pmop[0] != NULL && pmop[1] != NULL)
10149 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10150 pmop[0], pmop[1]);
10151 else if (pmop[0] != NULL)
10152 tem = pmop[0];
10153 else if (pmop[1] != NULL)
10154 tem = pmop[1];
10155 else
10156 return build_int_cst (type, 0);
10158 else if (pmop[0] == NULL)
10159 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10160 else
10161 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10162 pmop[0], pmop[1]);
10163 /* TEM is now the new binary +, - or unary - replacement. */
10164 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10165 fold_convert_loc (loc, utype, arg1));
10166 return fold_convert_loc (loc, type, tem);
10171 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10172 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10173 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10175 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10177 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10178 if (mask == -1)
10179 return
10180 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10183 goto associate;
10185 case RDIV_EXPR:
10186 /* Don't touch a floating-point divide by zero unless the mode
10187 of the constant can represent infinity. */
10188 if (TREE_CODE (arg1) == REAL_CST
10189 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10190 && real_zerop (arg1))
10191 return NULL_TREE;
10193 /* (-A) / (-B) -> A / B */
10194 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10195 return fold_build2_loc (loc, RDIV_EXPR, type,
10196 TREE_OPERAND (arg0, 0),
10197 negate_expr (arg1));
10198 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10199 return fold_build2_loc (loc, RDIV_EXPR, type,
10200 negate_expr (arg0),
10201 TREE_OPERAND (arg1, 0));
10202 return NULL_TREE;
10204 case TRUNC_DIV_EXPR:
10205 /* Fall through */
10207 case FLOOR_DIV_EXPR:
10208 /* Simplify A / (B << N) where A and B are positive and B is
10209 a power of 2, to A >> (N + log2(B)). */
10210 strict_overflow_p = false;
10211 if (TREE_CODE (arg1) == LSHIFT_EXPR
10212 && (TYPE_UNSIGNED (type)
10213 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10215 tree sval = TREE_OPERAND (arg1, 0);
10216 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10218 tree sh_cnt = TREE_OPERAND (arg1, 1);
10219 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10220 wi::exact_log2 (sval));
10222 if (strict_overflow_p)
10223 fold_overflow_warning (("assuming signed overflow does not "
10224 "occur when simplifying A / (B << N)"),
10225 WARN_STRICT_OVERFLOW_MISC);
10227 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10228 sh_cnt, pow2);
10229 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10230 fold_convert_loc (loc, type, arg0), sh_cnt);
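/* Sketch for unsigned a (so nonnegativity is free):

     a / (2 << n)   becomes   a >> (n + 1)

   since B == 2 is a power of two with log2(B) == 1.  For signed A
   the fold must first prove A nonnegative, possibly under the
   signed-overflow assumption, hence the warning above.  */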
10234 /* Fall through */
10236 case ROUND_DIV_EXPR:
10237 case CEIL_DIV_EXPR:
10238 case EXACT_DIV_EXPR:
10239 if (integer_zerop (arg1))
10240 return NULL_TREE;
10242 /* Convert -A / -B to A / B when the type is signed and overflow is
10243 undefined. */
10244 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10245 && TREE_CODE (op0) == NEGATE_EXPR
10246 && negate_expr_p (op1))
10248 if (INTEGRAL_TYPE_P (type))
10249 fold_overflow_warning (("assuming signed overflow does not occur "
10250 "when distributing negation across "
10251 "division"),
10252 WARN_STRICT_OVERFLOW_MISC);
10253 return fold_build2_loc (loc, code, type,
10254 fold_convert_loc (loc, type,
10255 TREE_OPERAND (arg0, 0)),
10256 negate_expr (op1));
10258 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10259 && TREE_CODE (arg1) == NEGATE_EXPR
10260 && negate_expr_p (op0))
10262 if (INTEGRAL_TYPE_P (type))
10263 fold_overflow_warning (("assuming signed overflow does not occur "
10264 "when distributing negation across "
10265 "division"),
10266 WARN_STRICT_OVERFLOW_MISC);
10267 return fold_build2_loc (loc, code, type,
10268 negate_expr (op0),
10269 fold_convert_loc (loc, type,
10270 TREE_OPERAND (arg1, 0)));
10273 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10274 operation, EXACT_DIV_EXPR.
10276 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10277 At one time others generated faster code; it's not clear whether they do
10278 after the last round of changes to the DIV code in expmed.c. */
10279 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10280 && multiple_of_p (type, arg0, arg1))
10281 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10282 fold_convert (type, arg0),
10283 fold_convert (type, arg1));
10285 strict_overflow_p = false;
10286 if (TREE_CODE (arg1) == INTEGER_CST
10287 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10288 &strict_overflow_p)))
10290 if (strict_overflow_p)
10291 fold_overflow_warning (("assuming signed overflow does not occur "
10292 "when simplifying division"),
10293 WARN_STRICT_OVERFLOW_MISC);
10294 return fold_convert_loc (loc, type, tem);
10297 return NULL_TREE;
10299 case CEIL_MOD_EXPR:
10300 case FLOOR_MOD_EXPR:
10301 case ROUND_MOD_EXPR:
10302 case TRUNC_MOD_EXPR:
10303 strict_overflow_p = false;
10304 if (TREE_CODE (arg1) == INTEGER_CST
10305 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10306 &strict_overflow_p)))
10308 if (strict_overflow_p)
10309 fold_overflow_warning (("assuming signed overflow does not occur "
10310 "when simplifying modulus"),
10311 WARN_STRICT_OVERFLOW_MISC);
10312 return fold_convert_loc (loc, type, tem);
10315 return NULL_TREE;
10317 case LROTATE_EXPR:
10318 case RROTATE_EXPR:
10319 case RSHIFT_EXPR:
10320 case LSHIFT_EXPR:
10321 /* Since a negative shift count is not well-defined,
10322 don't try to compute it in the compiler. */
10323 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10324 return NULL_TREE;
10326 prec = element_precision (type);
10328 /* If we have a rotate of a bit operation with the rotate count and
10329 the second operand of the bit operation both constant,
10330 permute the two operations. */
10331 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10332 && (TREE_CODE (arg0) == BIT_AND_EXPR
10333 || TREE_CODE (arg0) == BIT_IOR_EXPR
10334 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10335 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10337 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10338 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10339 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10340 fold_build2_loc (loc, code, type,
10341 arg00, arg1),
10342 fold_build2_loc (loc, code, type,
10343 arg01, arg1));
10346 /* Two consecutive rotates adding up to some integer
10347 multiple of the precision of the type can be ignored. */
10348 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10349 && TREE_CODE (arg0) == RROTATE_EXPR
10350 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10351 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10352 prec) == 0)
10353 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10355 return NULL_TREE;
10357 case MIN_EXPR:
10358 case MAX_EXPR:
10359 goto associate;
10361 case TRUTH_ANDIF_EXPR:
10362 /* Note that the operands of this must be ints
10363 and their values must be 0 or 1.
10364 ("true" is a fixed value perhaps depending on the language.) */
10365 /* If first arg is constant zero, return it. */
10366 if (integer_zerop (arg0))
10367 return fold_convert_loc (loc, type, arg0);
10368 /* FALLTHRU */
10369 case TRUTH_AND_EXPR:
10370 /* If either arg is constant true, drop it. */
10371 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10372 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10373 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10374 /* Preserve sequence points. */
10375 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10376 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10377 /* If second arg is constant zero, result is zero, but first arg
10378 must be evaluated. */
10379 if (integer_zerop (arg1))
10380 return omit_one_operand_loc (loc, type, arg1, arg0);
10381 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10382 case will be handled here. */
10383 if (integer_zerop (arg0))
10384 return omit_one_operand_loc (loc, type, arg0, arg1);
10386 /* !X && X is always false. */
10387 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10388 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10389 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10390 /* X && !X is always false. */
10391 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10392 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10393 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10395 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10396 means A >= Y && A != MAX, but in this case we know that
10397 A < X <= MAX. */
10399 if (!TREE_SIDE_EFFECTS (arg0)
10400 && !TREE_SIDE_EFFECTS (arg1))
10402 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10403 if (tem && !operand_equal_p (tem, arg0, 0))
10404 return fold_build2_loc (loc, code, type, tem, arg1);
10406 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10407 if (tem && !operand_equal_p (tem, arg1, 0))
10408 return fold_build2_loc (loc, code, type, arg0, tem);
10411 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10412 != NULL_TREE)
10413 return tem;
10415 return NULL_TREE;
10417 case TRUTH_ORIF_EXPR:
10418 /* Note that the operands of this must be ints
10419 and their values must be 0 or true.
10420 ("true" is a fixed value perhaps depending on the language.) */
10421 /* If first arg is constant true, return it. */
10422 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10423 return fold_convert_loc (loc, type, arg0);
10424 /* FALLTHRU */
10425 case TRUTH_OR_EXPR:
10426 /* If either arg is constant zero, drop it. */
10427 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10428 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10429 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10430 /* Preserve sequence points. */
10431 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10432 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10433 /* If second arg is constant true, result is true, but we must
10434 evaluate first arg. */
10435 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10436 return omit_one_operand_loc (loc, type, arg1, arg0);
10437 /* Likewise for first arg, but note this only occurs here for
10438 TRUTH_OR_EXPR. */
10439 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10440 return omit_one_operand_loc (loc, type, arg0, arg1);
10442 /* !X || X is always true. */
10443 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10444 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10445 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10446 /* X || !X is always true. */
10447 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10448 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10449 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10451 /* (X && !Y) || (!X && Y) is X ^ Y */
10452 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10453 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10455 tree a0, a1, l0, l1, n0, n1;
10457 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10458 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10460 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10461 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10463 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10464 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10466 if ((operand_equal_p (n0, a0, 0)
10467 && operand_equal_p (n1, a1, 0))
10468 || (operand_equal_p (n0, a1, 0)
10469 && operand_equal_p (n1, a0, 0)))
10470 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10473 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10474 != NULL_TREE)
10475 return tem;
10477 return NULL_TREE;
10479 case TRUTH_XOR_EXPR:
10480 /* If the second arg is constant zero, drop it. */
10481 if (integer_zerop (arg1))
10482 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10483 /* If the second arg is constant true, this is a logical inversion. */
10484 if (integer_onep (arg1))
10486 tem = invert_truthvalue_loc (loc, arg0);
10487 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10489 /* Identical arguments cancel to zero. */
10490 if (operand_equal_p (arg0, arg1, 0))
10491 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10493 /* !X ^ X is always true. */
10494 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10495 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10496 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10498 /* X ^ !X is always true. */
10499 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10500 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10501 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10503 return NULL_TREE;
10505 case EQ_EXPR:
10506 case NE_EXPR:
10507 STRIP_NOPS (arg0);
10508 STRIP_NOPS (arg1);
10510 tem = fold_comparison (loc, code, type, op0, op1);
10511 if (tem != NULL_TREE)
10512 return tem;
10514 /* bool_var != 1 becomes !bool_var. */
10515 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10516 && code == NE_EXPR)
10517 return fold_convert_loc (loc, type,
10518 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10519 TREE_TYPE (arg0), arg0));
10521 /* bool_var == 0 becomes !bool_var. */
10522 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10523 && code == EQ_EXPR)
10524 return fold_convert_loc (loc, type,
10525 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10526 TREE_TYPE (arg0), arg0));
10528 /* !exp != 0 becomes !exp */
10529 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10530 && code == NE_EXPR)
10531 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10533 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10534 if ((TREE_CODE (arg0) == PLUS_EXPR
10535 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10536 || TREE_CODE (arg0) == MINUS_EXPR)
10537 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10538 0)),
10539 arg1, 0)
10540 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10541 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10543 tree val = TREE_OPERAND (arg0, 1);
10544 val = fold_build2_loc (loc, code, type, val,
10545 build_int_cst (TREE_TYPE (val), 0));
10546 return omit_two_operands_loc (loc, type, val,
10547 TREE_OPERAND (arg0, 0), arg1);
10550 /* Transform comparisons of the form X CMP X +- Y to Y CMP 0. */
10551 if ((TREE_CODE (arg1) == PLUS_EXPR
10552 || TREE_CODE (arg1) == POINTER_PLUS_EXPR
10553 || TREE_CODE (arg1) == MINUS_EXPR)
10554 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10555 0)),
10556 arg0, 0)
10557 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10558 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10560 tree val = TREE_OPERAND (arg1, 1);
10561 val = fold_build2_loc (loc, code, type, val,
10562 build_int_cst (TREE_TYPE (val), 0));
10563 return omit_two_operands_loc (loc, type, val,
10564 TREE_OPERAND (arg1, 0), arg0);
10567 /* If this is an EQ or NE comparison with zero and ARG0 is
10568 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10569 two operations, but the latter can be done in one less insn
10570 on machines that have only two-operand insns or on which a
10571 constant cannot be the first operand. */
10572 if (TREE_CODE (arg0) == BIT_AND_EXPR
10573 && integer_zerop (arg1))
10575 tree arg00 = TREE_OPERAND (arg0, 0);
10576 tree arg01 = TREE_OPERAND (arg0, 1);
10577 if (TREE_CODE (arg00) == LSHIFT_EXPR
10578 && integer_onep (TREE_OPERAND (arg00, 0)))
10580 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10581 arg01, TREE_OPERAND (arg00, 1));
10582 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10583 build_int_cst (TREE_TYPE (arg0), 1));
10584 return fold_build2_loc (loc, code, type,
10585 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10586 arg1);
10588 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10589 && integer_onep (TREE_OPERAND (arg01, 0)))
10591 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10592 arg00, TREE_OPERAND (arg01, 1));
10593 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10594 build_int_cst (TREE_TYPE (arg0), 1));
10595 return fold_build2_loc (loc, code, type,
10596 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10597 arg1);
10601 /* If this is an NE or EQ comparison of zero against the result of a
10602 signed MOD operation whose second operand is a power of 2, make
10603 the MOD operation unsigned since it is simpler and equivalent. */
10604 if (integer_zerop (arg1)
10605 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10606 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10607 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10608 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10609 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10610 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10612 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10613 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10614 fold_convert_loc (loc, newtype,
10615 TREE_OPERAND (arg0, 0)),
10616 fold_convert_loc (loc, newtype,
10617 TREE_OPERAND (arg0, 1)));
10619 return fold_build2_loc (loc, code, type, newmod,
10620 fold_convert_loc (loc, newtype, arg1));
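/* Sketch: for signed int x,

     x % 4 == 0   becomes   (unsigned) x % 4U == 0

   The unsigned modulus is a plain mask (x & 3), and for an equality
   against zero the signed and unsigned forms agree.  */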
10623 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10624 C1 is a valid shift constant, and C2 is a power of two, i.e.
10625 a single bit. */
10626 if (TREE_CODE (arg0) == BIT_AND_EXPR
10627 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10628 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10629 == INTEGER_CST
10630 && integer_pow2p (TREE_OPERAND (arg0, 1))
10631 && integer_zerop (arg1))
10633 tree itype = TREE_TYPE (arg0);
10634 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10635 prec = TYPE_PRECISION (itype);
10637 /* Check for a valid shift count. */
10638 if (wi::ltu_p (arg001, prec))
10640 tree arg01 = TREE_OPERAND (arg0, 1);
10641 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10642 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10643 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10644 can be rewritten as (X & (C2 << C1)) != 0. */
10645 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10647 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10648 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10649 return fold_build2_loc (loc, code, type, tem,
10650 fold_convert_loc (loc, itype, arg1));
10652 /* Otherwise, for signed (arithmetic) shifts,
10653 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10654 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10655 else if (!TYPE_UNSIGNED (itype))
10656 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10657 arg000, build_int_cst (itype, 0));
10658 /* Otherwise, for unsigned (logical) shifts,
10659 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10660 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10661 else
10662 return omit_one_operand_loc (loc, type,
10663 code == EQ_EXPR ? integer_one_node
10664 : integer_zero_node,
10665 arg000);
10669 /* If this is a comparison of a field, we may be able to simplify it. */
10670 if ((TREE_CODE (arg0) == COMPONENT_REF
10671 || TREE_CODE (arg0) == BIT_FIELD_REF)
10672 /* Handle the constant case even without -O
10673 to make sure the warnings are given. */
10674 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10676 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10677 if (t1)
10678 return t1;
10681 /* Optimize comparisons of strlen vs zero to a compare of the
10682 first character of the string vs zero. To wit,
10683 strlen(ptr) == 0 => *ptr == 0
10684 strlen(ptr) != 0 => *ptr != 0
10685 Other cases should reduce to one of these two (or a constant)
10686 due to the return value of strlen being unsigned. */
10687 if (TREE_CODE (arg0) == CALL_EXPR
10688 && integer_zerop (arg1))
10690 tree fndecl = get_callee_fndecl (arg0);
10692 if (fndecl
10693 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10694 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10695 && call_expr_nargs (arg0) == 1
10696 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10698 tree iref = build_fold_indirect_ref_loc (loc,
10699 CALL_EXPR_ARG (arg0, 0));
10700 return fold_build2_loc (loc, code, type, iref,
10701 build_int_cst (TREE_TYPE (iref), 0));
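/* Source-level sketch:

     if (strlen (p) == 0)   becomes   if (*p == 0)

   valid because strlen returns zero exactly when the first character
   is the terminating NUL, and it saves the library call.  */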
10705 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10706 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10707 if (TREE_CODE (arg0) == RSHIFT_EXPR
10708 && integer_zerop (arg1)
10709 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10711 tree arg00 = TREE_OPERAND (arg0, 0);
10712 tree arg01 = TREE_OPERAND (arg0, 1);
10713 tree itype = TREE_TYPE (arg00);
10714 if (wi::eq_p (arg01, element_precision (itype) - 1))
10716 if (TYPE_UNSIGNED (itype))
10718 itype = signed_type_for (itype);
10719 arg00 = fold_convert_loc (loc, itype, arg00);
10721 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10722 type, arg00, build_zero_cst (itype));
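/* Sketch for a 32-bit unsigned int x:

     (x >> 31) != 0   becomes   (int) x < 0
     (x >> 31) == 0   becomes   (int) x >= 0

   a shift isolating the sign bit turns into a sign test, converting
   to the signed type first when necessary.  */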
10726 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10727 (X & C) == 0 when C is a single bit. */
10728 if (TREE_CODE (arg0) == BIT_AND_EXPR
10729 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10730 && integer_zerop (arg1)
10731 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10733 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10734 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10735 TREE_OPERAND (arg0, 1));
10736 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10737 type, tem,
10738 fold_convert_loc (loc, TREE_TYPE (arg0),
10739 arg1));
10742 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10743 constant C is a power of two, i.e. a single bit. */
10744 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10745 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10746 && integer_zerop (arg1)
10747 && integer_pow2p (TREE_OPERAND (arg0, 1))
10748 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10749 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10751 tree arg00 = TREE_OPERAND (arg0, 0);
10752 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10753 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10756 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10757 when C is a power of two, i.e. a single bit. */
10758 if (TREE_CODE (arg0) == BIT_AND_EXPR
10759 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10760 && integer_zerop (arg1)
10761 && integer_pow2p (TREE_OPERAND (arg0, 1))
10762 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10763 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10765 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10766 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10767 arg000, TREE_OPERAND (arg0, 1));
10768 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10769 tem, build_int_cst (TREE_TYPE (tem), 0));
10772 if (integer_zerop (arg1)
10773 && tree_expr_nonzero_p (arg0))
10775 tree res = constant_boolean_node (code == NE_EXPR, type);
10776 return omit_one_operand_loc (loc, type, res, arg0);
10779 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
10780 if (TREE_CODE (arg0) == BIT_AND_EXPR
10781 && TREE_CODE (arg1) == BIT_AND_EXPR)
10783 tree arg00 = TREE_OPERAND (arg0, 0);
10784 tree arg01 = TREE_OPERAND (arg0, 1);
10785 tree arg10 = TREE_OPERAND (arg1, 0);
10786 tree arg11 = TREE_OPERAND (arg1, 1);
10787 tree itype = TREE_TYPE (arg0);
10789 if (operand_equal_p (arg01, arg11, 0))
10791 tem = fold_convert_loc (loc, itype, arg10);
10792 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10793 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10794 return fold_build2_loc (loc, code, type, tem,
10795 build_zero_cst (itype));
10797 if (operand_equal_p (arg01, arg10, 0))
10799 tem = fold_convert_loc (loc, itype, arg11);
10800 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10801 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10802 return fold_build2_loc (loc, code, type, tem,
10803 build_zero_cst (itype));
10805 if (operand_equal_p (arg00, arg11, 0))
10807 tem = fold_convert_loc (loc, itype, arg10);
10808 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10809 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10810 return fold_build2_loc (loc, code, type, tem,
10811 build_zero_cst (itype));
10813 if (operand_equal_p (arg00, arg10, 0))
10815 tem = fold_convert_loc (loc, itype, arg11);
10816 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10817 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10818 return fold_build2_loc (loc, code, type, tem,
10819 build_zero_cst (itype));
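/* Illustrative check: (X & C) == (Y & C) says X and Y agree on every
   bit of C, which is exactly ((X ^ Y) & C) == 0; e.g. X == 0b1010,
   Y == 0b0110, C == 0b0011 gives X & C == 0b10 == Y & C and
   (X ^ Y) & C == 0b1100 & 0b0011 == 0.  The four cases above just
   locate which operands of the two BIT_AND_EXPRs play the role of the
   common C.  */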
10823 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10824 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10826 tree arg00 = TREE_OPERAND (arg0, 0);
10827 tree arg01 = TREE_OPERAND (arg0, 1);
10828 tree arg10 = TREE_OPERAND (arg1, 0);
10829 tree arg11 = TREE_OPERAND (arg1, 1);
10830 tree itype = TREE_TYPE (arg0);
10832 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10833 operand_equal_p guarantees no side-effects so we don't need
10834 to use omit_one_operand on Z. */
10835 if (operand_equal_p (arg01, arg11, 0))
10836 return fold_build2_loc (loc, code, type, arg00,
10837 fold_convert_loc (loc, TREE_TYPE (arg00),
10838 arg10));
10839 if (operand_equal_p (arg01, arg10, 0))
10840 return fold_build2_loc (loc, code, type, arg00,
10841 fold_convert_loc (loc, TREE_TYPE (arg00),
10842 arg11));
10843 if (operand_equal_p (arg00, arg11, 0))
10844 return fold_build2_loc (loc, code, type, arg01,
10845 fold_convert_loc (loc, TREE_TYPE (arg01),
10846 arg10));
10847 if (operand_equal_p (arg00, arg10, 0))
10848 return fold_build2_loc (loc, code, type, arg01,
10849 fold_convert_loc (loc, TREE_TYPE (arg01),
10850 arg11));
10852 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10853 if (TREE_CODE (arg01) == INTEGER_CST
10854 && TREE_CODE (arg11) == INTEGER_CST)
10856 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10857 fold_convert_loc (loc, itype, arg11));
10858 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10859 return fold_build2_loc (loc, code, type, tem,
10860 fold_convert_loc (loc, itype, arg10));
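/* Illustrative instance: (X ^ 5) == (Y ^ 3) can be XOR'ed on both
   sides by 3, giving (X ^ 5 ^ 3) == Y, i.e. (X ^ 6) == Y -- one
   constant XOR instead of two.  */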
10864 /* Attempt to simplify equality/inequality comparisons of complex
10865 values. Only lower the comparison if the result is known or
10866 can be simplified to a single scalar comparison. */
10867 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10868 || TREE_CODE (arg0) == COMPLEX_CST)
10869 && (TREE_CODE (arg1) == COMPLEX_EXPR
10870 || TREE_CODE (arg1) == COMPLEX_CST))
10872 tree real0, imag0, real1, imag1;
10873 tree rcond, icond;
10875 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10877 real0 = TREE_OPERAND (arg0, 0);
10878 imag0 = TREE_OPERAND (arg0, 1);
10880 else
10882 real0 = TREE_REALPART (arg0);
10883 imag0 = TREE_IMAGPART (arg0);
10886 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10888 real1 = TREE_OPERAND (arg1, 0);
10889 imag1 = TREE_OPERAND (arg1, 1);
10891 else
10893 real1 = TREE_REALPART (arg1);
10894 imag1 = TREE_IMAGPART (arg1);
10897 rcond = fold_binary_loc (loc, code, type, real0, real1);
10898 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10900 if (integer_zerop (rcond))
10902 if (code == EQ_EXPR)
10903 return omit_two_operands_loc (loc, type, boolean_false_node,
10904 imag0, imag1);
10905 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10907 else
10909 if (code == NE_EXPR)
10910 return omit_two_operands_loc (loc, type, boolean_true_node,
10911 imag0, imag1);
10912 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10916 icond = fold_binary_loc (loc, code, type, imag0, imag1);
10917 if (icond && TREE_CODE (icond) == INTEGER_CST)
10919 if (integer_zerop (icond))
10921 if (code == EQ_EXPR)
10922 return omit_two_operands_loc (loc, type, boolean_false_node,
10923 real0, real1);
10924 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10926 else
10928 if (code == NE_EXPR)
10929 return omit_two_operands_loc (loc, type, boolean_true_node,
10930 real0, real1);
10931 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
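/* Componentwise view (illustrative): (a + b*i) EQ (c + d*i) holds iff
   a == c and b == d, so once the comparison of one component pair folds
   to a constant the whole test either collapses to a constant (keeping
   the other pair only for side effects via omit_two_operands) or
   reduces to the single remaining scalar comparison.  */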
10936 return NULL_TREE;
10938 case LT_EXPR:
10939 case GT_EXPR:
10940 case LE_EXPR:
10941 case GE_EXPR:
10942 tem = fold_comparison (loc, code, type, op0, op1);
10943 if (tem != NULL_TREE)
10944 return tem;
10946 /* Transform comparisons of the form X +- C CMP X. */
10947 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10948 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10949 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10950 && !HONOR_SNANS (arg0))
10951 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10952 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
10954 tree arg01 = TREE_OPERAND (arg0, 1);
10955 enum tree_code code0 = TREE_CODE (arg0);
10956 int is_positive;
10958 if (TREE_CODE (arg01) == REAL_CST)
10959 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10960 else
10961 is_positive = tree_int_cst_sgn (arg01);
10963 /* (X - c) > X becomes false. */
10964 if (code == GT_EXPR
10965 && ((code0 == MINUS_EXPR && is_positive >= 0)
10966 || (code0 == PLUS_EXPR && is_positive <= 0)))
10968 if (TREE_CODE (arg01) == INTEGER_CST
10969 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10970 fold_overflow_warning (("assuming signed overflow does not "
10971 "occur when assuming that (X - c) > X "
10972 "is always false"),
10973 WARN_STRICT_OVERFLOW_ALL);
10974 return constant_boolean_node (0, type);
10977 /* Likewise (X + c) < X becomes false. */
10978 if (code == LT_EXPR
10979 && ((code0 == PLUS_EXPR && is_positive >= 0)
10980 || (code0 == MINUS_EXPR && is_positive <= 0)))
10982 if (TREE_CODE (arg01) == INTEGER_CST
10983 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10984 fold_overflow_warning (("assuming signed overflow does not "
10985 "occur when assuming that "
10986 "(X + c) < X is always false"),
10987 WARN_STRICT_OVERFLOW_ALL);
10988 return constant_boolean_node (0, type);
10991 /* Convert (X - c) <= X to true. */
10992 if (!HONOR_NANS (arg1)
10993 && code == LE_EXPR
10994 && ((code0 == MINUS_EXPR && is_positive >= 0)
10995 || (code0 == PLUS_EXPR && is_positive <= 0)))
10997 if (TREE_CODE (arg01) == INTEGER_CST
10998 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10999 fold_overflow_warning (("assuming signed overflow does not "
11000 "occur when assuming that "
11001 "(X - c) <= X is always true"),
11002 WARN_STRICT_OVERFLOW_ALL);
11003 return constant_boolean_node (1, type);
11006 /* Convert (X + c) >= X to true. */
11007 if (!HONOR_NANS (arg1)
11008 && code == GE_EXPR
11009 && ((code0 == PLUS_EXPR && is_positive >= 0)
11010 || (code0 == MINUS_EXPR && is_positive <= 0)))
11012 if (TREE_CODE (arg01) == INTEGER_CST
11013 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11014 fold_overflow_warning (("assuming signed overflow does not "
11015 "occur when assuming that "
11016 "(X + c) >= X is always true"),
11017 WARN_STRICT_OVERFLOW_ALL);
11018 return constant_boolean_node (1, type);
11021 if (TREE_CODE (arg01) == INTEGER_CST)
11023 /* Convert X + c > X and X - c < X to true for integers. */
11024 if (code == GT_EXPR
11025 && ((code0 == PLUS_EXPR && is_positive > 0)
11026 || (code0 == MINUS_EXPR && is_positive < 0)))
11028 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11029 fold_overflow_warning (("assuming signed overflow does "
11030 "not occur when assuming that "
11031 "(X + c) > X is always true"),
11032 WARN_STRICT_OVERFLOW_ALL);
11033 return constant_boolean_node (1, type);
11036 if (code == LT_EXPR
11037 && ((code0 == MINUS_EXPR && is_positive > 0)
11038 || (code0 == PLUS_EXPR && is_positive < 0)))
11040 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11041 fold_overflow_warning (("assuming signed overflow does "
11042 "not occur when assuming that "
11043 "(X - c) < X is always true"),
11044 WARN_STRICT_OVERFLOW_ALL);
11045 return constant_boolean_node (1, type);
11048 /* Convert X + c <= X and X - c >= X to false for integers. */
11049 if (code == LE_EXPR
11050 && ((code0 == PLUS_EXPR && is_positive > 0)
11051 || (code0 == MINUS_EXPR && is_positive < 0)))
11053 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11054 fold_overflow_warning (("assuming signed overflow does "
11055 "not occur when assuming that "
11056 "(X + c) <= X is always false"),
11057 WARN_STRICT_OVERFLOW_ALL);
11058 return constant_boolean_node (0, type);
11061 if (code == GE_EXPR
11062 && ((code0 == MINUS_EXPR && is_positive > 0)
11063 || (code0 == PLUS_EXPR && is_positive < 0)))
11065 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11066 fold_overflow_warning (("assuming signed overflow does "
11067 "not occur when assuming that "
11068 "(X - c) >= X is always false"),
11069 WARN_STRICT_OVERFLOW_ALL);
11070 return constant_boolean_node (0, type);
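/* A concrete reason for the TYPE_OVERFLOW_UNDEFINED guards above
   (illustrative): with wrapping 32-bit unsigned X, X + 1 > X is false
   for X == 0xffffffff, so "(X + c) > X is always true" may only be
   assumed when signed overflow is undefined, and a warning records the
   assumption.  */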
11075 /* If we are comparing an ABS_EXPR with a constant, we can
11076 convert all the cases into explicit comparisons, but they may
11077 well not be faster than doing the ABS and one comparison.
11078 But ABS (X) <= C is a range comparison, which becomes a subtraction
11079 and a comparison, and is probably faster. */
11080 if (code == LE_EXPR
11081 && TREE_CODE (arg1) == INTEGER_CST
11082 && TREE_CODE (arg0) == ABS_EXPR
11083 && ! TREE_SIDE_EFFECTS (arg0)
11084 && (0 != (tem = negate_expr (arg1)))
11085 && TREE_CODE (tem) == INTEGER_CST
11086 && !TREE_OVERFLOW (tem))
11087 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11088 build2 (GE_EXPR, type,
11089 TREE_OPERAND (arg0, 0), tem),
11090 build2 (LE_EXPR, type,
11091 TREE_OPERAND (arg0, 0), arg1));
11093 /* Convert ABS_EXPR<x> >= 0 to true. */
11094 strict_overflow_p = false;
11095 if (code == GE_EXPR
11096 && (integer_zerop (arg1)
11097 || (! HONOR_NANS (arg0)
11098 && real_zerop (arg1)))
11099 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11101 if (strict_overflow_p)
11102 fold_overflow_warning (("assuming signed overflow does not occur "
11103 "when simplifying comparison of "
11104 "absolute value and zero"),
11105 WARN_STRICT_OVERFLOW_CONDITIONAL);
11106 return omit_one_operand_loc (loc, type,
11107 constant_boolean_node (true, type),
11108 arg0);
11111 /* Convert ABS_EXPR<x> < 0 to false. */
11112 strict_overflow_p = false;
11113 if (code == LT_EXPR
11114 && (integer_zerop (arg1) || real_zerop (arg1))
11115 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11117 if (strict_overflow_p)
11118 fold_overflow_warning (("assuming signed overflow does not occur "
11119 "when simplifying comparison of "
11120 "absolute value and zero"),
11121 WARN_STRICT_OVERFLOW_CONDITIONAL);
11122 return omit_one_operand_loc (loc, type,
11123 constant_boolean_node (false, type),
11124 arg0);
11127 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11128 and similarly for >= into !=. */
11129 if ((code == LT_EXPR || code == GE_EXPR)
11130 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11131 && TREE_CODE (arg1) == LSHIFT_EXPR
11132 && integer_onep (TREE_OPERAND (arg1, 0)))
11133 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11134 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11135 TREE_OPERAND (arg1, 1)),
11136 build_zero_cst (TREE_TYPE (arg0)));
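/* Quick sanity check (illustrative): for unsigned X == 13 and Y == 4,
   13 < (1 << 4) == 16 holds and 13 >> 4 == 0 as well; for X == 16,
   16 < 16 fails and 16 >> 4 == 1 != 0.  The rewrite avoids
   materializing the power of two.  */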
11138 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11139 otherwise Y might be >= # of bits in X's type and thus e.g.
11140 (unsigned char) (1 << Y) for Y == 15 might be 0.
11141 If the cast is widening, then 1 << Y should have unsigned type,
11142 otherwise if Y is number of bits in the signed shift type minus 1,
11143 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
11144 Y == 31 might be 0xffffffff80000000. */
11145 if ((code == LT_EXPR || code == GE_EXPR)
11146 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11147 && CONVERT_EXPR_P (arg1)
11148 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11149 && (element_precision (TREE_TYPE (arg1))
11150 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11151 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11152 || (element_precision (TREE_TYPE (arg1))
11153 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11154 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11156 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11157 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11158 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11159 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11160 build_zero_cst (TREE_TYPE (arg0)));
11163 return NULL_TREE;
11165 case UNORDERED_EXPR:
11166 case ORDERED_EXPR:
11167 case UNLT_EXPR:
11168 case UNLE_EXPR:
11169 case UNGT_EXPR:
11170 case UNGE_EXPR:
11171 case UNEQ_EXPR:
11172 case LTGT_EXPR:
11173 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11175 tree targ0 = strip_float_extensions (arg0);
11176 tree targ1 = strip_float_extensions (arg1);
11177 tree newtype = TREE_TYPE (targ0);
11179 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11180 newtype = TREE_TYPE (targ1);
11182 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11183 return fold_build2_loc (loc, code, type,
11184 fold_convert_loc (loc, newtype, targ0),
11185 fold_convert_loc (loc, newtype, targ1));
11188 return NULL_TREE;
11190 case COMPOUND_EXPR:
11191 /* When pedantic, a compound expression can be neither an lvalue
11192 nor an integer constant expression. */
11193 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11194 return NULL_TREE;
11195 /* Don't let (0, 0) be a null pointer constant. */
11196 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11197 : fold_convert_loc (loc, type, arg1);
11198 return pedantic_non_lvalue_loc (loc, tem);
11200 case ASSERT_EXPR:
11201 /* An ASSERT_EXPR should never be passed to fold_binary. */
11202 gcc_unreachable ();
11204 default:
11205 return NULL_TREE;
11206 } /* switch (code) */
11209 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11210 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11211 of GOTO_EXPR. */
11213 static tree
11214 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11216 switch (TREE_CODE (*tp))
11218 case LABEL_EXPR:
11219 return *tp;
11221 case GOTO_EXPR:
11222 *walk_subtrees = 0;
11224 /* fall through */
11226 default:
11227 return NULL_TREE;
11231 /* Return whether the sub-tree ST contains a label which is accessible from
11232 outside the sub-tree. */
11234 static bool
11235 contains_label_p (tree st)
11237 return
11238 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
11241 /* Fold a ternary expression of code CODE and type TYPE with operands
11242 OP0, OP1, and OP2. Return the folded expression if folding is
11243 successful. Otherwise, return NULL_TREE. */
11245 tree
11246 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11247 tree op0, tree op1, tree op2)
11249 tree tem;
11250 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11251 enum tree_code_class kind = TREE_CODE_CLASS (code);
11253 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11254 && TREE_CODE_LENGTH (code) == 3);
11256 /* If this is a commutative operation, and OP0 is a constant, move it
11257 to OP1 to reduce the number of tests below. */
11258 if (commutative_ternary_tree_code (code)
11259 && tree_swap_operands_p (op0, op1))
11260 return fold_build3_loc (loc, code, type, op1, op0, op2);
11262 tem = generic_simplify (loc, code, type, op0, op1, op2);
11263 if (tem)
11264 return tem;
11266 /* Strip any conversions that don't change the mode. This is safe
11267 for every expression, except for a comparison expression because
11268 its signedness is derived from its operands. So, in the latter
11269 case, only strip conversions that don't change the signedness.
11271 Note that this is done as an internal manipulation within the
11272 constant folder, in order to find the simplest representation of
11273 the arguments so that their form can be studied. In any case,
11274 the appropriate type conversions should be put back in the tree
11275 that will get out of the constant folder. */
11276 if (op0)
11278 arg0 = op0;
11279 STRIP_NOPS (arg0);
11282 if (op1)
11284 arg1 = op1;
11285 STRIP_NOPS (arg1);
11288 if (op2)
11290 arg2 = op2;
11291 STRIP_NOPS (arg2);
11294 switch (code)
11296 case COMPONENT_REF:
11297 if (TREE_CODE (arg0) == CONSTRUCTOR
11298 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11300 unsigned HOST_WIDE_INT idx;
11301 tree field, value;
11302 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11303 if (field == arg1)
11304 return value;
11306 return NULL_TREE;
11308 case COND_EXPR:
11309 case VEC_COND_EXPR:
11310 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11311 so all simple results must be passed through pedantic_non_lvalue. */
11312 if (TREE_CODE (arg0) == INTEGER_CST)
11314 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11315 tem = integer_zerop (arg0) ? op2 : op1;
11316 /* Only optimize constant conditions when the selected branch
11317 has the same type as the COND_EXPR. This avoids optimizing
11318 away "c ? x : throw", where the throw has a void type.
11319 Avoid throwing away the operand which contains a label. */
11320 if ((!TREE_SIDE_EFFECTS (unused_op)
11321 || !contains_label_p (unused_op))
11322 && (! VOID_TYPE_P (TREE_TYPE (tem))
11323 || VOID_TYPE_P (type)))
11324 return pedantic_non_lvalue_loc (loc, tem);
11325 return NULL_TREE;
11327 else if (TREE_CODE (arg0) == VECTOR_CST)
11329 if ((TREE_CODE (arg1) == VECTOR_CST
11330 || TREE_CODE (arg1) == CONSTRUCTOR)
11331 && (TREE_CODE (arg2) == VECTOR_CST
11332 || TREE_CODE (arg2) == CONSTRUCTOR))
11334 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11335 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11336 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11337 for (i = 0; i < nelts; i++)
11339 tree val = VECTOR_CST_ELT (arg0, i);
11340 if (integer_all_onesp (val))
11341 sel[i] = i;
11342 else if (integer_zerop (val))
11343 sel[i] = nelts + i;
11344 else /* Currently unreachable. */
11345 return NULL_TREE;
11347 tree t = fold_vec_perm (type, arg1, arg2, sel);
11348 if (t != NULL_TREE)
11349 return t;
11353 /* If we have A op B ? A : C, we may be able to convert this to a
11354 simpler expression, depending on the operation and the values
11355 of B and C. Signed zeros prevent all of these transformations,
11356 for reasons given above each one.
11358 Also try swapping the arguments and inverting the conditional. */
11359 if (COMPARISON_CLASS_P (arg0)
11360 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11361 arg1, TREE_OPERAND (arg0, 1))
11362 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11364 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11365 if (tem)
11366 return tem;
11369 if (COMPARISON_CLASS_P (arg0)
11370 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11371 op2,
11372 TREE_OPERAND (arg0, 1))
11373 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11375 location_t loc0 = expr_location_or (arg0, loc);
11376 tem = fold_invert_truthvalue (loc0, arg0);
11377 if (tem && COMPARISON_CLASS_P (tem))
11379 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11380 if (tem)
11381 return tem;
11385 /* If the second operand is simpler than the third, swap them
11386 since that produces better jump optimization results. */
11387 if (truth_value_p (TREE_CODE (arg0))
11388 && tree_swap_operands_p (op1, op2))
11390 location_t loc0 = expr_location_or (arg0, loc);
11391 /* See if this can be inverted. If it can't, possibly because
11392 it was a floating-point inequality comparison, don't do
11393 anything. */
11394 tem = fold_invert_truthvalue (loc0, arg0);
11395 if (tem)
11396 return fold_build3_loc (loc, code, type, tem, op2, op1);
11399 /* Convert A ? 1 : 0 to simply A. */
11400 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11401 : (integer_onep (op1)
11402 && !VECTOR_TYPE_P (type)))
11403 && integer_zerop (op2)
11404 /* If we try to convert OP0 to our type, the
11405 call to fold will try to move the conversion inside
11406 a COND, which will recurse. In that case, the COND_EXPR
11407 is probably the best choice, so leave it alone. */
11408 && type == TREE_TYPE (arg0))
11409 return pedantic_non_lvalue_loc (loc, arg0);
11411 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11412 over COND_EXPR in cases such as floating point comparisons. */
11413 if (integer_zerop (op1)
11414 && code == COND_EXPR
11415 && integer_onep (op2)
11416 && !VECTOR_TYPE_P (type)
11417 && truth_value_p (TREE_CODE (arg0)))
11418 return pedantic_non_lvalue_loc (loc,
11419 fold_convert_loc (loc, type,
11420 invert_truthvalue_loc (loc,
11421 arg0)));
11423 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11424 if (TREE_CODE (arg0) == LT_EXPR
11425 && integer_zerop (TREE_OPERAND (arg0, 1))
11426 && integer_zerop (op2)
11427 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11429 /* sign_bit_p looks through both zero and sign extensions,
11430 but for this optimization only sign extensions are
11431 usable. */
11432 tree tem2 = TREE_OPERAND (arg0, 0);
11433 while (tem != tem2)
11435 if (TREE_CODE (tem2) != NOP_EXPR
11436 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11438 tem = NULL_TREE;
11439 break;
11441 tem2 = TREE_OPERAND (tem2, 0);
11443 /* sign_bit_p only checks ARG1 bits within A's precision.
11444 If <sign bit of A> has wider type than A, bits outside
11445 of A's precision in <sign bit of A> need to be checked.
11446 If they are all 0, this optimization needs to be done
11447 in unsigned A's type; if they are all 1, in signed A's type;
11448 otherwise this can't be done. */
11449 if (tem
11450 && TYPE_PRECISION (TREE_TYPE (tem))
11451 < TYPE_PRECISION (TREE_TYPE (arg1))
11452 && TYPE_PRECISION (TREE_TYPE (tem))
11453 < TYPE_PRECISION (type))
11455 int inner_width, outer_width;
11456 tree tem_type;
11458 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11459 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11460 if (outer_width > TYPE_PRECISION (type))
11461 outer_width = TYPE_PRECISION (type);
11463 wide_int mask = wi::shifted_mask
11464 (inner_width, outer_width - inner_width, false,
11465 TYPE_PRECISION (TREE_TYPE (arg1)));
11467 wide_int common = mask & arg1;
11468 if (common == mask)
11470 tem_type = signed_type_for (TREE_TYPE (tem));
11471 tem = fold_convert_loc (loc, tem_type, tem);
11473 else if (common == 0)
11475 tem_type = unsigned_type_for (TREE_TYPE (tem));
11476 tem = fold_convert_loc (loc, tem_type, tem);
11478 else
11479 tem = NULL;
11482 if (tem)
11483 return
11484 fold_convert_loc (loc, type,
11485 fold_build2_loc (loc, BIT_AND_EXPR,
11486 TREE_TYPE (tem), tem,
11487 fold_convert_loc (loc,
11488 TREE_TYPE (tem),
11489 arg1)));
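/* Illustrative instance (assuming 32-bit int A): A < 0 ? 0x80000000 : 0
   selects exactly the sign bit of A, which is what A & 0x80000000
   computes directly; the checks above ensure that any intervening
   conversions were sign extensions, so the tested bit really is A's
   sign.  */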
11492 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11493 already handled above. */
11494 if (TREE_CODE (arg0) == BIT_AND_EXPR
11495 && integer_onep (TREE_OPERAND (arg0, 1))
11496 && integer_zerop (op2)
11497 && integer_pow2p (arg1))
11499 tree tem = TREE_OPERAND (arg0, 0);
11500 STRIP_NOPS (tem);
11501 if (TREE_CODE (tem) == RSHIFT_EXPR
11502 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11503 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11504 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11505 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11506 fold_convert_loc (loc, type,
11507 TREE_OPERAND (tem, 0)),
11508 op1);
11511 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11512 is probably obsolete because the first operand should be a
11513 truth value (that's why we have the two cases above), but let's
11514 leave it in until we can confirm this for all front-ends. */
11515 if (integer_zerop (op2)
11516 && TREE_CODE (arg0) == NE_EXPR
11517 && integer_zerop (TREE_OPERAND (arg0, 1))
11518 && integer_pow2p (arg1)
11519 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11520 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11521 arg1, OEP_ONLY_CONST))
11522 return pedantic_non_lvalue_loc (loc,
11523 fold_convert_loc (loc, type,
11524 TREE_OPERAND (arg0, 0)));
11526 /* Disable the transformations below for vectors, since
11527 fold_binary_op_with_conditional_arg may undo them immediately,
11528 yielding an infinite loop. */
11529 if (code == VEC_COND_EXPR)
11530 return NULL_TREE;
11532 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11533 if (integer_zerop (op2)
11534 && truth_value_p (TREE_CODE (arg0))
11535 && truth_value_p (TREE_CODE (arg1))
11536 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11537 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11538 : TRUTH_ANDIF_EXPR,
11539 type, fold_convert_loc (loc, type, arg0), op1);
11541 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11542 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11543 && truth_value_p (TREE_CODE (arg0))
11544 && truth_value_p (TREE_CODE (arg1))
11545 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11547 location_t loc0 = expr_location_or (arg0, loc);
11548 /* Only perform transformation if ARG0 is easily inverted. */
11549 tem = fold_invert_truthvalue (loc0, arg0);
11550 if (tem)
11551 return fold_build2_loc (loc, code == VEC_COND_EXPR
11552 ? BIT_IOR_EXPR
11553 : TRUTH_ORIF_EXPR,
11554 type, fold_convert_loc (loc, type, tem),
11555 op1);
11558 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11559 if (integer_zerop (arg1)
11560 && truth_value_p (TREE_CODE (arg0))
11561 && truth_value_p (TREE_CODE (op2))
11562 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11564 location_t loc0 = expr_location_or (arg0, loc);
11565 /* Only perform transformation if ARG0 is easily inverted. */
11566 tem = fold_invert_truthvalue (loc0, arg0);
11567 if (tem)
11568 return fold_build2_loc (loc, code == VEC_COND_EXPR
11569 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11570 type, fold_convert_loc (loc, type, tem),
11571 op2);
11574 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11575 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11576 && truth_value_p (TREE_CODE (arg0))
11577 && truth_value_p (TREE_CODE (op2))
11578 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11579 return fold_build2_loc (loc, code == VEC_COND_EXPR
11580 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11581 type, fold_convert_loc (loc, type, arg0), op2);
11583 return NULL_TREE;
11585 case CALL_EXPR:
11586 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11587 of fold_ternary on them. */
11588 gcc_unreachable ();
11590 case BIT_FIELD_REF:
11591 if (TREE_CODE (arg0) == VECTOR_CST
11592 && (type == TREE_TYPE (TREE_TYPE (arg0))
11593 || (TREE_CODE (type) == VECTOR_TYPE
11594 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11596 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11597 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11598 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11599 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11601 if (n != 0
11602 && (idx % width) == 0
11603 && (n % width) == 0
11604 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11606 idx = idx / width;
11607 n = n / width;
11609 if (TREE_CODE (arg0) == VECTOR_CST)
11611 if (n == 1)
11612 return VECTOR_CST_ELT (arg0, idx);
11614 tree *vals = XALLOCAVEC (tree, n);
11615 for (unsigned i = 0; i < n; ++i)
11616 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11617 return build_vector (type, vals);
11622 /* On constants we can use native encode/interpret to constant
11623 fold (nearly) all BIT_FIELD_REFs. */
11624 if (CONSTANT_CLASS_P (arg0)
11625 && can_native_interpret_type_p (type)
11626 && BITS_PER_UNIT == 8)
11628 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11629 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11630 /* Limit us to a reasonable amount of work. To relax the
11631 other limitations we need bit-shifting of the buffer
11632 and rounding up the size. */
11633 if (bitpos % BITS_PER_UNIT == 0
11634 && bitsize % BITS_PER_UNIT == 0
11635 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11637 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11638 unsigned HOST_WIDE_INT len
11639 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11640 bitpos / BITS_PER_UNIT);
11641 if (len > 0
11642 && len * BITS_PER_UNIT >= bitsize)
11644 tree v = native_interpret_expr (type, b,
11645 bitsize / BITS_PER_UNIT);
11646 if (v)
11647 return v;
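/* Rough picture (illustrative): the constant is serialized into the
   byte buffer B with native_encode_expr, then the requested
   byte-aligned slice is re-read as TYPE; e.g. extracting bits [8,16)
   of the 32-bit constant 0x11223344 yields the byte 0x33 on a
   little-endian target.  */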
11652 return NULL_TREE;
11654 case FMA_EXPR:
11655 /* For integers we can decompose the FMA if possible. */
11656 if (TREE_CODE (arg0) == INTEGER_CST
11657 && TREE_CODE (arg1) == INTEGER_CST)
11658 return fold_build2_loc (loc, PLUS_EXPR, type,
11659 const_binop (MULT_EXPR, arg0, arg1), arg2);
11660 if (integer_zerop (arg2))
11661 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11663 return fold_fma (loc, type, arg0, arg1, arg2);
11665 case VEC_PERM_EXPR:
11666 if (TREE_CODE (arg2) == VECTOR_CST)
11668 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11669 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11670 unsigned char *sel2 = sel + nelts;
11671 bool need_mask_canon = false;
11672 bool need_mask_canon2 = false;
11673 bool all_in_vec0 = true;
11674 bool all_in_vec1 = true;
11675 bool maybe_identity = true;
11676 bool single_arg = (op0 == op1);
11677 bool changed = false;
11679 mask2 = 2 * nelts - 1;
11680 mask = single_arg ? (nelts - 1) : mask2;
11681 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11682 for (i = 0; i < nelts; i++)
11684 tree val = VECTOR_CST_ELT (arg2, i);
11685 if (TREE_CODE (val) != INTEGER_CST)
11686 return NULL_TREE;
11688 /* Make sure that the perm value is in an acceptable
11689 range. */
11690 wide_int t = val;
11691 need_mask_canon |= wi::gtu_p (t, mask);
11692 need_mask_canon2 |= wi::gtu_p (t, mask2);
11693 sel[i] = t.to_uhwi () & mask;
11694 sel2[i] = t.to_uhwi () & mask2;
11696 if (sel[i] < nelts)
11697 all_in_vec1 = false;
11698 else
11699 all_in_vec0 = false;
11701 if ((sel[i] & (nelts-1)) != i)
11702 maybe_identity = false;
11705 if (maybe_identity)
11707 if (all_in_vec0)
11708 return op0;
11709 if (all_in_vec1)
11710 return op1;
11713 if (all_in_vec0)
11714 op1 = op0;
11715 else if (all_in_vec1)
11717 op0 = op1;
11718 for (i = 0; i < nelts; i++)
11719 sel[i] -= nelts;
11720 need_mask_canon = true;
11723 if ((TREE_CODE (op0) == VECTOR_CST
11724 || TREE_CODE (op0) == CONSTRUCTOR)
11725 && (TREE_CODE (op1) == VECTOR_CST
11726 || TREE_CODE (op1) == CONSTRUCTOR))
11728 tree t = fold_vec_perm (type, op0, op1, sel);
11729 if (t != NULL_TREE)
11730 return t;
11733 if (op0 == op1 && !single_arg)
11734 changed = true;
11736 /* Some targets are deficient and fail to expand a single
11737 argument permutation while still allowing an equivalent
11738 2-argument version. */
11739 if (need_mask_canon && arg2 == op2
11740 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11741 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11743 need_mask_canon = need_mask_canon2;
11744 sel = sel2;
11747 if (need_mask_canon && arg2 == op2)
11749 tree *tsel = XALLOCAVEC (tree, nelts);
11750 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11751 for (i = 0; i < nelts; i++)
11752 tsel[i] = build_int_cst (eltype, sel[i]);
11753 op2 = build_vector (TREE_TYPE (arg2), tsel);
11754 changed = true;
11757 if (changed)
11758 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11760 return NULL_TREE;
11762 case BIT_INSERT_EXPR:
11763 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
11764 if (TREE_CODE (arg0) == INTEGER_CST
11765 && TREE_CODE (arg1) == INTEGER_CST)
11767 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11768 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11769 wide_int tem = wi::bit_and (arg0,
11770 wi::shifted_mask (bitpos, bitsize, true,
11771 TYPE_PRECISION (type)));
11772 wide_int tem2
11773 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11774 bitsize), bitpos);
11775 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
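/* Worked example (illustrative, 8-bit type): inserting the 3-bit value
   0b101 at bitpos 4 into 0xff first clears bits 4..6 with the inverted
   shifted mask (0xff & ~0x70 == 0x8f), then ORs in the zero-extended
   value shifted into place (0b101 << 4 == 0x50), giving 0xdf.  */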
11777 else if (TREE_CODE (arg0) == VECTOR_CST
11778 && CONSTANT_CLASS_P (arg1)
11779 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11780 TREE_TYPE (arg1)))
11782 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11783 unsigned HOST_WIDE_INT elsize
11784 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11785 if (bitpos % elsize == 0)
11787 unsigned k = bitpos / elsize;
11788 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11789 return arg0;
11790 else
11792 tree *elts = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
11793 memcpy (elts, VECTOR_CST_ELTS (arg0),
11794 sizeof (tree) * TYPE_VECTOR_SUBPARTS (type));
11795 elts[k] = arg1;
11796 return build_vector (type, elts);
11800 return NULL_TREE;
11802 default:
11803 return NULL_TREE;
11804 } /* switch (code) */
11807 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11808 of an array (or vector). */
11810 tree
11811 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11813 tree index_type = NULL_TREE;
11814 offset_int low_bound = 0;
11816 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11818 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11819 if (domain_type && TYPE_MIN_VALUE (domain_type))
11821 /* Static constructors for variably sized objects make no sense. */
11822 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11823 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11824 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11828 if (index_type)
11829 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11830 TYPE_SIGN (index_type));
11832 offset_int index = low_bound - 1;
11833 if (index_type)
11834 index = wi::ext (index, TYPE_PRECISION (index_type),
11835 TYPE_SIGN (index_type));
11837 offset_int max_index;
11838 unsigned HOST_WIDE_INT cnt;
11839 tree cfield, cval;
11841 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11843 /* Array constructor might explicitly set index, or specify a range,
11844 or leave index NULL, meaning that it is the next index after the
11845 previous one. */
11846 if (cfield)
11848 if (TREE_CODE (cfield) == INTEGER_CST)
11849 max_index = index = wi::to_offset (cfield);
11850 else
11852 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11853 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11854 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11857 else
11859 index += 1;
11860 if (index_type)
11861 index = wi::ext (index, TYPE_PRECISION (index_type),
11862 TYPE_SIGN (index_type));
11863 max_index = index;
11866 /* Do we have a match? */
11867 if (wi::cmpu (access_index, index) >= 0
11868 && wi::cmpu (access_index, max_index) <= 0)
11869 return cval;
11871 return NULL_TREE;
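/* Usage sketch (illustrative): for a CONSTRUCTOR of int[3] {10, 20, 30}
   with no explicit indexes, the running INDEX counts 0, 1, 2, so
   ACCESS_INDEX 1 returns the element 20; an explicit RANGE_EXPR entry
   [lo..hi] instead matches any ACCESS_INDEX inside that range.  */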
11874 /* Perform constant folding and related simplification of EXPR.
11875 The related simplifications include x*1 => x, x*0 => 0, etc.,
11876 and application of the associative law.
11877 NOP_EXPR conversions may be removed freely (as long as we
11878 are careful not to change the type of the overall expression).
11879 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11880 but we can constant-fold them if they have constant operands. */
11882 #ifdef ENABLE_FOLD_CHECKING
11883 # define fold(x) fold_1 (x)
11884 static tree fold_1 (tree);
11885 static
11886 #endif
11887 tree
11888 fold (tree expr)
11890 const tree t = expr;
11891 enum tree_code code = TREE_CODE (t);
11892 enum tree_code_class kind = TREE_CODE_CLASS (code);
11893 tree tem;
11894 location_t loc = EXPR_LOCATION (expr);
11896 /* Return right away if a constant. */
11897 if (kind == tcc_constant)
11898 return t;
11900 /* CALL_EXPR-like objects with variable numbers of operands are
11901 treated specially. */
11902 if (kind == tcc_vl_exp)
11904 if (code == CALL_EXPR)
11906 tem = fold_call_expr (loc, expr, false);
11907 return tem ? tem : expr;
11909 return expr;
11912 if (IS_EXPR_CODE_CLASS (kind))
11914 tree type = TREE_TYPE (t);
11915 tree op0, op1, op2;
11917 switch (TREE_CODE_LENGTH (code))
11919 case 1:
11920 op0 = TREE_OPERAND (t, 0);
11921 tem = fold_unary_loc (loc, code, type, op0);
11922 return tem ? tem : expr;
11923 case 2:
11924 op0 = TREE_OPERAND (t, 0);
11925 op1 = TREE_OPERAND (t, 1);
11926 tem = fold_binary_loc (loc, code, type, op0, op1);
11927 return tem ? tem : expr;
11928 case 3:
11929 op0 = TREE_OPERAND (t, 0);
11930 op1 = TREE_OPERAND (t, 1);
11931 op2 = TREE_OPERAND (t, 2);
11932 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11933 return tem ? tem : expr;
11934 default:
11935 break;
11939 switch (code)
11941 case ARRAY_REF:
11943 tree op0 = TREE_OPERAND (t, 0);
11944 tree op1 = TREE_OPERAND (t, 1);
11946 if (TREE_CODE (op1) == INTEGER_CST
11947 && TREE_CODE (op0) == CONSTRUCTOR
11948 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11950 tree val = get_array_ctor_element_at_index (op0,
11951 wi::to_offset (op1));
11952 if (val)
11953 return val;
11956 return t;
11959 /* Return a VECTOR_CST if possible. */
11960 case CONSTRUCTOR:
11962 tree type = TREE_TYPE (t);
11963 if (TREE_CODE (type) != VECTOR_TYPE)
11964 return t;
11966 unsigned i;
11967 tree val;
11968 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
11969 if (! CONSTANT_CLASS_P (val))
11970 return t;
11972 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
11975 case CONST_DECL:
11976 return fold (DECL_INITIAL (t));
11978 default:
11979 return t;
11980 } /* switch (code) */
11983 #ifdef ENABLE_FOLD_CHECKING
11984 #undef fold
11986 static void fold_checksum_tree (const_tree, struct md5_ctx *,
11987 hash_table<nofree_ptr_hash<const tree_node> > *);
11988 static void fold_check_failed (const_tree, const_tree);
11989 void print_fold_checksum (const_tree);
11991 /* When --enable-checking=fold, compute a digest of expr before
11992 and after the actual fold call to check that fold did not
11993 accidentally change the original expr. */
11995 tree
11996 fold (tree expr)
11998 tree ret;
11999 struct md5_ctx ctx;
12000 unsigned char checksum_before[16], checksum_after[16];
12001 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12003 md5_init_ctx (&ctx);
12004 fold_checksum_tree (expr, &ctx, &ht);
12005 md5_finish_ctx (&ctx, checksum_before);
12006 ht.empty ();
12008 ret = fold_1 (expr);
12010 md5_init_ctx (&ctx);
12011 fold_checksum_tree (expr, &ctx, &ht);
12012 md5_finish_ctx (&ctx, checksum_after);
12014 if (memcmp (checksum_before, checksum_after, 16))
12015 fold_check_failed (expr, ret);
12017 return ret;
12020 void
12021 print_fold_checksum (const_tree expr)
12023 struct md5_ctx ctx;
12024 unsigned char checksum[16], cnt;
12025 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12027 md5_init_ctx (&ctx);
12028 fold_checksum_tree (expr, &ctx, &ht);
12029 md5_finish_ctx (&ctx, checksum);
12030 for (cnt = 0; cnt < 16; ++cnt)
12031 fprintf (stderr, "%02x", checksum[cnt]);
12032 putc ('\n', stderr);
12035 static void
12036 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12038 internal_error ("fold check: original tree changed by fold");
12041 static void
12042 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12043 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12045 const tree_node **slot;
12046 enum tree_code code;
12047 union tree_node buf;
12048 int i, len;
12050 recursive_label:
12051 if (expr == NULL)
12052 return;
12053 slot = ht->find_slot (expr, INSERT);
12054 if (*slot != NULL)
12055 return;
12056 *slot = expr;
12057 code = TREE_CODE (expr);
12058 if (TREE_CODE_CLASS (code) == tcc_declaration
12059 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12061 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12062 memcpy ((char *) &buf, expr, tree_size (expr));
12063 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12064 buf.decl_with_vis.symtab_node = NULL;
12065 expr = (tree) &buf;
12067 else if (TREE_CODE_CLASS (code) == tcc_type
12068 && (TYPE_POINTER_TO (expr)
12069 || TYPE_REFERENCE_TO (expr)
12070 || TYPE_CACHED_VALUES_P (expr)
12071 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12072 || TYPE_NEXT_VARIANT (expr)
12073 || TYPE_ALIAS_SET_KNOWN_P (expr)))
12075 /* Allow these fields to be modified. */
12076 tree tmp;
12077 memcpy ((char *) &buf, expr, tree_size (expr));
12078 expr = tmp = (tree) &buf;
12079 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12080 TYPE_POINTER_TO (tmp) = NULL;
12081 TYPE_REFERENCE_TO (tmp) = NULL;
12082 TYPE_NEXT_VARIANT (tmp) = NULL;
12083 TYPE_ALIAS_SET (tmp) = -1;
12084 if (TYPE_CACHED_VALUES_P (tmp))
12086 TYPE_CACHED_VALUES_P (tmp) = 0;
12087 TYPE_CACHED_VALUES (tmp) = NULL;
12090 md5_process_bytes (expr, tree_size (expr), ctx);
12091 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12092 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12093 if (TREE_CODE_CLASS (code) != tcc_type
12094 && TREE_CODE_CLASS (code) != tcc_declaration
12095 && code != TREE_LIST
12096 && code != SSA_NAME
12097 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12098 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12099 switch (TREE_CODE_CLASS (code))
12101 case tcc_constant:
12102 switch (code)
12104 case STRING_CST:
12105 md5_process_bytes (TREE_STRING_POINTER (expr),
12106 TREE_STRING_LENGTH (expr), ctx);
12107 break;
12108 case COMPLEX_CST:
12109 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12110 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12111 break;
12112 case VECTOR_CST:
12113 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12114 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12115 break;
12116 default:
12117 break;
12119 break;
12120 case tcc_exceptional:
12121 switch (code)
12123 case TREE_LIST:
12124 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12125 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12126 expr = TREE_CHAIN (expr);
12127 goto recursive_label;
12128 break;
12129 case TREE_VEC:
12130 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12131 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12132 break;
12133 default:
12134 break;
12136 break;
12137 case tcc_expression:
12138 case tcc_reference:
12139 case tcc_comparison:
12140 case tcc_unary:
12141 case tcc_binary:
12142 case tcc_statement:
12143 case tcc_vl_exp:
12144 len = TREE_OPERAND_LENGTH (expr);
12145 for (i = 0; i < len; ++i)
12146 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12147 break;
12148 case tcc_declaration:
12149 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12150 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12151 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12153 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12154 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12155 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12156 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12157 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12160 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12162 if (TREE_CODE (expr) == FUNCTION_DECL)
12164 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12165 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12167 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12169 break;
12170 case tcc_type:
12171 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12172 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12173 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12174 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12175 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12176 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12177 if (INTEGRAL_TYPE_P (expr)
12178 || SCALAR_FLOAT_TYPE_P (expr))
12180 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12181 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12183 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12184 if (TREE_CODE (expr) == RECORD_TYPE
12185 || TREE_CODE (expr) == UNION_TYPE
12186 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12187 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12188 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12189 break;
12190 default:
12191 break;
12195 /* Helper function for outputting the checksum of a tree T. When
12196 debugging with gdb, you can "define mynext" to be "next" followed
12197 by "call debug_fold_checksum (op0)", then just trace down till the
12198 outputs differ. */
12200 DEBUG_FUNCTION void
12201 debug_fold_checksum (const_tree t)
12203 int i;
12204 unsigned char checksum[16];
12205 struct md5_ctx ctx;
12206 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12208 md5_init_ctx (&ctx);
12209 fold_checksum_tree (t, &ctx, &ht);
12210 md5_finish_ctx (&ctx, checksum);
12211 ht.empty ();
12213 for (i = 0; i < 16; i++)
12214 fprintf (stderr, "%d ", checksum[i]);
12216 fprintf (stderr, "\n");
12219 #endif
12221 /* Fold a unary tree expression with code CODE of type TYPE with an
12222 operand OP0. LOC is the location of the resulting expression.
12223 Return a folded expression if successful. Otherwise, return a tree
12224 expression with code CODE of type TYPE with an operand OP0. */
12226 tree
12227 fold_build1_loc (location_t loc,
12228 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12230 tree tem;
12231 #ifdef ENABLE_FOLD_CHECKING
12232 unsigned char checksum_before[16], checksum_after[16];
12233 struct md5_ctx ctx;
12234 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12236 md5_init_ctx (&ctx);
12237 fold_checksum_tree (op0, &ctx, &ht);
12238 md5_finish_ctx (&ctx, checksum_before);
12239 ht.empty ();
12240 #endif
12242 tem = fold_unary_loc (loc, code, type, op0);
12243 if (!tem)
12244 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
12246 #ifdef ENABLE_FOLD_CHECKING
12247 md5_init_ctx (&ctx);
12248 fold_checksum_tree (op0, &ctx, &ht);
12249 md5_finish_ctx (&ctx, checksum_after);
12251 if (memcmp (checksum_before, checksum_after, 16))
12252 fold_check_failed (op0, tem);
12253 #endif
12254 return tem;
12257 /* Fold a binary tree expression with code CODE of type TYPE with
12258 operands OP0 and OP1. LOC is the location of the resulting
12259 expression. Return a folded expression if successful. Otherwise,
12260 return a tree expression with code CODE of type TYPE with operands
12261 OP0 and OP1. */
12263 tree
12264 fold_build2_loc (location_t loc,
12265 enum tree_code code, tree type, tree op0, tree op1
12266 MEM_STAT_DECL)
12268 tree tem;
12269 #ifdef ENABLE_FOLD_CHECKING
12270 unsigned char checksum_before_op0[16],
12271 checksum_before_op1[16],
12272 checksum_after_op0[16],
12273 checksum_after_op1[16];
12274 struct md5_ctx ctx;
12275 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12277 md5_init_ctx (&ctx);
12278 fold_checksum_tree (op0, &ctx, &ht);
12279 md5_finish_ctx (&ctx, checksum_before_op0);
12280 ht.empty ();
12282 md5_init_ctx (&ctx);
12283 fold_checksum_tree (op1, &ctx, &ht);
12284 md5_finish_ctx (&ctx, checksum_before_op1);
12285 ht.empty ();
12286 #endif
12288 tem = fold_binary_loc (loc, code, type, op0, op1);
12289 if (!tem)
12290 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12292 #ifdef ENABLE_FOLD_CHECKING
12293 md5_init_ctx (&ctx);
12294 fold_checksum_tree (op0, &ctx, &ht);
12295 md5_finish_ctx (&ctx, checksum_after_op0);
12296 ht.empty ();
12298 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12299 fold_check_failed (op0, tem);
12301 md5_init_ctx (&ctx);
12302 fold_checksum_tree (op1, &ctx, &ht);
12303 md5_finish_ctx (&ctx, checksum_after_op1);
12305 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12306 fold_check_failed (op1, tem);
12307 #endif
12308 return tem;
12311 /* Fold a ternary tree expression with code CODE of type TYPE with
12312 operands OP0, OP1, and OP2. Return a folded expression if
12313 successful. Otherwise, return a tree expression with code CODE of
12314 type TYPE with operands OP0, OP1, and OP2. */
12316 tree
12317 fold_build3_loc (location_t loc, enum tree_code code, tree type,
12318 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12320 tree tem;
12321 #ifdef ENABLE_FOLD_CHECKING
12322 unsigned char checksum_before_op0[16],
12323 checksum_before_op1[16],
12324 checksum_before_op2[16],
12325 checksum_after_op0[16],
12326 checksum_after_op1[16],
12327 checksum_after_op2[16];
12328 struct md5_ctx ctx;
12329 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12331 md5_init_ctx (&ctx);
12332 fold_checksum_tree (op0, &ctx, &ht);
12333 md5_finish_ctx (&ctx, checksum_before_op0);
12334 ht.empty ();
12336 md5_init_ctx (&ctx);
12337 fold_checksum_tree (op1, &ctx, &ht);
12338 md5_finish_ctx (&ctx, checksum_before_op1);
12339 ht.empty ();
12341 md5_init_ctx (&ctx);
12342 fold_checksum_tree (op2, &ctx, &ht);
12343 md5_finish_ctx (&ctx, checksum_before_op2);
12344 ht.empty ();
12345 #endif
12347 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12348 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12349 if (!tem)
12350 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12352 #ifdef ENABLE_FOLD_CHECKING
12353 md5_init_ctx (&ctx);
12354 fold_checksum_tree (op0, &ctx, &ht);
12355 md5_finish_ctx (&ctx, checksum_after_op0);
12356 ht.empty ();
12358 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12359 fold_check_failed (op0, tem);
12361 md5_init_ctx (&ctx);
12362 fold_checksum_tree (op1, &ctx, &ht);
12363 md5_finish_ctx (&ctx, checksum_after_op1);
12364 ht.empty ();
12366 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12367 fold_check_failed (op1, tem);
12369 md5_init_ctx (&ctx);
12370 fold_checksum_tree (op2, &ctx, &ht);
12371 md5_finish_ctx (&ctx, checksum_after_op2);
12373 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12374 fold_check_failed (op2, tem);
12375 #endif
12376 return tem;
12379 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12380 arguments in ARGARRAY, and a null static chain.
12381 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12382 of type TYPE from the given operands as constructed by build_call_array. */
12384 tree
12385 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12386 int nargs, tree *argarray)
12388 tree tem;
12389 #ifdef ENABLE_FOLD_CHECKING
12390 unsigned char checksum_before_fn[16],
12391 checksum_before_arglist[16],
12392 checksum_after_fn[16],
12393 checksum_after_arglist[16];
12394 struct md5_ctx ctx;
12395 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12396 int i;
12398 md5_init_ctx (&ctx);
12399 fold_checksum_tree (fn, &ctx, &ht);
12400 md5_finish_ctx (&ctx, checksum_before_fn);
12401 ht.empty ();
12403 md5_init_ctx (&ctx);
12404 for (i = 0; i < nargs; i++)
12405 fold_checksum_tree (argarray[i], &ctx, &ht);
12406 md5_finish_ctx (&ctx, checksum_before_arglist);
12407 ht.empty ();
12408 #endif
12410 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12411 if (!tem)
12412 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12414 #ifdef ENABLE_FOLD_CHECKING
12415 md5_init_ctx (&ctx);
12416 fold_checksum_tree (fn, &ctx, &ht);
12417 md5_finish_ctx (&ctx, checksum_after_fn);
12418 ht.empty ();
12420 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12421 fold_check_failed (fn, tem);
12423 md5_init_ctx (&ctx);
12424 for (i = 0; i < nargs; i++)
12425 fold_checksum_tree (argarray[i], &ctx, &ht);
12426 md5_finish_ctx (&ctx, checksum_after_arglist);
12428 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12429 fold_check_failed (NULL_TREE, tem);
12430 #endif
12431 return tem;
12434 /* Perform constant folding and related simplification of initializer
12435 expression EXPR. These behave identically to "fold_buildN" but ignore
12436 potential run-time traps and exceptions that fold must preserve. */
12438 #define START_FOLD_INIT \
12439 int saved_signaling_nans = flag_signaling_nans;\
12440 int saved_trapping_math = flag_trapping_math;\
12441 int saved_rounding_math = flag_rounding_math;\
12442 int saved_trapv = flag_trapv;\
12443 int saved_folding_initializer = folding_initializer;\
12444 flag_signaling_nans = 0;\
12445 flag_trapping_math = 0;\
12446 flag_rounding_math = 0;\
12447 flag_trapv = 0;\
12448 folding_initializer = 1;
12450 #define END_FOLD_INIT \
12451 flag_signaling_nans = saved_signaling_nans;\
12452 flag_trapping_math = saved_trapping_math;\
12453 flag_rounding_math = saved_rounding_math;\
12454 flag_trapv = saved_trapv;\
12455 folding_initializer = saved_folding_initializer;
12457 tree
12458 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12459 tree type, tree op)
12461 tree result;
12462 START_FOLD_INIT;
12464 result = fold_build1_loc (loc, code, type, op);
12466 END_FOLD_INIT;
12467 return result;
12470 tree
12471 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12472 tree type, tree op0, tree op1)
12474 tree result;
12475 START_FOLD_INIT;
12477 result = fold_build2_loc (loc, code, type, op0, op1);
12479 END_FOLD_INIT;
12480 return result;
12483 tree
12484 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12485 int nargs, tree *argarray)
12487 tree result;
12488 START_FOLD_INIT;
12490 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12492 END_FOLD_INIT;
12493 return result;
12496 #undef START_FOLD_INIT
12497 #undef END_FOLD_INIT
12499 /* Determine if first argument is a multiple of second argument. Return 0 if
12500 it is not, or we cannot easily determine it to be.
12502 An example of the sort of thing we care about (at this point; this routine
12503 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12504 fold cases do now) is discovering that
12506 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12508 is a multiple of
12510 SAVE_EXPR (J * 8)
12512 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12514 This code also handles discovering that
12516 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12518 is a multiple of 8 so we don't have to worry about dealing with a
12519 possible remainder.
12521 Note that we *look* inside a SAVE_EXPR only to determine how it was
12522 calculated; it is not safe for fold to do much of anything else with the
12523 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12524 at run time. For example, the latter example above *cannot* be implemented
12525 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12526 evaluation time of the original SAVE_EXPR is not necessarily the same at
12527 the time the new expression is evaluated. The only optimization of this
12528 sort that would be valid is changing
12530 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12532 divided by 8 to
12534 SAVE_EXPR (I) * SAVE_EXPR (J)
12536 (where the same SAVE_EXPR (J) is used in the original and the
12537 transformed version). */
12539 int
12540 multiple_of_p (tree type, const_tree top, const_tree bottom)
12542 gimple *stmt;
12543 tree t1, op1, op2;
12545 if (operand_equal_p (top, bottom, 0))
12546 return 1;
12548 if (TREE_CODE (type) != INTEGER_TYPE)
12549 return 0;
12551 switch (TREE_CODE (top))
12553 case BIT_AND_EXPR:
12554 /* Bitwise and provides a power of two multiple. If the mask is
12555 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12556 if (!integer_pow2p (bottom))
12557 return 0;
12558 /* FALLTHRU */
12560 case MULT_EXPR:
12561 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12562 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12564 case MINUS_EXPR:
12565 /* It is impossible to prove precisely whether op0 - op1 is a multiple
12566 of bottom, so be conservative here and check that both op0 and op1
12567 are multiples of bottom. Note we check the second operand first
12568 since it's usually simpler. */
12569 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12570 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12572 case PLUS_EXPR:
12573 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12574 as op0 - 3 if the expression has unsigned type. For example,
12575 (X / 3) * 3 + 0xfffffffd is a multiple of 3, but 0xfffffffd is not. */
12576 op1 = TREE_OPERAND (top, 1);
12577 if (TYPE_UNSIGNED (type)
12578 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
12579 op1 = fold_build1 (NEGATE_EXPR, type, op1);
12580 return (multiple_of_p (type, op1, bottom)
12581 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12583 case LSHIFT_EXPR:
12584 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12586 op1 = TREE_OPERAND (top, 1);
12587 /* const_binop may not detect overflow correctly,
12588 so check for it explicitly here. */
12589 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12590 && 0 != (t1 = fold_convert (type,
12591 const_binop (LSHIFT_EXPR,
12592 size_one_node,
12593 op1)))
12594 && !TREE_OVERFLOW (t1))
12595 return multiple_of_p (type, t1, bottom);
12597 return 0;
12599 case NOP_EXPR:
12600 /* Can't handle conversions from a non-integral type or from a wider integral type. */
12601 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12602 || (TYPE_PRECISION (type)
12603 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12604 return 0;
12606 /* fall through */
12608 case SAVE_EXPR:
12609 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12611 case COND_EXPR:
12612 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12613 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12615 case INTEGER_CST:
12616 if (TREE_CODE (bottom) != INTEGER_CST
12617 || integer_zerop (bottom)
12618 || (TYPE_UNSIGNED (type)
12619 && (tree_int_cst_sgn (top) < 0
12620 || tree_int_cst_sgn (bottom) < 0)))
12621 return 0;
12622 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12623 SIGNED);
12625 case SSA_NAME:
12626 if (TREE_CODE (bottom) == INTEGER_CST
12627 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
12628 && gimple_code (stmt) == GIMPLE_ASSIGN)
12630 enum tree_code code = gimple_assign_rhs_code (stmt);
12632 /* Check for special cases to see if top is defined as a multiple
12633 of bottom:
12635 top = X & ~(bottom - 1) ; bottom is power of 2
12639 Y = X % bottom
12640 top = X - Y. */
12641 if (code == BIT_AND_EXPR
12642 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12643 && TREE_CODE (op2) == INTEGER_CST
12644 && integer_pow2p (bottom)
12645 && wi::multiple_of_p (wi::to_widest (op2),
12646 wi::to_widest (bottom), UNSIGNED))
12647 return 1;
12649 op1 = gimple_assign_rhs1 (stmt);
12650 if (code == MINUS_EXPR
12651 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12652 && TREE_CODE (op2) == SSA_NAME
12653 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
12654 && gimple_code (stmt) == GIMPLE_ASSIGN
12655 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
12656 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
12657 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
12658 return 1;
12661 /* fall through */
12663 default:
12664 return 0;
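/* A usage sketch (hypothetical trees): for SIZE built as N * 8 in
   sizetype, a caller can discover that there is no remainder modulo 8:

     tree eight = build_int_cst (sizetype, 8);
     tree size = size_binop (MULT_EXPR, n, eight);
     if (multiple_of_p (sizetype, size, eight))
       ... division by 8 is exact ...

   The MULT_EXPR case above matches because the constant operand 8 is
   itself a multiple of BOTTOM.  */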
12668 #define tree_expr_nonnegative_warnv_p(X, Y) \
12669 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12671 #define RECURSE(X) \
12672 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12674 /* Return true if CODE or TYPE is known to be non-negative. */
12676 static bool
12677 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12679 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12680 && truth_value_p (code))
12681 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12682 have a signed:1 type (where the possible values are -1 and 0). */
12683 return true;
12684 return false;
12687 /* Return true if (CODE OP0) is known to be non-negative. If the return
12688 value is based on the assumption that signed overflow is undefined,
12689 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12690 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12692 bool
12693 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12694 bool *strict_overflow_p, int depth)
12696 if (TYPE_UNSIGNED (type))
12697 return true;
12699 switch (code)
12701 case ABS_EXPR:
12702 /* We can't return 1 if flag_wrapv is set because
12703 ABS_EXPR<INT_MIN> = INT_MIN. */
12704 if (!ANY_INTEGRAL_TYPE_P (type))
12705 return true;
12706 if (TYPE_OVERFLOW_UNDEFINED (type))
12708 *strict_overflow_p = true;
12709 return true;
12711 break;
12713 case NON_LVALUE_EXPR:
12714 case FLOAT_EXPR:
12715 case FIX_TRUNC_EXPR:
12716 return RECURSE (op0);
12718 CASE_CONVERT:
12720 tree inner_type = TREE_TYPE (op0);
12721 tree outer_type = type;
12723 if (TREE_CODE (outer_type) == REAL_TYPE)
12725 if (TREE_CODE (inner_type) == REAL_TYPE)
12726 return RECURSE (op0);
12727 if (INTEGRAL_TYPE_P (inner_type))
12729 if (TYPE_UNSIGNED (inner_type))
12730 return true;
12731 return RECURSE (op0);
12734 else if (INTEGRAL_TYPE_P (outer_type))
12736 if (TREE_CODE (inner_type) == REAL_TYPE)
12737 return RECURSE (op0);
12738 if (INTEGRAL_TYPE_P (inner_type))
12739 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12740 && TYPE_UNSIGNED (inner_type);
12743 break;
12745 default:
12746 return tree_simple_nonnegative_warnv_p (code, type);
12749 /* We don't know sign of `t', so be conservative and return false. */
12750 return false;
12753 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12754 value is based on the assumption that signed overflow is undefined,
12755 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12756 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12758 bool
12759 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12760 tree op1, bool *strict_overflow_p,
12761 int depth)
12763 if (TYPE_UNSIGNED (type))
12764 return true;
12766 switch (code)
12768 case POINTER_PLUS_EXPR:
12769 case PLUS_EXPR:
12770 if (FLOAT_TYPE_P (type))
12771 return RECURSE (op0) && RECURSE (op1);
12773 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12774 both unsigned and at least 2 bits shorter than the result. */
12775 if (TREE_CODE (type) == INTEGER_TYPE
12776 && TREE_CODE (op0) == NOP_EXPR
12777 && TREE_CODE (op1) == NOP_EXPR)
12779 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12780 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12781 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12782 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12784 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12785 TYPE_PRECISION (inner2)) + 1;
12786 return prec < TYPE_PRECISION (type);
12789 break;
12791 case MULT_EXPR:
12792 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12794 /* x * x is always non-negative for floating point x
12795 or when signed overflow is undefined. */
12796 if (operand_equal_p (op0, op1, 0)
12797 || (RECURSE (op0) && RECURSE (op1)))
12799 if (ANY_INTEGRAL_TYPE_P (type)
12800 && TYPE_OVERFLOW_UNDEFINED (type))
12801 *strict_overflow_p = true;
12802 return true;
12806 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12807 both unsigned and their combined precision is less than that of the result. */
12808 if (TREE_CODE (type) == INTEGER_TYPE
12809 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12810 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12812 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12813 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12814 : TREE_TYPE (op0);
12815 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12816 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12817 : TREE_TYPE (op1);
12819 bool unsigned0 = TYPE_UNSIGNED (inner0);
12820 bool unsigned1 = TYPE_UNSIGNED (inner1);
12822 if (TREE_CODE (op0) == INTEGER_CST)
12823 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12825 if (TREE_CODE (op1) == INTEGER_CST)
12826 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12828 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12829 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12831 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12832 ? tree_int_cst_min_precision (op0, UNSIGNED)
12833 : TYPE_PRECISION (inner0);
12835 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12836 ? tree_int_cst_min_precision (op1, UNSIGNED)
12837 : TYPE_PRECISION (inner1);
12839 return precision0 + precision1 < TYPE_PRECISION (type);
12842 return false;
12844 case BIT_AND_EXPR:
12845 case MAX_EXPR:
12846 return RECURSE (op0) || RECURSE (op1);
12848 case BIT_IOR_EXPR:
12849 case BIT_XOR_EXPR:
12850 case MIN_EXPR:
12851 case RDIV_EXPR:
12852 case TRUNC_DIV_EXPR:
12853 case CEIL_DIV_EXPR:
12854 case FLOOR_DIV_EXPR:
12855 case ROUND_DIV_EXPR:
12856 return RECURSE (op0) && RECURSE (op1);
12858 case TRUNC_MOD_EXPR:
12859 return RECURSE (op0);
12861 case FLOOR_MOD_EXPR:
12862 return RECURSE (op1);
12864 case CEIL_MOD_EXPR:
12865 case ROUND_MOD_EXPR:
12866 default:
12867 return tree_simple_nonnegative_warnv_p (code, type);
12870 /* We don't know sign of `t', so be conservative and return false. */
12871 return false;
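/* A worked instance of the zero_extend rule in the PLUS_EXPR case
   (assuming 8-bit chars and 32-bit ints): for
   (int) (unsigned char) a + (int) (unsigned char) b, each addend is
   at most 255, so the sum is at most 510 < 2^9; prec computes to
   MAX (8, 8) + 1 = 9 < 32, and true is returned with no overflow
   assumption needed.  */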
12874 /* Return true if T is known to be non-negative. If the return
12875 value is based on the assumption that signed overflow is undefined,
12876 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12877 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12879 bool
12880 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12882 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12883 return true;
12885 switch (TREE_CODE (t))
12887 case INTEGER_CST:
12888 return tree_int_cst_sgn (t) >= 0;
12890 case REAL_CST:
12891 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12893 case FIXED_CST:
12894 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12896 case COND_EXPR:
12897 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12899 case SSA_NAME:
12900 /* Limit the depth of recursion to avoid quadratic behavior.
12901 This is expected to catch almost all occurrences in practice.
12902 If this code misses important cases that unbounded recursion
12903 would not, passes that need this information could be revised
12904 to provide it through dataflow propagation. */
12905 return (!name_registered_for_update_p (t)
12906 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12907 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12908 strict_overflow_p, depth));
12910 default:
12911 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12915 /* Return true if T is known to be non-negative. If the return
12916 value is based on the assumption that signed overflow is undefined,
12917 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12918 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12920 bool
12921 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
12922 bool *strict_overflow_p, int depth)
12924 switch (fn)
12926 CASE_CFN_ACOS:
12927 CASE_CFN_ACOSH:
12928 CASE_CFN_CABS:
12929 CASE_CFN_COSH:
12930 CASE_CFN_ERFC:
12931 CASE_CFN_EXP:
12932 CASE_CFN_EXP10:
12933 CASE_CFN_EXP2:
12934 CASE_CFN_FABS:
12935 CASE_CFN_FDIM:
12936 CASE_CFN_HYPOT:
12937 CASE_CFN_POW10:
12938 CASE_CFN_FFS:
12939 CASE_CFN_PARITY:
12940 CASE_CFN_POPCOUNT:
12941 CASE_CFN_CLZ:
12942 CASE_CFN_CLRSB:
12943 case CFN_BUILT_IN_BSWAP32:
12944 case CFN_BUILT_IN_BSWAP64:
12945 /* Always true. */
12946 return true;
12948 CASE_CFN_SQRT:
12949 /* sqrt(-0.0) is -0.0. */
12950 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12951 return true;
12952 return RECURSE (arg0);
12954 CASE_CFN_ASINH:
12955 CASE_CFN_ATAN:
12956 CASE_CFN_ATANH:
12957 CASE_CFN_CBRT:
12958 CASE_CFN_CEIL:
12959 CASE_CFN_ERF:
12960 CASE_CFN_EXPM1:
12961 CASE_CFN_FLOOR:
12962 CASE_CFN_FMOD:
12963 CASE_CFN_FREXP:
12964 CASE_CFN_ICEIL:
12965 CASE_CFN_IFLOOR:
12966 CASE_CFN_IRINT:
12967 CASE_CFN_IROUND:
12968 CASE_CFN_LCEIL:
12969 CASE_CFN_LDEXP:
12970 CASE_CFN_LFLOOR:
12971 CASE_CFN_LLCEIL:
12972 CASE_CFN_LLFLOOR:
12973 CASE_CFN_LLRINT:
12974 CASE_CFN_LLROUND:
12975 CASE_CFN_LRINT:
12976 CASE_CFN_LROUND:
12977 CASE_CFN_MODF:
12978 CASE_CFN_NEARBYINT:
12979 CASE_CFN_RINT:
12980 CASE_CFN_ROUND:
12981 CASE_CFN_SCALB:
12982 CASE_CFN_SCALBLN:
12983 CASE_CFN_SCALBN:
12984 CASE_CFN_SIGNBIT:
12985 CASE_CFN_SIGNIFICAND:
12986 CASE_CFN_SINH:
12987 CASE_CFN_TANH:
12988 CASE_CFN_TRUNC:
12989 /* True if the 1st argument is nonnegative. */
12990 return RECURSE (arg0);
12992 CASE_CFN_FMAX:
12993 /* True if the 1st OR 2nd arguments are nonnegative. */
12994 return RECURSE (arg0) || RECURSE (arg1);
12996 CASE_CFN_FMIN:
12997 /* True if the 1st AND 2nd arguments are nonnegative. */
12998 return RECURSE (arg0) && RECURSE (arg1);
13000 CASE_CFN_COPYSIGN:
13001 /* True if the 2nd argument is nonnegative. */
13002 return RECURSE (arg1);
13004 CASE_CFN_POWI:
13005 /* True if the 1st argument is nonnegative or the second
13006 argument is an even integer. */
13007 if (TREE_CODE (arg1) == INTEGER_CST
13008 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13009 return true;
13010 return RECURSE (arg0);
13012 CASE_CFN_POW:
13013 /* True if the 1st argument is nonnegative or the second
13014 argument is an even integer valued real. */
13015 if (TREE_CODE (arg1) == REAL_CST)
13017 REAL_VALUE_TYPE c;
13018 HOST_WIDE_INT n;
13020 c = TREE_REAL_CST (arg1);
13021 n = real_to_integer (&c);
13022 if ((n & 1) == 0)
13024 REAL_VALUE_TYPE cint;
13025 real_from_integer (&cint, VOIDmode, n, SIGNED);
13026 if (real_identical (&c, &cint))
13027 return true;
13030 return RECURSE (arg0);
13032 default:
13033 break;
13035 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
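/* E.g. for pow (x, 2.0) the REAL_CST 2.0 survives the
   real_to_integer / real_from_integer round trip unchanged and is
   even, so the CASE_CFN_POW arm above returns true regardless of the
   sign of X (illustrative call; any even integer-valued constant
   exponent behaves the same).  */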
13038 /* Return true if T is known to be non-negative. If the return
13039 value is based on the assumption that signed overflow is undefined,
13040 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13041 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13043 static bool
13044 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13046 enum tree_code code = TREE_CODE (t);
13047 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13048 return true;
13050 switch (code)
13052 case TARGET_EXPR:
13054 tree temp = TARGET_EXPR_SLOT (t);
13055 t = TARGET_EXPR_INITIAL (t);
13057 /* If the initializer is non-void, then it's a normal expression
13058 that will be assigned to the slot. */
13059 if (!VOID_TYPE_P (t))
13060 return RECURSE (t);
13062 /* Otherwise, the initializer sets the slot in some way. One common
13063 way is an assignment statement at the end of the initializer. */
13064 while (1)
13066 if (TREE_CODE (t) == BIND_EXPR)
13067 t = expr_last (BIND_EXPR_BODY (t));
13068 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13069 || TREE_CODE (t) == TRY_CATCH_EXPR)
13070 t = expr_last (TREE_OPERAND (t, 0));
13071 else if (TREE_CODE (t) == STATEMENT_LIST)
13072 t = expr_last (t);
13073 else
13074 break;
13076 if (TREE_CODE (t) == MODIFY_EXPR
13077 && TREE_OPERAND (t, 0) == temp)
13078 return RECURSE (TREE_OPERAND (t, 1));
13080 return false;
13083 case CALL_EXPR:
13085 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13086 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13088 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13089 get_call_combined_fn (t),
13090 arg0,
13091 arg1,
13092 strict_overflow_p, depth);
13094 case COMPOUND_EXPR:
13095 case MODIFY_EXPR:
13096 return RECURSE (TREE_OPERAND (t, 1));
13098 case BIND_EXPR:
13099 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13101 case SAVE_EXPR:
13102 return RECURSE (TREE_OPERAND (t, 0));
13104 default:
13105 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13109 #undef RECURSE
13110 #undef tree_expr_nonnegative_warnv_p
13112 /* Return true if T is known to be non-negative. If the return
13113 value is based on the assumption that signed overflow is undefined,
13114 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13115 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13117 bool
13118 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13120 enum tree_code code;
13121 if (t == error_mark_node)
13122 return false;
13124 code = TREE_CODE (t);
13125 switch (TREE_CODE_CLASS (code))
13127 case tcc_binary:
13128 case tcc_comparison:
13129 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13130 TREE_TYPE (t),
13131 TREE_OPERAND (t, 0),
13132 TREE_OPERAND (t, 1),
13133 strict_overflow_p, depth);
13135 case tcc_unary:
13136 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13137 TREE_TYPE (t),
13138 TREE_OPERAND (t, 0),
13139 strict_overflow_p, depth);
13141 case tcc_constant:
13142 case tcc_declaration:
13143 case tcc_reference:
13144 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13146 default:
13147 break;
13150 switch (code)
13152 case TRUTH_AND_EXPR:
13153 case TRUTH_OR_EXPR:
13154 case TRUTH_XOR_EXPR:
13155 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13156 TREE_TYPE (t),
13157 TREE_OPERAND (t, 0),
13158 TREE_OPERAND (t, 1),
13159 strict_overflow_p, depth);
13160 case TRUTH_NOT_EXPR:
13161 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13162 TREE_TYPE (t),
13163 TREE_OPERAND (t, 0),
13164 strict_overflow_p, depth);
13166 case COND_EXPR:
13167 case CONSTRUCTOR:
13168 case OBJ_TYPE_REF:
13169 case ASSERT_EXPR:
13170 case ADDR_EXPR:
13171 case WITH_SIZE_EXPR:
13172 case SSA_NAME:
13173 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13175 default:
13176 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13180 /* Return true if `t' is known to be non-negative. Handle warnings
13181 about undefined signed overflow. */
13183 bool
13184 tree_expr_nonnegative_p (tree t)
13186 bool ret, strict_overflow_p;
13188 strict_overflow_p = false;
13189 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13190 if (strict_overflow_p)
13191 fold_overflow_warning (("assuming signed overflow does not occur when "
13192 "determining that expression is always "
13193 "non-negative"),
13194 WARN_STRICT_OVERFLOW_MISC);
13195 return ret;
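/* A minimal sketch of the usual call pattern (hypothetical tree T):

     if (tree_expr_nonnegative_p (t))
       ... e.g. fold ABS_EXPR <t> to just t ...

   The -Wstrict-overflow warning is emitted here when the answer
   relied on undefined signed overflow, so callers need not track
   *STRICT_OVERFLOW_P themselves.  */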
13199 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13200 For floating point we further ensure that T is not denormal.
13201 Similar logic is present in nonzero_address in rtlanal.h.
13203 If the return value is based on the assumption that signed overflow
13204 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13205 change *STRICT_OVERFLOW_P. */
13207 bool
13208 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13209 bool *strict_overflow_p)
13211 switch (code)
13213 case ABS_EXPR:
13214 return tree_expr_nonzero_warnv_p (op0,
13215 strict_overflow_p);
13217 case NOP_EXPR:
13219 tree inner_type = TREE_TYPE (op0);
13220 tree outer_type = type;
13222 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13223 && tree_expr_nonzero_warnv_p (op0,
13224 strict_overflow_p));
13226 break;
13228 case NON_LVALUE_EXPR:
13229 return tree_expr_nonzero_warnv_p (op0,
13230 strict_overflow_p);
13232 default:
13233 break;
13236 return false;
13239 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13240 For floating point we further ensure that T is not denormal.
13241 Similar logic is present in nonzero_address in rtlanal.h.
13243 If the return value is based on the assumption that signed overflow
13244 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13245 change *STRICT_OVERFLOW_P. */
13247 bool
13248 tree_binary_nonzero_warnv_p (enum tree_code code,
13249 tree type,
13250 tree op0,
13251 tree op1, bool *strict_overflow_p)
13253 bool sub_strict_overflow_p;
13254 switch (code)
13256 case POINTER_PLUS_EXPR:
13257 case PLUS_EXPR:
13258 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13260 /* In the presence of negative values it is hard
13261 to say anything definite. */
13262 sub_strict_overflow_p = false;
13263 if (!tree_expr_nonnegative_warnv_p (op0,
13264 &sub_strict_overflow_p)
13265 || !tree_expr_nonnegative_warnv_p (op1,
13266 &sub_strict_overflow_p))
13267 return false;
13268 /* One of the operands must be positive and the other non-negative. */
13269 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13270 overflows, on a two's-complement machine the sum of two
13271 nonnegative numbers can never be zero. */
13272 return (tree_expr_nonzero_warnv_p (op0,
13273 strict_overflow_p)
13274 || tree_expr_nonzero_warnv_p (op1,
13275 strict_overflow_p));
13277 break;
13279 case MULT_EXPR:
13280 if (TYPE_OVERFLOW_UNDEFINED (type))
13282 if (tree_expr_nonzero_warnv_p (op0,
13283 strict_overflow_p)
13284 && tree_expr_nonzero_warnv_p (op1,
13285 strict_overflow_p))
13287 *strict_overflow_p = true;
13288 return true;
13291 break;
13293 case MIN_EXPR:
13294 sub_strict_overflow_p = false;
13295 if (tree_expr_nonzero_warnv_p (op0,
13296 &sub_strict_overflow_p)
13297 && tree_expr_nonzero_warnv_p (op1,
13298 &sub_strict_overflow_p))
13300 if (sub_strict_overflow_p)
13301 *strict_overflow_p = true;
13303 break;
13305 case MAX_EXPR:
13306 sub_strict_overflow_p = false;
13307 if (tree_expr_nonzero_warnv_p (op0,
13308 &sub_strict_overflow_p))
13310 if (sub_strict_overflow_p)
13311 *strict_overflow_p = true;
13313 /* When both operands are nonzero, MAX must be too. */
13314 if (tree_expr_nonzero_warnv_p (op1,
13315 strict_overflow_p))
13316 return true;
13318 /* MAX where operand 0 is positive is positive. */
13319 return tree_expr_nonnegative_warnv_p (op0,
13320 strict_overflow_p);
13322 /* MAX where operand 1 is positive is positive. */
13323 else if (tree_expr_nonzero_warnv_p (op1,
13324 &sub_strict_overflow_p)
13325 && tree_expr_nonnegative_warnv_p (op1,
13326 &sub_strict_overflow_p))
13328 if (sub_strict_overflow_p)
13329 *strict_overflow_p = true;
13330 return true;
13332 break;
13334 case BIT_IOR_EXPR:
13335 return (tree_expr_nonzero_warnv_p (op1,
13336 strict_overflow_p)
13337 || tree_expr_nonzero_warnv_p (op0,
13338 strict_overflow_p));
13340 default:
13341 break;
13344 return false;
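/* A worked instance of the PLUS_EXPR arm (assuming undefined signed
   overflow): for X + 1 where X is known non-negative, both operands
   are non-negative and the constant 1 is nonzero, so the sum cannot
   wrap to zero and true is returned.  */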
13347 /* Return true when T is an address and is known to be nonzero.
13348 For floating point we further ensure that T is not denormal.
13349 Similar logic is present in nonzero_address in rtlanal.h.
13351 If the return value is based on the assumption that signed overflow
13352 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13353 change *STRICT_OVERFLOW_P. */
13355 bool
13356 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13358 bool sub_strict_overflow_p;
13359 switch (TREE_CODE (t))
13361 case INTEGER_CST:
13362 return !integer_zerop (t);
13364 case ADDR_EXPR:
13366 tree base = TREE_OPERAND (t, 0);
13368 if (!DECL_P (base))
13369 base = get_base_address (base);
13371 if (base && TREE_CODE (base) == TARGET_EXPR)
13372 base = TARGET_EXPR_SLOT (base);
13374 if (!base)
13375 return false;
13377 /* For objects in symbol table check if we know they are non-zero.
13378 Don't do anything for variables and functions before symtab is built;
13379 it is quite possible that they will be declared weak later. */
13380 int nonzero_addr = maybe_nonzero_address (base);
13381 if (nonzero_addr >= 0)
13382 return nonzero_addr;
13384 /* Constants are never weak. */
13385 if (CONSTANT_CLASS_P (base))
13386 return true;
13388 return false;
13391 case COND_EXPR:
13392 sub_strict_overflow_p = false;
13393 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13394 &sub_strict_overflow_p)
13395 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13396 &sub_strict_overflow_p))
13398 if (sub_strict_overflow_p)
13399 *strict_overflow_p = true;
13400 return true;
13402 break;
13404 case SSA_NAME:
13405 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13406 break;
13407 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
13409 default:
13410 break;
13412 return false;
13415 #define integer_valued_real_p(X) \
13416 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13418 #define RECURSE(X) \
13419 ((integer_valued_real_p) (X, depth + 1))
13421 /* Return true if the floating point result of (CODE OP0) has an
13422 integer value. We also allow +Inf, -Inf and NaN to be considered
13423 integer values. Return false for signaling NaN.
13425 DEPTH is the current nesting depth of the query. */
13427 bool
13428 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13430 switch (code)
13432 case FLOAT_EXPR:
13433 return true;
13435 case ABS_EXPR:
13436 return RECURSE (op0);
13438 CASE_CONVERT:
13440 tree type = TREE_TYPE (op0);
13441 if (TREE_CODE (type) == INTEGER_TYPE)
13442 return true;
13443 if (TREE_CODE (type) == REAL_TYPE)
13444 return RECURSE (op0);
13445 break;
13448 default:
13449 break;
13451 return false;
13454 /* Return true if the floating point result of (CODE OP0 OP1) has an
13455 integer value. We also allow +Inf, -Inf and NaN to be considered
13456 integer values. Return false for signaling NaN.
13458 DEPTH is the current nesting depth of the query. */
13460 bool
13461 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13463 switch (code)
13465 case PLUS_EXPR:
13466 case MINUS_EXPR:
13467 case MULT_EXPR:
13468 case MIN_EXPR:
13469 case MAX_EXPR:
13470 return RECURSE (op0) && RECURSE (op1);
13472 default:
13473 break;
13475 return false;
13478 /* Return true if the floating point result of calling FN with arguments
13479 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13480 considered integer values. Return false for signaling NaN. If FN
13481 takes fewer than 2 arguments, the remaining ARGn are null.
13483 DEPTH is the current nesting depth of the query. */
13485 bool
13486 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13488 switch (fn)
13490 CASE_CFN_CEIL:
13491 CASE_CFN_FLOOR:
13492 CASE_CFN_NEARBYINT:
13493 CASE_CFN_RINT:
13494 CASE_CFN_ROUND:
13495 CASE_CFN_TRUNC:
13496 return true;
13498 CASE_CFN_FMIN:
13499 CASE_CFN_FMAX:
13500 return RECURSE (arg0) && RECURSE (arg1);
13502 default:
13503 break;
13505 return false;
13508 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13509 has an integer value. We also allow +Inf, -Inf and NaN to be
13510 considered integer values. Return false for signaling NaN.
13512 DEPTH is the current nesting depth of the query. */
13514 bool
13515 integer_valued_real_single_p (tree t, int depth)
13517 switch (TREE_CODE (t))
13519 case REAL_CST:
13520 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13522 case COND_EXPR:
13523 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13525 case SSA_NAME:
13526 /* Limit the depth of recursion to avoid quadratic behavior.
13527 This is expected to catch almost all occurrences in practice.
13528 If this code misses important cases that unbounded recursion
13529 would not, passes that need this information could be revised
13530 to provide it through dataflow propagation. */
13531 return (!name_registered_for_update_p (t)
13532 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13533 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13534 depth));
13536 default:
13537 break;
13539 return false;
13542 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13543 has an integer value. We also allow +Inf, -Inf and NaN to be
13544 considered integer values. Return false for signaling NaN.
13546 DEPTH is the current nesting depth of the query. */
13548 static bool
13549 integer_valued_real_invalid_p (tree t, int depth)
13551 switch (TREE_CODE (t))
13553 case COMPOUND_EXPR:
13554 case MODIFY_EXPR:
13555 case BIND_EXPR:
13556 return RECURSE (TREE_OPERAND (t, 1));
13558 case SAVE_EXPR:
13559 return RECURSE (TREE_OPERAND (t, 0));
13561 default:
13562 break;
13564 return false;
13567 #undef RECURSE
13568 #undef integer_valued_real_p
13570 /* Return true if the floating point expression T has an integer value.
13571 We also allow +Inf, -Inf and NaN to be considered integer values.
13572 Return false for signaling NaN.
13574 DEPTH is the current nesting depth of the query. */
13576 bool
13577 integer_valued_real_p (tree t, int depth)
13579 if (t == error_mark_node)
13580 return false;
13582 tree_code code = TREE_CODE (t);
13583 switch (TREE_CODE_CLASS (code))
13585 case tcc_binary:
13586 case tcc_comparison:
13587 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13588 TREE_OPERAND (t, 1), depth);
13590 case tcc_unary:
13591 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13593 case tcc_constant:
13594 case tcc_declaration:
13595 case tcc_reference:
13596 return integer_valued_real_single_p (t, depth);
13598 default:
13599 break;
13602 switch (code)
13604 case COND_EXPR:
13605 case SSA_NAME:
13606 return integer_valued_real_single_p (t, depth);
13608 case CALL_EXPR:
13610 tree arg0 = (call_expr_nargs (t) > 0
13611 ? CALL_EXPR_ARG (t, 0)
13612 : NULL_TREE);
13613 tree arg1 = (call_expr_nargs (t) > 1
13614 ? CALL_EXPR_ARG (t, 1)
13615 : NULL_TREE);
13616 return integer_valued_real_call_p (get_call_combined_fn (t),
13617 arg0, arg1, depth);
13620 default:
13621 return integer_valued_real_invalid_p (t, depth);
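/* For example, with T = (float) i for some integer-typed I, the
   tcc_unary dispatch above reaches integer_valued_real_unary_p, whose
   FLOAT_EXPR case returns true, so

     integer_valued_real_p (t, 0)

   is true and a caller may, say, drop a redundant trunc () around T
   (illustrative use; depth 0 is the root of the query).  */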
13625 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13626 attempt to fold the expression to a constant without modifying TYPE,
13627 OP0 or OP1.
13629 If the expression can be simplified to a constant, then return
13630 the constant. If the expression cannot be simplified to a
13631 constant, then return NULL_TREE. */
13633 tree
13634 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13636 tree tem = fold_binary (code, type, op0, op1);
13637 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13640 /* Given the components of a unary expression CODE, TYPE and OP0,
13641 attempt to fold the expression to a constant without modifying
13642 TYPE or OP0.
13644 If the expression can be simplified to a constant, then return
13645 the constant. If the expression cannot be simplified to a
13646 constant, then return NULL_TREE. */
13648 tree
13649 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13651 tree tem = fold_unary (code, type, op0);
13652 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13655 /* If EXP represents referencing an element in a constant string
13656 (either via pointer arithmetic or array indexing), return the
13657 tree representing the value accessed, otherwise return NULL. */
13659 tree
13660 fold_read_from_constant_string (tree exp)
13662 if ((TREE_CODE (exp) == INDIRECT_REF
13663 || TREE_CODE (exp) == ARRAY_REF)
13664 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13666 tree exp1 = TREE_OPERAND (exp, 0);
13667 tree index;
13668 tree string;
13669 location_t loc = EXPR_LOCATION (exp);
13671 if (TREE_CODE (exp) == INDIRECT_REF)
13672 string = string_constant (exp1, &index);
13673 else
13675 tree low_bound = array_ref_low_bound (exp);
13676 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13678 /* Optimize the special-case of a zero lower bound.
13680 We convert the low_bound to sizetype to avoid some problems
13681 with constant folding. (E.g. suppose the lower bound is 1,
13682 and its mode is QI. Without the conversion, (ARRAY
13683 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13684 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13685 if (! integer_zerop (low_bound))
13686 index = size_diffop_loc (loc, index,
13687 fold_convert_loc (loc, sizetype, low_bound));
13689 string = exp1;
13692 if (string
13693 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13694 && TREE_CODE (string) == STRING_CST
13695 && TREE_CODE (index) == INTEGER_CST
13696 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13697 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13698 == MODE_INT)
13699 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13700 return build_int_cst_type (TREE_TYPE (exp),
13701 (TREE_STRING_POINTER (string)
13702 [TREE_INT_CST_LOW (index)]));
13704 return NULL;
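/* For instance, an EXP representing "abc"[1] -- an ARRAY_REF whose
   base is a STRING_CST -- passes every check above and folds to the
   character constant 'b', while an out-of-range access such as
   "abc"[7] fails the length comparison and yields NULL (hypothetical
   expressions, assuming single-byte characters).  */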
13707 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13708 an integer constant, real, or fixed-point constant.
13710 TYPE is the type of the result. */
13712 static tree
13713 fold_negate_const (tree arg0, tree type)
13715 tree t = NULL_TREE;
13717 switch (TREE_CODE (arg0))
13719 case INTEGER_CST:
13721 bool overflow;
13722 wide_int val = wi::neg (arg0, &overflow);
13723 t = force_fit_type (type, val, 1,
13724 (overflow && ! TYPE_UNSIGNED (type))
13725 || TREE_OVERFLOW (arg0));
13726 break;
13729 case REAL_CST:
13730 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13731 break;
13733 case FIXED_CST:
13735 FIXED_VALUE_TYPE f;
13736 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13737 &(TREE_FIXED_CST (arg0)), NULL,
13738 TYPE_SATURATING (type));
13739 t = build_fixed (type, f);
13740 /* Propagate overflow flags. */
13741 if (overflow_p | TREE_OVERFLOW (arg0))
13742 TREE_OVERFLOW (t) = 1;
13743 break;
13746 default:
13747 gcc_unreachable ();
13750 return t;
13753 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13754 an integer constant or real constant.
13756 TYPE is the type of the result. */
13758 tree
13759 fold_abs_const (tree arg0, tree type)
13761 tree t = NULL_TREE;
13763 switch (TREE_CODE (arg0))
13765 case INTEGER_CST:
13767 /* If the value is unsigned or non-negative, then the absolute value
13768 is the same as the ordinary value. */
13769 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13770 t = arg0;
13772 /* If the value is negative, then the absolute value is
13773 its negation. */
13774 else
13776 bool overflow;
13777 wide_int val = wi::neg (arg0, &overflow);
13778 t = force_fit_type (type, val, -1,
13779 overflow | TREE_OVERFLOW (arg0));
13782 break;
13784 case REAL_CST:
13785 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13786 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13787 else
13788 t = arg0;
13789 break;
13791 default:
13792 gcc_unreachable ();
13795 return t;
13798 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13799 constant. TYPE is the type of the result. */
13801 static tree
13802 fold_not_const (const_tree arg0, tree type)
13804 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13806 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13809 /* Given CODE, a relational operator, the target type, TYPE and two
13810 constant operands OP0 and OP1, return the result of the
13811 relational operation. If the result is not a compile time
13812 constant, then return NULL_TREE. */
13814 static tree
13815 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13817 int result, invert;
13819 /* From here on, the only cases we handle are when the result is
13820 known to be a constant. */
13822 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13824 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13825 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13827 /* Handle the cases where either operand is a NaN. */
13828 if (real_isnan (c0) || real_isnan (c1))
13830 switch (code)
13832 case EQ_EXPR:
13833 case ORDERED_EXPR:
13834 result = 0;
13835 break;
13837 case NE_EXPR:
13838 case UNORDERED_EXPR:
13839 case UNLT_EXPR:
13840 case UNLE_EXPR:
13841 case UNGT_EXPR:
13842 case UNGE_EXPR:
13843 case UNEQ_EXPR:
13844 result = 1;
13845 break;
13847 case LT_EXPR:
13848 case LE_EXPR:
13849 case GT_EXPR:
13850 case GE_EXPR:
13851 case LTGT_EXPR:
13852 if (flag_trapping_math)
13853 return NULL_TREE;
13854 result = 0;
13855 break;
13857 default:
13858 gcc_unreachable ();
13861 return constant_boolean_node (result, type);
13864 return constant_boolean_node (real_compare (code, c0, c1), type);
13867 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13869 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13870 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13871 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13874 /* Handle equality/inequality of complex constants. */
13875 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13877 tree rcond = fold_relational_const (code, type,
13878 TREE_REALPART (op0),
13879 TREE_REALPART (op1));
13880 tree icond = fold_relational_const (code, type,
13881 TREE_IMAGPART (op0),
13882 TREE_IMAGPART (op1));
13883 if (code == EQ_EXPR)
13884 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13885 else if (code == NE_EXPR)
13886 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13887 else
13888 return NULL_TREE;
13891 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13893 if (!VECTOR_TYPE_P (type))
13895 /* We have a vector comparison with a scalar boolean result. */
13896 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
13897 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
13898 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
13900 tree elem0 = VECTOR_CST_ELT (op0, i);
13901 tree elem1 = VECTOR_CST_ELT (op1, i);
13902 tree tmp = fold_relational_const (code, type, elem0, elem1);
13903 if (tmp == NULL_TREE)
13904 return NULL_TREE;
13905 if (integer_zerop (tmp))
13906 return constant_boolean_node (false, type);
13908 return constant_boolean_node (true, type);
13910 unsigned count = VECTOR_CST_NELTS (op0);
13911 tree *elts = XALLOCAVEC (tree, count);
13912 gcc_assert (VECTOR_CST_NELTS (op1) == count
13913 && TYPE_VECTOR_SUBPARTS (type) == count);
13915 for (unsigned i = 0; i < count; i++)
13917 tree elem_type = TREE_TYPE (type);
13918 tree elem0 = VECTOR_CST_ELT (op0, i);
13919 tree elem1 = VECTOR_CST_ELT (op1, i);
13921 tree tem = fold_relational_const (code, elem_type,
13922 elem0, elem1);
13924 if (tem == NULL_TREE)
13925 return NULL_TREE;
13927 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
13930 return build_vector (type, elts);
13933 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13935 To compute GT, swap the arguments and do LT.
13936 To compute GE, do LT and invert the result.
13937 To compute LE, swap the arguments, do LT and invert the result.
13938 To compute NE, do EQ and invert the result.
13940 Therefore, the code below must handle only EQ and LT. */
13942 if (code == LE_EXPR || code == GT_EXPR)
13944 std::swap (op0, op1);
13945 code = swap_tree_comparison (code);
13948 /* Note that it is safe to invert for real values here because we
13949 have already handled the one case that it matters. */
13951 invert = 0;
13952 if (code == NE_EXPR || code == GE_EXPR)
13954 invert = 1;
13955 code = invert_tree_comparison (code, false);
13958 /* Compute a result for LT or EQ if args permit;
13959 otherwise return NULL_TREE. */
13960 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13962 if (code == EQ_EXPR)
13963 result = tree_int_cst_equal (op0, op1);
13964 else
13965 result = tree_int_cst_lt (op0, op1);
13967 else
13968 return NULL_TREE;
13970 if (invert)
13971 result ^= 1;
13972 return constant_boolean_node (result, type);
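/* A worked instance of the reduction above (illustrative constants):
   evaluating 5 >= 3 inverts GE_EXPR to LT_EXPR with invert = 1, then
   tree_int_cst_lt (5, 3) yields 0 and 0 ^ 1 = 1, so the result is the
   true boolean node -- GE computed as NOT (LT), exactly as described
   before the swap.  */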
13975 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13976 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13977 itself. */
13979 tree
13980 fold_build_cleanup_point_expr (tree type, tree expr)
13982 /* If the expression does not have side effects then we don't have to wrap
13983 it with a cleanup point expression. */
13984 if (!TREE_SIDE_EFFECTS (expr))
13985 return expr;
13987 /* If the expression is a return, check whether the expression inside the
13988 return, or the right-hand side of the modify expression inside the
13989 return, has side effects. If either has none, we don't need to
13990 wrap the expression in a cleanup point expression. Note we don't check the
13991 left-hand side of the modify because it should always be a return decl. */
13992 if (TREE_CODE (expr) == RETURN_EXPR)
13994 tree op = TREE_OPERAND (expr, 0);
13995 if (!op || !TREE_SIDE_EFFECTS (op))
13996 return expr;
13997 op = TREE_OPERAND (op, 1);
13998 if (!TREE_SIDE_EFFECTS (op))
13999 return expr;
14002 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
14005 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14006 of an indirection through OP0, or NULL_TREE if no simplification is
14007 possible. */
14009 tree
14010 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14012 tree sub = op0;
14013 tree subtype;
14015 STRIP_NOPS (sub);
14016 subtype = TREE_TYPE (sub);
14017 if (!POINTER_TYPE_P (subtype)
14018 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
14019 return NULL_TREE;
14021 if (TREE_CODE (sub) == ADDR_EXPR)
14023 tree op = TREE_OPERAND (sub, 0);
14024 tree optype = TREE_TYPE (op);
14025 /* *&CONST_DECL -> the value of the const decl. */
14026 if (TREE_CODE (op) == CONST_DECL)
14027 return DECL_INITIAL (op);
14028 /* *&p => p; make sure to handle *&"str"[cst] here. */
14029 if (type == optype)
14031 tree fop = fold_read_from_constant_string (op);
14032 if (fop)
14033 return fop;
14034 else
14035 return op;
14037 /* *(foo *)&fooarray => fooarray[0] */
14038 else if (TREE_CODE (optype) == ARRAY_TYPE
14039 && type == TREE_TYPE (optype)
14040 && (!in_gimple_form
14041 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14043 tree type_domain = TYPE_DOMAIN (optype);
14044 tree min_val = size_zero_node;
14045 if (type_domain && TYPE_MIN_VALUE (type_domain))
14046 min_val = TYPE_MIN_VALUE (type_domain);
14047 if (in_gimple_form
14048 && TREE_CODE (min_val) != INTEGER_CST)
14049 return NULL_TREE;
14050 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14051 NULL_TREE, NULL_TREE);
14053 /* *(foo *)&complexfoo => __real__ complexfoo */
14054 else if (TREE_CODE (optype) == COMPLEX_TYPE
14055 && type == TREE_TYPE (optype))
14056 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14057 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14058 else if (TREE_CODE (optype) == VECTOR_TYPE
14059 && type == TREE_TYPE (optype))
14061 tree part_width = TYPE_SIZE (type);
14062 tree index = bitsize_int (0);
14063 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14067 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14068 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14070 tree op00 = TREE_OPERAND (sub, 0);
14071 tree op01 = TREE_OPERAND (sub, 1);
14073 STRIP_NOPS (op00);
14074 if (TREE_CODE (op00) == ADDR_EXPR)
14076 tree op00type;
14077 op00 = TREE_OPERAND (op00, 0);
14078 op00type = TREE_TYPE (op00);
14080 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14081 if (TREE_CODE (op00type) == VECTOR_TYPE
14082 && type == TREE_TYPE (op00type))
14084 tree part_width = TYPE_SIZE (type);
14085 unsigned HOST_WIDE_INT max_offset
14086 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
14087 * TYPE_VECTOR_SUBPARTS (op00type));
14088 if (tree_int_cst_sign_bit (op01) == 0
14089 && compare_tree_int (op01, max_offset) == -1)
14091 unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
14092 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14093 tree index = bitsize_int (indexi);
14094 return fold_build3_loc (loc,
14095 BIT_FIELD_REF, type, op00,
14096 part_width, index);
14099 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14100 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14101 && type == TREE_TYPE (op00type))
14103 tree size = TYPE_SIZE_UNIT (type);
14104 if (tree_int_cst_equal (size, op01))
14105 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14107 /* ((foo *)&fooarray)[1] => fooarray[1] */
14108 else if (TREE_CODE (op00type) == ARRAY_TYPE
14109 && type == TREE_TYPE (op00type))
14111 tree type_domain = TYPE_DOMAIN (op00type);
14112 tree min = size_zero_node;
14113 if (type_domain && TYPE_MIN_VALUE (type_domain))
14114 min = TYPE_MIN_VALUE (type_domain);
14115 offset_int off = wi::to_offset (op01);
14116 offset_int el_sz = wi::to_offset (TYPE_SIZE_UNIT (type));
14117 offset_int remainder;
14118 off = wi::divmod_trunc (off, el_sz, SIGNED, &remainder);
14119 if (remainder == 0 && TREE_CODE (min) == INTEGER_CST)
14121 off = off + wi::to_offset (min);
14122 op01 = wide_int_to_tree (sizetype, off);
14123 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14124 NULL_TREE, NULL_TREE);
14130 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14131 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14132 && type == TREE_TYPE (TREE_TYPE (subtype))
14133 && (!in_gimple_form
14134 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14136 tree type_domain;
14137 tree min_val = size_zero_node;
14138 sub = build_fold_indirect_ref_loc (loc, sub);
14139 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14140 if (type_domain && TYPE_MIN_VALUE (type_domain))
14141 min_val = TYPE_MIN_VALUE (type_domain);
14142 if (in_gimple_form
14143 && TREE_CODE (min_val) != INTEGER_CST)
14144 return NULL_TREE;
14145 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14146 NULL_TREE);
14149 return NULL_TREE;
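/* For example (hypothetical trees), folding *(int *) &iarr where IARR
   has type int[4] matches the "*(foo *)&fooarray => fooarray[0]" case
   above and produces the ARRAY_REF iarr[0] instead of an
   INDIRECT_REF.  */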
14152 /* Builds an expression for an indirection through T, simplifying some
14153 cases. */
14155 tree
14156 build_fold_indirect_ref_loc (location_t loc, tree t)
14158 tree type = TREE_TYPE (TREE_TYPE (t));
14159 tree sub = fold_indirect_ref_1 (loc, type, t);
14161 if (sub)
14162 return sub;
14164 return build1_loc (loc, INDIRECT_REF, type, t);
14167 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14169 tree
14170 fold_indirect_ref_loc (location_t loc, tree t)
14172 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14174 if (sub)
14175 return sub;
14176 else
14177 return t;
14180 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14181 whose result is ignored. The type of the returned tree need not be
14182 the same as the original expression. */
14184 tree
14185 fold_ignored_result (tree t)
14187 if (!TREE_SIDE_EFFECTS (t))
14188 return integer_zero_node;
14190 for (;;)
14191 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14193 case tcc_unary:
14194 t = TREE_OPERAND (t, 0);
14195 break;
14197 case tcc_binary:
14198 case tcc_comparison:
14199 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14200 t = TREE_OPERAND (t, 0);
14201 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14202 t = TREE_OPERAND (t, 1);
14203 else
14204 return t;
14205 break;
14207 case tcc_expression:
14208 switch (TREE_CODE (t))
14210 case COMPOUND_EXPR:
14211 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14212 return t;
14213 t = TREE_OPERAND (t, 0);
14214 break;
14216 case COND_EXPR:
14217 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14218 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14219 return t;
14220 t = TREE_OPERAND (t, 0);
14221 break;
14223 default:
14224 return t;
14226 break;
14228 default:
14229 return t;
14233 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14235 tree
14236 round_up_loc (location_t loc, tree value, unsigned int divisor)
14238 tree div = NULL_TREE;
14240 if (divisor == 1)
14241 return value;
14243 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14244 have to do anything. Only do this when VALUE is not a constant,
14245 because for a constant this check is more expensive than just
14246 doing the rounding. */
14247 if (TREE_CODE (value) != INTEGER_CST)
14249 div = build_int_cst (TREE_TYPE (value), divisor);
14251 if (multiple_of_p (TREE_TYPE (value), value, div))
14252 return value;
14255 /* If divisor is a power of two, simplify this to bit manipulation. */
14256 if (pow2_or_zerop (divisor))
14258 if (TREE_CODE (value) == INTEGER_CST)
14260 wide_int val = value;
14261 bool overflow_p;
14263 if ((val & (divisor - 1)) == 0)
14264 return value;
14266 overflow_p = TREE_OVERFLOW (value);
14267 val += divisor - 1;
14268 val &= (int) -divisor;
14269 if (val == 0)
14270 overflow_p = true;
14272 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14274 else
14276 tree t;
14278 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14279 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14280 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14281 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14284 else
14286 if (!div)
14287 div = build_int_cst (TREE_TYPE (value), divisor);
14288 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14289 value = size_binop_loc (loc, MULT_EXPR, value, div);
14292 return value;
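/* A worked instance of the power-of-two path (illustrative numbers):
   rounding VALUE = 37 up to DIVISOR = 8 computes (37 + 7) & -8 = 40,
   so

     round_up_loc (UNKNOWN_LOCATION, size_int (37), 8)

   yields size_int (40) with no division.  */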
14295 /* Likewise, but round down. */
14297 tree
14298 round_down_loc (location_t loc, tree value, int divisor)
14300 tree div = NULL_TREE;
14302 gcc_assert (divisor > 0);
14303 if (divisor == 1)
14304 return value;
14306 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14307 have to do anything. Only do this when VALUE is not a constant,
14308 because for a constant this check is more expensive than just
14309 doing the rounding. */
14310 if (TREE_CODE (value) != INTEGER_CST)
14312 div = build_int_cst (TREE_TYPE (value), divisor);
14314 if (multiple_of_p (TREE_TYPE (value), value, div))
14315 return value;
14318 /* If divisor is a power of two, simplify this to bit manipulation. */
14319 if (pow2_or_zerop (divisor))
14321 tree t;
14323 t = build_int_cst (TREE_TYPE (value), -divisor);
14324 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14326 else
14328 if (!div)
14329 div = build_int_cst (TREE_TYPE (value), divisor);
14330 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14331 value = size_binop_loc (loc, MULT_EXPR, value, div);
14334 return value;
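/* Likewise a worked instance here (illustrative numbers): rounding 37
   down to a multiple of 8 is simply 37 & -8 = 32; the power-of-two
   path needs no preceding PLUS_EXPR.  */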
14337 /* Return a pointer to the base of the object addressed by EXP, and
14338 extract information about the offset of the access, storing it
14339 in *PBITPOS and *POFFSET. */
14341 static tree
14342 split_address_to_core_and_offset (tree exp,
14343 HOST_WIDE_INT *pbitpos, tree *poffset)
14345 tree core;
14346 machine_mode mode;
14347 int unsignedp, reversep, volatilep;
14348 HOST_WIDE_INT bitsize;
14349 location_t loc = EXPR_LOCATION (exp);
14351 if (TREE_CODE (exp) == ADDR_EXPR)
14353 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14354 poffset, &mode, &unsignedp, &reversep,
14355 &volatilep);
14356 core = build_fold_addr_expr_loc (loc, core);
14358 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
14360 core = TREE_OPERAND (exp, 0);
14361 STRIP_NOPS (core);
14362 *pbitpos = 0;
14363 *poffset = TREE_OPERAND (exp, 1);
14364 if (TREE_CODE (*poffset) == INTEGER_CST)
14366 offset_int tem = wi::sext (wi::to_offset (*poffset),
14367 TYPE_PRECISION (TREE_TYPE (*poffset)));
14368 tem <<= LOG2_BITS_PER_UNIT;
14369 if (wi::fits_shwi_p (tem))
14371 *pbitpos = tem.to_shwi ();
14372 *poffset = NULL_TREE;
14376 else
14378 core = exp;
14379 *pbitpos = 0;
14380 *poffset = NULL_TREE;
14383 return core;
14386 /* Return true if the addresses of E1 and E2 differ by a constant,
14387 false otherwise. If they do, E1 - E2 is stored in *DIFF. */
14389 bool
14390 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14392 tree core1, core2;
14393 HOST_WIDE_INT bitpos1, bitpos2;
14394 tree toffset1, toffset2, tdiff, type;
14396 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14397 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14399 if (bitpos1 % BITS_PER_UNIT != 0
14400 || bitpos2 % BITS_PER_UNIT != 0
14401 || !operand_equal_p (core1, core2, 0))
14402 return false;
14404 if (toffset1 && toffset2)
14406 type = TREE_TYPE (toffset1);
14407 if (type != TREE_TYPE (toffset2))
14408 toffset2 = fold_convert (type, toffset2);
14410 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14411 if (!cst_and_fits_in_hwi (tdiff))
14412 return false;
14414 *diff = int_cst_value (tdiff);
14416 else if (toffset1 || toffset2)
14418 /* If only one of the offsets is non-constant, the difference cannot
14419 be a constant. */
14420 return false;
14422 else
14423 *diff = 0;
14425 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14426 return true;
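/* A usage sketch (hypothetical trees E1 and E2): for E1 = &a[3] and
   E2 = &a[1] with 4-byte elements, both split to the same core &a
   with bit positions 96 and 32, so

     HOST_WIDE_INT diff;
     if (ptr_difference_const (e1, e2, &diff))
       ... diff == 8 ...

   assuming 8-bit units.  */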
14429 /* Return OFF converted to a pointer offset type suitable as offset for
14430 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14431 tree
14432 convert_to_ptrofftype_loc (location_t loc, tree off)
14434 return fold_convert_loc (loc, sizetype, off);
14437 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14438 tree
14439 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14441 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14442 ptr, convert_to_ptrofftype_loc (loc, off));
14445 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14446 tree
14447 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14449 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14450 ptr, size_int (off));
14453 /* Return a char pointer for a C string if it is a string constant
14454 or the sum of a string constant and an integer constant. We only
14455 support string constants properly terminated with a '\0' character.
14456 If STRLEN is a valid pointer, the length (including the terminating
14457 character) of the returned string is stored in *STRLEN. */
14459 const char *
14460 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
14462 tree offset_node;
14464 if (strlen)
14465 *strlen = 0;
14467 src = string_constant (src, &offset_node);
14468 if (src == 0)
14469 return NULL;
14471 unsigned HOST_WIDE_INT offset = 0;
14472 if (offset_node != NULL_TREE)
14474 if (!tree_fits_uhwi_p (offset_node))
14475 return NULL;
14476 else
14477 offset = tree_to_uhwi (offset_node);
14480 unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
14481 const char *string = TREE_STRING_POINTER (src);
14483 /* Support only properly null-terminated strings. */
14484 if (string_length == 0
14485 || string[string_length - 1] != '\0'
14486 || offset >= string_length)
14487 return NULL;
14489 if (strlen)
14490 *strlen = string_length - offset;
14491 return string + offset;
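/* A usage sketch (hypothetical SRC): for SRC representing "hello" + 2,
   string_constant returns the STRING_CST with offset 2, so

     unsigned HOST_WIDE_INT len;
     const char *p = c_getstr (src, &len);

   leaves P pointing at "llo" and LEN == 4, counting the terminating
   NUL.  */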
14494 #if CHECKING_P
14496 namespace selftest {
14498 /* Helper functions for writing tests of folding trees. */
14500 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
14502 static void
14503 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
14504 tree constant)
14506 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
14509 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
14510 wrapping WRAPPED_EXPR. */
14512 static void
14513 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
14514 tree wrapped_expr)
14516 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
14517 ASSERT_NE (wrapped_expr, result);
14518 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
14519 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
14522 /* Verify that various arithmetic binary operations are folded
14523 correctly. */
14525 static void
14526 test_arithmetic_folding ()
14528 tree type = integer_type_node;
14529 tree x = create_tmp_var_raw (type, "x");
14530 tree zero = build_zero_cst (type);
14531 tree one = build_int_cst (type, 1);
14533 /* Addition. */
14534 /* 1 <-- (0 + 1) */
14535 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
14536 one);
14537 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
14538 one);
14540 /* (nonlvalue)x <-- (x + 0) */
14541 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
14542 x);
14544 /* Subtraction. */
14545 /* 0 <-- (x - x) */
14546 assert_binop_folds_to_const (x, MINUS_EXPR, x,
14547 zero);
14548 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
14549 x);
14551 /* Multiplication. */
14552 /* 0 <-- (x * 0) */
14553 assert_binop_folds_to_const (x, MULT_EXPR, zero,
14554 zero);
14556 /* (nonlvalue)x <-- (x * 1) */
14557 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
14558 x);
14561 /* Verify that various binary operations on vectors are folded
14562 correctly. */
14564 static void
14565 test_vector_folding ()
14567 tree inner_type = integer_type_node;
14568 tree type = build_vector_type (inner_type, 4);
14569 tree zero = build_zero_cst (type);
14570 tree one = build_one_cst (type);
14572 /* Verify equality tests that return a scalar boolean result. */
14573 tree res_type = boolean_type_node;
14574 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
14575 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
14576 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
14577 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
14580 /* Run all of the selftests within this file. */
14582 void
14583 fold_const_c_tests ()
14585 test_arithmetic_folding ();
14586 test_vector_folding ();
14589 } // namespace selftest
14591 #endif /* CHECKING_P */