/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "params.h"
75 #include "tree-into-ssa.h"
76 #include "md5.h"
77 #include "case-cfn-macros.h"
78 #include "stringpool.h"
79 #include "tree-vrp.h"
80 #include "tree-ssanames.h"
81 #include "selftest.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
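
/* The encoding is bitwise: bit 0 represents "less than", bit 1
   "equal", bit 2 "greater than" and bit 3 "unordered".  For example,
   COMPCODE_LE (3) == COMPCODE_LT | COMPCODE_EQ, and COMPCODE_NE (13)
   == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD.  This is what lets a
   transformation combine two comparisons with a bitwise AND/OR of
   their codes; roughly (an illustrative sketch, not code from this
   file):

     (a < b) || (a == b)
       -> compcode_to_comparison (COMPCODE_LT | COMPCODE_EQ)
       -> a <= b  */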

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (location_t, tree, tree, enum tree_code,
			tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);
static tree fold_negate_expr (location_t, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
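
/* For illustration, with integer constants:

     div_if_zero_remainder (12, 4)  ->  3
     div_if_zero_remainder (13, 4)  ->  NULL_TREE

   since 13 is not a multiple of 4.  The check is done in widest_int,
   so no intermediate overflow is possible.  */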

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
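
/* A minimal usage sketch (hypothetical caller, not from this file):

     fold_defer_overflow_warnings ();
     tree res = fold_binary_loc (loc, PLUS_EXPR, type, op0, op1);
     fold_undefer_overflow_warnings (res != NULL_TREE, stmt, 0);

   i.e. defer while speculatively folding, then only issue the
   deferred warning when the folded result is actually used.  */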

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
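
/* E.g. for a 32-bit signed int the only value this rejects is
   INT_MIN (0x80000000), whose negation is not representable;
   wi::only_sign_bit_p detects exactly that bit pattern.  */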

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, but after negating one operand
	 it does if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && wi::popcount (wi::abs (TREE_OPERAND (t, 0))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && wi::popcount (wi::abs (TREE_OPERAND (t, 1))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
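
/* For example, for a signed int expression x - y:

     with -fwrapv:    negate_expr_p (x - y) is true, since -(x - y)
		      can be rewritten as y - x without undefined
		      overflow;
     without -fwrapv: it is false, because the rewrite is not known
		      to be safe for overflow purposes.

   (Illustrative only; the exact answers depend on the flags tested
   by the cases above.)  */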

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
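
/* Taken together, the cases above implement rewrites such as

     -(~a)       ->  a + 1		   (integral types)
     -(a - b)    ->  b - a		   (when signed zeros and
					    rounding semantics allow)
     -(a >> 31)  ->  (unsigned) a >> 31	   (for 32-bit int)

   each guarded by the overflow and FP-semantics checks seen above
   (an illustrative summary, not an exhaustive list).  */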

/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (location_t loc, tree in, tree type, enum tree_code code,
	    tree *conp, tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p && var)
	{
	  /* Convert to TYPE before negating.  */
	  var = fold_convert_loc (loc, type, var);
	  var = negate_expr (var);
	}
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      if (var)
	{
	  /* Convert to TYPE before negating.  */
	  var = fold_convert_loc (loc, type, var);
	  var = negate_expr (var);
	}
    }

  return var;
}
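
/* For illustration, with CODE == PLUS_EXPR (a sketch of the
   decomposition contract described above):

     split_tree (a + 5)  ->  var = a,  *litp = 5
     split_tree (a - 5)  ->  var = a,  *minus_litp = 5
     split_tree (~a)     ->  var = -a, *minus_litp = 1   (undoing the
							  -X - 1 -> ~X
							  fold)  */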

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
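
/* A minimal usage sketch (hypothetical values):

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree five = int_const_binop (PLUS_EXPR, two, three);

   FIVE is then an INTEGER_CST of value 5.  For codes the switch in
   int_const_binop_1 does not handle, or for division by zero, the
   result is NULL_TREE instead.  */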

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  elts[i] = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
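
/* For example, given V4SI vectors {1,2,3,4} and {5,6,7,8}, the
   element-wise loop above folds PLUS_EXPR to {6,8,10,12}, and the
   scalar-shift case folds {1,2,3,4} << 1 to {2,4,6,8}; any element
   that fails to fold makes the whole result NULL_TREE (an
   illustrative sketch).  */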

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}

/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts, i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;
	nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	  }

	return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}
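
/* E.g. const_unop (BIT_NOT_EXPR, type, {1,2}) folds each lane to give
   {~1,~2}, and const_unop (REDUC_PLUS_EXPR, type, {1,2,3,4}) folds to
   the scalar 10 by repeated const_binop calls (an illustration of the
   vector cases above).  */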

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
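
/* A typical use (hypothetical) is offset arithmetic:

     tree off = size_binop (PLUS_EXPR, size_int (8), size_int (4));

   which yields the sizetype constant 12.  Note the -1 overflowable
   argument above: for sizetype calculations overflow is tracked even
   though the type is unsigned.  */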

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
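
/* E.g. size_diffop of the sizetype constants 4 and 8 returns the
   ssizetype constant -4: the subtraction is done as 8 - 4 in the
   unsigned type, converted to ssizetype, and subtracted from zero,
   avoiding any unsigned wrap-around (an illustration of the
   constant path above).  */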

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
	{
	  overflow = true;
	  val = lt;
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (real_less (&u, &r))
	    {
	      overflow = true;
	      val = ut;
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
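
/* For example, converting the REAL_CST 1.0e10 to a 32-bit int via
   FIX_TRUNC_EXPR saturates: the result is INT_MAX (2147483647) with
   TREE_OVERFLOW set, and a NaN converts to 0, likewise with the
   overflow flag set, per the saturating rules described above.  */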

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do this by adding 1 to temp when the fractional bits are not
     all zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}
1976 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1977 to another floating point type. */
1979 static tree
1980 fold_convert_const_real_from_real (tree type, const_tree arg1)
1982 REAL_VALUE_TYPE value;
1983 tree t;
1985 /* Don't perform the operation if flag_signaling_nans is on
1986 and the operand is a signaling NaN. */
1987 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
1988 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
1989 return NULL_TREE;
1991 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1992 t = build_real (type, value);
1994 /* If converting an infinity or NAN to a representation that doesn't
1995 have one, set the overflow bit so that we can produce some kind of
1996 error message at the appropriate point if necessary. It's not the
1997 most user-friendly message, but it's better than nothing. */
1998 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1999 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2000 TREE_OVERFLOW (t) = 1;
2001 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2002 && !MODE_HAS_NANS (TYPE_MODE (type)))
2003 TREE_OVERFLOW (t) = 1;
2004 /* Regular overflow, conversion produced an infinity in a mode that
2005 can't represent it. */
2006 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2007 && REAL_VALUE_ISINF (value)
2008 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2009 TREE_OVERFLOW (t) = 1;
2010 else
2011 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2012 return t;
2015 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2016 to a floating point type. */
2018 static tree
2019 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2021 REAL_VALUE_TYPE value;
2022 tree t;
2024 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2025 t = build_real (type, value);
2027 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2028 return t;
2031 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2032 to another fixed-point type. */
2034 static tree
2035 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2037 FIXED_VALUE_TYPE value;
2038 tree t;
2039 bool overflow_p;
2041 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2042 TYPE_SATURATING (type));
2043 t = build_fixed (type, value);
2045 /* Propagate overflow flags. */
2046 if (overflow_p | TREE_OVERFLOW (arg1))
2047 TREE_OVERFLOW (t) = 1;
2048 return t;
2051 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2052 to a fixed-point type. */
2054 static tree
2055 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2057 FIXED_VALUE_TYPE value;
2058 tree t;
2059 bool overflow_p;
2060 double_int di;
2062 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2064 di.low = TREE_INT_CST_ELT (arg1, 0);
2065 if (TREE_INT_CST_NUNITS (arg1) == 1)
2066 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2067 else
2068 di.high = TREE_INT_CST_ELT (arg1, 1);
2070 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2071 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2072 TYPE_SATURATING (type));
2073 t = build_fixed (type, value);
2075 /* Propagate overflow flags. */
2076 if (overflow_p | TREE_OVERFLOW (arg1))
2077 TREE_OVERFLOW (t) = 1;
2078 return t;
2081 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2082 to a fixed-point type. */
2084 static tree
2085 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2087 FIXED_VALUE_TYPE value;
2088 tree t;
2089 bool overflow_p;
2091 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2092 &TREE_REAL_CST (arg1),
2093 TYPE_SATURATING (type));
2094 t = build_fixed (type, value);
2096 /* Propagate overflow flags. */
2097 if (overflow_p | TREE_OVERFLOW (arg1))
2098 TREE_OVERFLOW (t) = 1;
2099 return t;
2102 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2103 type TYPE. If no simplification can be done return NULL_TREE. */
2105 static tree
2106 fold_convert_const (enum tree_code code, tree type, tree arg1)
2108 if (TREE_TYPE (arg1) == type)
2109 return arg1;
2111 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2112 || TREE_CODE (type) == OFFSET_TYPE)
2114 if (TREE_CODE (arg1) == INTEGER_CST)
2115 return fold_convert_const_int_from_int (type, arg1);
2116 else if (TREE_CODE (arg1) == REAL_CST)
2117 return fold_convert_const_int_from_real (code, type, arg1);
2118 else if (TREE_CODE (arg1) == FIXED_CST)
2119 return fold_convert_const_int_from_fixed (type, arg1);
2121 else if (TREE_CODE (type) == REAL_TYPE)
2123 if (TREE_CODE (arg1) == INTEGER_CST)
2124 return build_real_from_int_cst (type, arg1);
2125 else if (TREE_CODE (arg1) == REAL_CST)
2126 return fold_convert_const_real_from_real (type, arg1);
2127 else if (TREE_CODE (arg1) == FIXED_CST)
2128 return fold_convert_const_real_from_fixed (type, arg1);
2130 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2132 if (TREE_CODE (arg1) == FIXED_CST)
2133 return fold_convert_const_fixed_from_fixed (type, arg1);
2134 else if (TREE_CODE (arg1) == INTEGER_CST)
2135 return fold_convert_const_fixed_from_int (type, arg1);
2136 else if (TREE_CODE (arg1) == REAL_CST)
2137 return fold_convert_const_fixed_from_real (type, arg1);
2139 else if (TREE_CODE (type) == VECTOR_TYPE)
2141 if (TREE_CODE (arg1) == VECTOR_CST
2142 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2144 int len = TYPE_VECTOR_SUBPARTS (type);
2145 tree elttype = TREE_TYPE (type);
2146 tree *v = XALLOCAVEC (tree, len);
2147 for (int i = 0; i < len; ++i)
2149 tree elt = VECTOR_CST_ELT (arg1, i);
2150 tree cvt = fold_convert_const (code, elttype, elt);
2151 if (cvt == NULL_TREE)
2152 return NULL_TREE;
2153 v[i] = cvt;
2155 return build_vector (type, v);
2158 return NULL_TREE;
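/* A minimal illustration of the dispatcher above (hypothetical
   snippet, not part of the original sources):

     tree i3 = build_int_cst (integer_type_node, 3);
     tree d3 = fold_convert_const (FLOAT_EXPR, double_type_node, i3);

   Here D3 is the REAL_CST 3.0, built via build_real_from_int_cst;
   a combination with no constant folding rule yields NULL_TREE.  */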
2161 /* Construct a vector of zero elements of vector type TYPE. */
2163 static tree
2164 build_zero_vector (tree type)
2166 tree t;
2168 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2169 return build_vector_from_val (type, t);
2172 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2174 bool
2175 fold_convertible_p (const_tree type, const_tree arg)
2177 tree orig = TREE_TYPE (arg);
2179 if (type == orig)
2180 return true;
2182 if (TREE_CODE (arg) == ERROR_MARK
2183 || TREE_CODE (type) == ERROR_MARK
2184 || TREE_CODE (orig) == ERROR_MARK)
2185 return false;
2187 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2188 return true;
2190 switch (TREE_CODE (type))
2192 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2193 case POINTER_TYPE: case REFERENCE_TYPE:
2194 case OFFSET_TYPE:
2195 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2196 || TREE_CODE (orig) == OFFSET_TYPE);
2198 case REAL_TYPE:
2199 case FIXED_POINT_TYPE:
2200 case VECTOR_TYPE:
2201 case VOID_TYPE:
2202 return TREE_CODE (type) == TREE_CODE (orig);
2204 default:
2205 return false;
2209 /* Convert expression ARG to type TYPE. Used by the middle-end for
2210 simple conversions in preference to calling the front-end's convert. */
2212 tree
2213 fold_convert_loc (location_t loc, tree type, tree arg)
2215 tree orig = TREE_TYPE (arg);
2216 tree tem;
2218 if (type == orig)
2219 return arg;
2221 if (TREE_CODE (arg) == ERROR_MARK
2222 || TREE_CODE (type) == ERROR_MARK
2223 || TREE_CODE (orig) == ERROR_MARK)
2224 return error_mark_node;
2226 switch (TREE_CODE (type))
2228 case POINTER_TYPE:
2229 case REFERENCE_TYPE:
2230 /* Handle conversions between pointers to different address spaces. */
2231 if (POINTER_TYPE_P (orig)
2232 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2233 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2234 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2235 /* fall through */
2237 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2238 case OFFSET_TYPE:
2239 if (TREE_CODE (arg) == INTEGER_CST)
2241 tem = fold_convert_const (NOP_EXPR, type, arg);
2242 if (tem != NULL_TREE)
2243 return tem;
2245 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2246 || TREE_CODE (orig) == OFFSET_TYPE)
2247 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2248 if (TREE_CODE (orig) == COMPLEX_TYPE)
2249 return fold_convert_loc (loc, type,
2250 fold_build1_loc (loc, REALPART_EXPR,
2251 TREE_TYPE (orig), arg));
2252 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2253 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2254 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2256 case REAL_TYPE:
2257 if (TREE_CODE (arg) == INTEGER_CST)
2259 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2260 if (tem != NULL_TREE)
2261 return tem;
2263 else if (TREE_CODE (arg) == REAL_CST)
2265 tem = fold_convert_const (NOP_EXPR, type, arg);
2266 if (tem != NULL_TREE)
2267 return tem;
2269 else if (TREE_CODE (arg) == FIXED_CST)
2271 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2272 if (tem != NULL_TREE)
2273 return tem;
2276 switch (TREE_CODE (orig))
2278 case INTEGER_TYPE:
2279 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2280 case POINTER_TYPE: case REFERENCE_TYPE:
2281 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2283 case REAL_TYPE:
2284 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2286 case FIXED_POINT_TYPE:
2287 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2289 case COMPLEX_TYPE:
2290 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2291 return fold_convert_loc (loc, type, tem);
2293 default:
2294 gcc_unreachable ();
2297 case FIXED_POINT_TYPE:
2298 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2299 || TREE_CODE (arg) == REAL_CST)
2301 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2302 if (tem != NULL_TREE)
2303 goto fold_convert_exit;
2306 switch (TREE_CODE (orig))
2308 case FIXED_POINT_TYPE:
2309 case INTEGER_TYPE:
2310 case ENUMERAL_TYPE:
2311 case BOOLEAN_TYPE:
2312 case REAL_TYPE:
2313 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2315 case COMPLEX_TYPE:
2316 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2317 return fold_convert_loc (loc, type, tem);
2319 default:
2320 gcc_unreachable ();
2323 case COMPLEX_TYPE:
2324 switch (TREE_CODE (orig))
2326 case INTEGER_TYPE:
2327 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2328 case POINTER_TYPE: case REFERENCE_TYPE:
2329 case REAL_TYPE:
2330 case FIXED_POINT_TYPE:
2331 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2332 fold_convert_loc (loc, TREE_TYPE (type), arg),
2333 fold_convert_loc (loc, TREE_TYPE (type),
2334 integer_zero_node));
2335 case COMPLEX_TYPE:
2337 tree rpart, ipart;
2339 if (TREE_CODE (arg) == COMPLEX_EXPR)
2341 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2342 TREE_OPERAND (arg, 0));
2343 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2344 TREE_OPERAND (arg, 1));
2345 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2348 arg = save_expr (arg);
2349 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2350 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2351 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2352 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2353 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2356 default:
2357 gcc_unreachable ();
2360 case VECTOR_TYPE:
2361 if (integer_zerop (arg))
2362 return build_zero_vector (type);
2363 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2364 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2365 || TREE_CODE (orig) == VECTOR_TYPE);
2366 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2368 case VOID_TYPE:
2369 tem = fold_ignored_result (arg);
2370 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2372 default:
2373 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2374 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2375 gcc_unreachable ();
2377 fold_convert_exit:
2378 protected_set_expr_location_unshare (tem, loc);
2379 return tem;
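/* For example (illustrative): calling fold_convert_loc with a
   REAL_TYPE target and a COMPLEX_TYPE source takes the COMPLEX_TYPE
   arm above, so converting a complex double C to double folds to the
   conversion of REALPART_EXPR <C>, discarding the imaginary part.  */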
2382 /* Return false if expr can be assumed not to be an lvalue, true
2383 otherwise. */
2385 static bool
2386 maybe_lvalue_p (const_tree x)
2388 /* We only need to wrap lvalue tree codes. */
2389 switch (TREE_CODE (x))
2391 case VAR_DECL:
2392 case PARM_DECL:
2393 case RESULT_DECL:
2394 case LABEL_DECL:
2395 case FUNCTION_DECL:
2396 case SSA_NAME:
2398 case COMPONENT_REF:
2399 case MEM_REF:
2400 case INDIRECT_REF:
2401 case ARRAY_REF:
2402 case ARRAY_RANGE_REF:
2403 case BIT_FIELD_REF:
2404 case OBJ_TYPE_REF:
2406 case REALPART_EXPR:
2407 case IMAGPART_EXPR:
2408 case PREINCREMENT_EXPR:
2409 case PREDECREMENT_EXPR:
2410 case SAVE_EXPR:
2411 case TRY_CATCH_EXPR:
2412 case WITH_CLEANUP_EXPR:
2413 case COMPOUND_EXPR:
2414 case MODIFY_EXPR:
2415 case TARGET_EXPR:
2416 case COND_EXPR:
2417 case BIND_EXPR:
2418 break;
2420 default:
2421 /* Assume the worst for front-end tree codes. */
2422 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2423 break;
2424 return false;
2427 return true;
2430 /* Return an expr equal to X but certainly not valid as an lvalue. */
2432 tree
2433 non_lvalue_loc (location_t loc, tree x)
2435 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2436 us. */
2437 if (in_gimple_form)
2438 return x;
2440 if (! maybe_lvalue_p (x))
2441 return x;
2442 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2445 /* When pedantic, return an expr equal to X but certainly not valid as a
2446 pedantic lvalue. Otherwise, return X. */
2448 static tree
2449 pedantic_non_lvalue_loc (location_t loc, tree x)
2451 return protected_set_expr_location_unshare (x, loc);
2454 /* Given a tree comparison code, return the code that is the logical inverse.
2455 It is generally not safe to do this for floating-point comparisons, except
2456 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2457 ERROR_MARK in this case. */
2459 enum tree_code
2460 invert_tree_comparison (enum tree_code code, bool honor_nans)
2462 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2463 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2464 return ERROR_MARK;
2466 switch (code)
2468 case EQ_EXPR:
2469 return NE_EXPR;
2470 case NE_EXPR:
2471 return EQ_EXPR;
2472 case GT_EXPR:
2473 return honor_nans ? UNLE_EXPR : LE_EXPR;
2474 case GE_EXPR:
2475 return honor_nans ? UNLT_EXPR : LT_EXPR;
2476 case LT_EXPR:
2477 return honor_nans ? UNGE_EXPR : GE_EXPR;
2478 case LE_EXPR:
2479 return honor_nans ? UNGT_EXPR : GT_EXPR;
2480 case LTGT_EXPR:
2481 return UNEQ_EXPR;
2482 case UNEQ_EXPR:
2483 return LTGT_EXPR;
2484 case UNGT_EXPR:
2485 return LE_EXPR;
2486 case UNGE_EXPR:
2487 return LT_EXPR;
2488 case UNLT_EXPR:
2489 return GE_EXPR;
2490 case UNLE_EXPR:
2491 return GT_EXPR;
2492 case ORDERED_EXPR:
2493 return UNORDERED_EXPR;
2494 case UNORDERED_EXPR:
2495 return ORDERED_EXPR;
2496 default:
2497 gcc_unreachable ();
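/* For example (illustrative): invert_tree_comparison (GT_EXPR, true)
   yields UNLE_EXPR, since !(a > b) must also hold when either operand
   is a NaN.  If flag_trapping_math is set as well, the same call
   returns ERROR_MARK: replacing the trapping GT_EXPR by the
   non-trapping UNLE_EXPR would change which inputs raise an invalid
   operation exception.  */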
2501 /* Similar, but return the comparison that results if the operands are
2502 swapped. This is safe for floating-point. */
2504 enum tree_code
2505 swap_tree_comparison (enum tree_code code)
2507 switch (code)
2509 case EQ_EXPR:
2510 case NE_EXPR:
2511 case ORDERED_EXPR:
2512 case UNORDERED_EXPR:
2513 case LTGT_EXPR:
2514 case UNEQ_EXPR:
2515 return code;
2516 case GT_EXPR:
2517 return LT_EXPR;
2518 case GE_EXPR:
2519 return LE_EXPR;
2520 case LT_EXPR:
2521 return GT_EXPR;
2522 case LE_EXPR:
2523 return GE_EXPR;
2524 case UNGT_EXPR:
2525 return UNLT_EXPR;
2526 case UNGE_EXPR:
2527 return UNLE_EXPR;
2528 case UNLT_EXPR:
2529 return UNGT_EXPR;
2530 case UNLE_EXPR:
2531 return UNGE_EXPR;
2532 default:
2533 gcc_unreachable ();
2538 /* Convert a comparison tree code from an enum tree_code representation
2539 into a compcode bit-based encoding. This function is the inverse of
2540 compcode_to_comparison. */
2542 static enum comparison_code
2543 comparison_to_compcode (enum tree_code code)
2545 switch (code)
2547 case LT_EXPR:
2548 return COMPCODE_LT;
2549 case EQ_EXPR:
2550 return COMPCODE_EQ;
2551 case LE_EXPR:
2552 return COMPCODE_LE;
2553 case GT_EXPR:
2554 return COMPCODE_GT;
2555 case NE_EXPR:
2556 return COMPCODE_NE;
2557 case GE_EXPR:
2558 return COMPCODE_GE;
2559 case ORDERED_EXPR:
2560 return COMPCODE_ORD;
2561 case UNORDERED_EXPR:
2562 return COMPCODE_UNORD;
2563 case UNLT_EXPR:
2564 return COMPCODE_UNLT;
2565 case UNEQ_EXPR:
2566 return COMPCODE_UNEQ;
2567 case UNLE_EXPR:
2568 return COMPCODE_UNLE;
2569 case UNGT_EXPR:
2570 return COMPCODE_UNGT;
2571 case LTGT_EXPR:
2572 return COMPCODE_LTGT;
2573 case UNGE_EXPR:
2574 return COMPCODE_UNGE;
2575 default:
2576 gcc_unreachable ();
2580 /* Convert a compcode bit-based encoding of a comparison operator back
2581 to GCC's enum tree_code representation. This function is the
2582 inverse of comparison_to_compcode. */
2584 static enum tree_code
2585 compcode_to_comparison (enum comparison_code code)
2587 switch (code)
2589 case COMPCODE_LT:
2590 return LT_EXPR;
2591 case COMPCODE_EQ:
2592 return EQ_EXPR;
2593 case COMPCODE_LE:
2594 return LE_EXPR;
2595 case COMPCODE_GT:
2596 return GT_EXPR;
2597 case COMPCODE_NE:
2598 return NE_EXPR;
2599 case COMPCODE_GE:
2600 return GE_EXPR;
2601 case COMPCODE_ORD:
2602 return ORDERED_EXPR;
2603 case COMPCODE_UNORD:
2604 return UNORDERED_EXPR;
2605 case COMPCODE_UNLT:
2606 return UNLT_EXPR;
2607 case COMPCODE_UNEQ:
2608 return UNEQ_EXPR;
2609 case COMPCODE_UNLE:
2610 return UNLE_EXPR;
2611 case COMPCODE_UNGT:
2612 return UNGT_EXPR;
2613 case COMPCODE_LTGT:
2614 return LTGT_EXPR;
2615 case COMPCODE_UNGE:
2616 return UNGE_EXPR;
2617 default:
2618 gcc_unreachable ();
2622 /* Return a tree for the comparison which is the combination of
2623 doing the AND or OR (depending on CODE) of the two operations LCODE
2624 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2625 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2626 if this makes the transformation invalid. */
2628 tree
2629 combine_comparisons (location_t loc,
2630 enum tree_code code, enum tree_code lcode,
2631 enum tree_code rcode, tree truth_type,
2632 tree ll_arg, tree lr_arg)
2634 bool honor_nans = HONOR_NANS (ll_arg);
2635 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2636 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2637 int compcode;
2639 switch (code)
2641 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2642 compcode = lcompcode & rcompcode;
2643 break;
2645 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2646 compcode = lcompcode | rcompcode;
2647 break;
2649 default:
2650 return NULL_TREE;
2653 if (!honor_nans)
2655 /* Eliminate unordered comparisons, as well as LTGT and ORD
2656 which are not used unless the mode has NaNs. */
2657 compcode &= ~COMPCODE_UNORD;
2658 if (compcode == COMPCODE_LTGT)
2659 compcode = COMPCODE_NE;
2660 else if (compcode == COMPCODE_ORD)
2661 compcode = COMPCODE_TRUE;
2663 else if (flag_trapping_math)
2665 /* Check that the original operation and the optimized ones will trap
2666 under the same condition. */
2667 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2668 && (lcompcode != COMPCODE_EQ)
2669 && (lcompcode != COMPCODE_ORD);
2670 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2671 && (rcompcode != COMPCODE_EQ)
2672 && (rcompcode != COMPCODE_ORD);
2673 bool trap = (compcode & COMPCODE_UNORD) == 0
2674 && (compcode != COMPCODE_EQ)
2675 && (compcode != COMPCODE_ORD);
2677 /* In a short-circuited boolean expression the LHS might be
2678 such that the RHS, if evaluated, will never trap. For
2679 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2680 if neither x nor y is NaN. (This is a mixed blessing: for
2681 example, the expression above will never trap, hence
2682 optimizing it to x < y would be invalid). */
2683 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2684 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2685 rtrap = false;
2687 /* If the comparison was short-circuited, and only the RHS
2688 trapped, we may now generate a spurious trap. */
2689 if (rtrap && !ltrap
2690 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2691 return NULL_TREE;
2693 /* If we changed the conditions that cause a trap, we lose. */
2694 if ((ltrap || rtrap) != trap)
2695 return NULL_TREE;
2698 if (compcode == COMPCODE_TRUE)
2699 return constant_boolean_node (true, truth_type);
2700 else if (compcode == COMPCODE_FALSE)
2701 return constant_boolean_node (false, truth_type);
2702 else
2704 enum tree_code tcode;
2706 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2707 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
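/* A worked example of the bit encoding (illustrative): combining
   (x <= y) && (x >= y) when X and Y cannot be NaN gives

     COMPCODE_LE & COMPCODE_GE == 3 & 6 == 2 == COMPCODE_EQ,

   so the pair folds to x == y.  Likewise (x < y) || (x == y) gives
   1 | 2 == 3 == COMPCODE_LE, i.e. x <= y.  */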
2711 /* Return nonzero if two operands (typically of the same tree node)
2712 are necessarily equal. FLAGS modifies behavior as follows:
2714 If OEP_ONLY_CONST is set, only return nonzero for constants.
2715 This function tests whether the operands are indistinguishable;
2716 it does not test whether they are equal using C's == operation.
2717 The distinction is important for IEEE floating point, because
2718 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2719 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2721 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2722 even though it may hold multiple values during a function.
2723 This is because a GCC tree node guarantees that nothing else is
2724 executed between the evaluation of its "operands" (which may often
2725 be evaluated in arbitrary order). Hence if the operands themselves
2726 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2727 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2728 unset means assuming isochronic (or instantaneous) tree equivalence.
2729 Unless comparing arbitrary expression trees, such as from different
2730 statements, this flag can usually be left unset.
2732 If OEP_PURE_SAME is set, then pure functions with identical arguments
2733 are considered the same. It is used when the caller has other ways
2734 to ensure that global memory is unchanged in between.
2736 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2737 not values of expressions.
2739 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2740 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2742 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2743 any operand with side effects. This is unnecessarily conservative in the
2744 case we know that arg0 and arg1 are in disjoint code paths (such as in
2745 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2746 addresses with TREE_CONSTANT flag set so we know that &var == &var
2747 even if var is volatile. */
2749 int
2750 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2752 /* When checking, verify at the outermost operand_equal_p call that
2753 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
2754 hash value. */
2755 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2757 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2759 if (arg0 != arg1)
2761 inchash::hash hstate0 (0), hstate1 (0);
2762 inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2763 inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2764 hashval_t h0 = hstate0.end ();
2765 hashval_t h1 = hstate1.end ();
2766 gcc_assert (h0 == h1);
2768 return 1;
2770 else
2771 return 0;
2774 /* If either is ERROR_MARK, they aren't equal. */
2775 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2776 || TREE_TYPE (arg0) == error_mark_node
2777 || TREE_TYPE (arg1) == error_mark_node)
2778 return 0;
2780 /* Similar, if either does not have a type (like a released SSA name),
2781 they aren't equal. */
2782 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2783 return 0;
2785 /* We cannot consider pointers to different address spaces equal. */
2786 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2787 && POINTER_TYPE_P (TREE_TYPE (arg1))
2788 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2789 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2790 return 0;
2792 /* Check equality of integer constants before bailing out due to
2793 precision differences. */
2794 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2796 /* Address of INTEGER_CST is not defined; check that we did not forget
2797 to drop the OEP_ADDRESS_OF flags. */
2798 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2799 return tree_int_cst_equal (arg0, arg1);
2802 if (!(flags & OEP_ADDRESS_OF))
2804 /* If the two types don't have the same signedness, then we can't consider
2805 them equal. We must check this before the STRIP_NOPS calls
2806 because they may change the signedness of the arguments. As pointers
2807 strictly don't have a signedness, require either two pointers or
2808 two non-pointers as well. */
2809 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2810 || POINTER_TYPE_P (TREE_TYPE (arg0))
2811 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2812 return 0;
2814 /* If the two types don't have the same precision, then it is not safe
2815 to strip NOPs. */
2816 if (element_precision (TREE_TYPE (arg0))
2817 != element_precision (TREE_TYPE (arg1)))
2818 return 0;
2820 STRIP_NOPS (arg0);
2821 STRIP_NOPS (arg1);
2823 #if 0
2824 /* FIXME: the Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
2825 sanity check once the issue is solved. */
2826 else
2827 /* Addresses of conversions and SSA_NAMEs (and many other things)
2828 are not defined. Check that we did not forget to drop the
2829 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2830 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2831 && TREE_CODE (arg0) != SSA_NAME);
2832 #endif
2834 /* In case both args are comparisons but with different comparison
2835 code, try to swap the comparison operands of one arg to produce
2836 a match and compare that variant. */
2837 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2838 && COMPARISON_CLASS_P (arg0)
2839 && COMPARISON_CLASS_P (arg1))
2841 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2843 if (TREE_CODE (arg0) == swap_code)
2844 return operand_equal_p (TREE_OPERAND (arg0, 0),
2845 TREE_OPERAND (arg1, 1), flags)
2846 && operand_equal_p (TREE_OPERAND (arg0, 1),
2847 TREE_OPERAND (arg1, 0), flags);
2850 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2852 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2853 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2855 else if (flags & OEP_ADDRESS_OF)
2857 /* If we are interested in comparing addresses ignore
2858 MEM_REF wrappings of the base that can appear just for
2859 TBAA reasons. */
2860 if (TREE_CODE (arg0) == MEM_REF
2861 && DECL_P (arg1)
2862 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2863 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2864 && integer_zerop (TREE_OPERAND (arg0, 1)))
2865 return 1;
2866 else if (TREE_CODE (arg1) == MEM_REF
2867 && DECL_P (arg0)
2868 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2869 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2870 && integer_zerop (TREE_OPERAND (arg1, 1)))
2871 return 1;
2872 return 0;
2874 else
2875 return 0;
2878 /* When not checking addresses, this is needed for conversions and for
2879 COMPONENT_REF. Might as well play it safe and always test this. */
2880 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2881 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2882 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2883 && !(flags & OEP_ADDRESS_OF)))
2884 return 0;
2886 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2887 We don't care about side effects in that case because the SAVE_EXPR
2888 takes care of that for us. In all other cases, two expressions are
2889 equal if they have no side effects. If we have two identical
2890 expressions with side effects that should be treated the same due
2891 to the only side effects being identical SAVE_EXPR's, that will
2892 be detected in the recursive calls below.
2893 If we are taking an invariant address of two identical objects
2894 they are necessarily equal as well. */
2895 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2896 && (TREE_CODE (arg0) == SAVE_EXPR
2897 || (flags & OEP_MATCH_SIDE_EFFECTS)
2898 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2899 return 1;
2901 /* Next handle constant cases, those for which we can return 1 even
2902 if ONLY_CONST is set. */
2903 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2904 switch (TREE_CODE (arg0))
2906 case INTEGER_CST:
2907 return tree_int_cst_equal (arg0, arg1);
2909 case FIXED_CST:
2910 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2911 TREE_FIXED_CST (arg1));
2913 case REAL_CST:
2914 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2915 return 1;
2918 if (!HONOR_SIGNED_ZEROS (arg0))
2920 /* If we do not distinguish between signed and unsigned zero,
2921 consider them equal. */
2922 if (real_zerop (arg0) && real_zerop (arg1))
2923 return 1;
2925 return 0;
2927 case VECTOR_CST:
2929 unsigned i;
2931 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2932 return 0;
2934 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2936 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2937 VECTOR_CST_ELT (arg1, i), flags))
2938 return 0;
2940 return 1;
2943 case COMPLEX_CST:
2944 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2945 flags)
2946 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2947 flags));
2949 case STRING_CST:
2950 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2951 && ! memcmp (TREE_STRING_POINTER (arg0),
2952 TREE_STRING_POINTER (arg1),
2953 TREE_STRING_LENGTH (arg0)));
2955 case ADDR_EXPR:
2956 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2957 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2958 flags | OEP_ADDRESS_OF
2959 | OEP_MATCH_SIDE_EFFECTS);
2960 case CONSTRUCTOR:
2961 /* In GIMPLE empty constructors are allowed in initializers of
2962 aggregates. */
2963 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
2964 default:
2965 break;
2968 if (flags & OEP_ONLY_CONST)
2969 return 0;
2971 /* Define macros to test an operand from arg0 and arg1 for equality and a
2972 variant that allows null and views null as being different from any
2973 non-null value. In the latter case, if either is null, then both
2974 must be; otherwise, do the normal comparison. */
2975 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2976 TREE_OPERAND (arg1, N), flags)
2978 #define OP_SAME_WITH_NULL(N) \
2979 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2980 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2982 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2984 case tcc_unary:
2985 /* Two conversions are equal only if signedness and modes match. */
2986 switch (TREE_CODE (arg0))
2988 CASE_CONVERT:
2989 case FIX_TRUNC_EXPR:
2990 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2991 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2992 return 0;
2993 break;
2994 default:
2995 break;
2998 return OP_SAME (0);
3001 case tcc_comparison:
3002 case tcc_binary:
3003 if (OP_SAME (0) && OP_SAME (1))
3004 return 1;
3006 /* For commutative ops, allow the other order. */
3007 return (commutative_tree_code (TREE_CODE (arg0))
3008 && operand_equal_p (TREE_OPERAND (arg0, 0),
3009 TREE_OPERAND (arg1, 1), flags)
3010 && operand_equal_p (TREE_OPERAND (arg0, 1),
3011 TREE_OPERAND (arg1, 0), flags));
3013 case tcc_reference:
3014 /* If either of the pointer (or reference) expressions we are
3015 dereferencing contain a side effect, these cannot be equal,
3016 but their addresses can be. */
3017 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3018 && (TREE_SIDE_EFFECTS (arg0)
3019 || TREE_SIDE_EFFECTS (arg1)))
3020 return 0;
3022 switch (TREE_CODE (arg0))
3024 case INDIRECT_REF:
3025 if (!(flags & OEP_ADDRESS_OF)
3026 && (TYPE_ALIGN (TREE_TYPE (arg0))
3027 != TYPE_ALIGN (TREE_TYPE (arg1))))
3028 return 0;
3029 flags &= ~OEP_ADDRESS_OF;
3030 return OP_SAME (0);
3032 case IMAGPART_EXPR:
3033 /* Require the same offset. */
3034 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3035 TYPE_SIZE (TREE_TYPE (arg1)),
3036 flags & ~OEP_ADDRESS_OF))
3037 return 0;
3039 /* Fallthru. */
3040 case REALPART_EXPR:
3041 case VIEW_CONVERT_EXPR:
3042 return OP_SAME (0);
3044 case TARGET_MEM_REF:
3045 case MEM_REF:
3046 if (!(flags & OEP_ADDRESS_OF))
3048 /* Require equal access sizes */
3049 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3050 && (!TYPE_SIZE (TREE_TYPE (arg0))
3051 || !TYPE_SIZE (TREE_TYPE (arg1))
3052 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3053 TYPE_SIZE (TREE_TYPE (arg1)),
3054 flags)))
3055 return 0;
3056 /* Verify that access happens in similar types. */
3057 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3058 return 0;
3059 /* Verify that accesses are TBAA compatible. */
3060 if (!alias_ptr_types_compatible_p
3061 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3062 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3063 || (MR_DEPENDENCE_CLIQUE (arg0)
3064 != MR_DEPENDENCE_CLIQUE (arg1))
3065 || (MR_DEPENDENCE_BASE (arg0)
3066 != MR_DEPENDENCE_BASE (arg1)))
3067 return 0;
3068 /* Verify that alignment is compatible. */
3069 if (TYPE_ALIGN (TREE_TYPE (arg0))
3070 != TYPE_ALIGN (TREE_TYPE (arg1)))
3071 return 0;
3073 flags &= ~OEP_ADDRESS_OF;
3074 return (OP_SAME (0) && OP_SAME (1)
3075 /* TARGET_MEM_REFs require equal extra operands. */
3076 && (TREE_CODE (arg0) != TARGET_MEM_REF
3077 || (OP_SAME_WITH_NULL (2)
3078 && OP_SAME_WITH_NULL (3)
3079 && OP_SAME_WITH_NULL (4))));
3081 case ARRAY_REF:
3082 case ARRAY_RANGE_REF:
3083 if (!OP_SAME (0))
3084 return 0;
3085 flags &= ~OEP_ADDRESS_OF;
3086 /* Compare the array index by value first if it is constant, as the
3087 indexes may have different types but the same value here. */
3088 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3089 TREE_OPERAND (arg1, 1))
3090 || OP_SAME (1))
3091 && OP_SAME_WITH_NULL (2)
3092 && OP_SAME_WITH_NULL (3)
3093 /* Compare low bound and element size as with OEP_ADDRESS_OF
3094 we have to account for the offset of the ref. */
3095 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3096 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3097 || (operand_equal_p (array_ref_low_bound
3098 (CONST_CAST_TREE (arg0)),
3099 array_ref_low_bound
3100 (CONST_CAST_TREE (arg1)), flags)
3101 && operand_equal_p (array_ref_element_size
3102 (CONST_CAST_TREE (arg0)),
3103 array_ref_element_size
3104 (CONST_CAST_TREE (arg1)),
3105 flags))));
3107 case COMPONENT_REF:
3108 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3109 may be NULL when we're called to compare MEM_EXPRs. */
3110 if (!OP_SAME_WITH_NULL (0)
3111 || !OP_SAME (1))
3112 return 0;
3113 flags &= ~OEP_ADDRESS_OF;
3114 return OP_SAME_WITH_NULL (2);
3116 case BIT_FIELD_REF:
3117 if (!OP_SAME (0))
3118 return 0;
3119 flags &= ~OEP_ADDRESS_OF;
3120 return OP_SAME (1) && OP_SAME (2);
3122 default:
3123 return 0;
3126 case tcc_expression:
3127 switch (TREE_CODE (arg0))
3129 case ADDR_EXPR:
3130 /* Be sure we pass the right ADDRESS_OF flag. */
3131 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3132 return operand_equal_p (TREE_OPERAND (arg0, 0),
3133 TREE_OPERAND (arg1, 0),
3134 flags | OEP_ADDRESS_OF);
3136 case TRUTH_NOT_EXPR:
3137 return OP_SAME (0);
3139 case TRUTH_ANDIF_EXPR:
3140 case TRUTH_ORIF_EXPR:
3141 return OP_SAME (0) && OP_SAME (1);
3143 case FMA_EXPR:
3144 case WIDEN_MULT_PLUS_EXPR:
3145 case WIDEN_MULT_MINUS_EXPR:
3146 if (!OP_SAME (2))
3147 return 0;
3148 /* The multiplication operands are commutative. */
3149 /* FALLTHRU */
3151 case TRUTH_AND_EXPR:
3152 case TRUTH_OR_EXPR:
3153 case TRUTH_XOR_EXPR:
3154 if (OP_SAME (0) && OP_SAME (1))
3155 return 1;
3157 /* Otherwise take into account this is a commutative operation. */
3158 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3159 TREE_OPERAND (arg1, 1), flags)
3160 && operand_equal_p (TREE_OPERAND (arg0, 1),
3161 TREE_OPERAND (arg1, 0), flags));
3163 case COND_EXPR:
3164 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3165 return 0;
3166 flags &= ~OEP_ADDRESS_OF;
3167 return OP_SAME (0);
3169 case VEC_COND_EXPR:
3170 case DOT_PROD_EXPR:
3171 case BIT_INSERT_EXPR:
3172 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3174 case MODIFY_EXPR:
3175 case INIT_EXPR:
3176 case COMPOUND_EXPR:
3177 case PREDECREMENT_EXPR:
3178 case PREINCREMENT_EXPR:
3179 case POSTDECREMENT_EXPR:
3180 case POSTINCREMENT_EXPR:
3181 if (flags & OEP_LEXICOGRAPHIC)
3182 return OP_SAME (0) && OP_SAME (1);
3183 return 0;
3185 case CLEANUP_POINT_EXPR:
3186 case EXPR_STMT:
3187 if (flags & OEP_LEXICOGRAPHIC)
3188 return OP_SAME (0);
3189 return 0;
3191 default:
3192 return 0;
3195 case tcc_vl_exp:
3196 switch (TREE_CODE (arg0))
3198 case CALL_EXPR:
3199 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3200 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3201 /* If the two CALL_EXPRs are not both internal calls or both normal
3202 function calls, then they are not equal. */
3203 return 0;
3204 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3206 /* If the CALL_EXPRs call different internal functions, then they
3207 are not equal. */
3208 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3209 return 0;
3211 else
3213 /* If the CALL_EXPRs call different functions, then they are not
3214 equal. */
3215 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3216 flags))
3217 return 0;
3220 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3222 unsigned int cef = call_expr_flags (arg0);
3223 if (flags & OEP_PURE_SAME)
3224 cef &= ECF_CONST | ECF_PURE;
3225 else
3226 cef &= ECF_CONST;
3227 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3228 return 0;
3231 /* Now see if all the arguments are the same. */
3233 const_call_expr_arg_iterator iter0, iter1;
3234 const_tree a0, a1;
3235 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3236 a1 = first_const_call_expr_arg (arg1, &iter1);
3237 a0 && a1;
3238 a0 = next_const_call_expr_arg (&iter0),
3239 a1 = next_const_call_expr_arg (&iter1))
3240 if (! operand_equal_p (a0, a1, flags))
3241 return 0;
3243 /* If we get here and both argument lists are exhausted
3244 then the CALL_EXPRs are equal. */
3245 return ! (a0 || a1);
3247 default:
3248 return 0;
3251 case tcc_declaration:
3252 /* Consider __builtin_sqrt equal to sqrt. */
3253 return (TREE_CODE (arg0) == FUNCTION_DECL
3254 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3255 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3256 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3258 case tcc_exceptional:
3259 if (TREE_CODE (arg0) == CONSTRUCTOR)
3261 /* In GIMPLE constructors are used only to build vectors from
3262 elements. Individual elements in the constructor must be
3263 indexed in increasing order and form an initial sequence.
3265 We make no effort to compare constructors in GENERIC.
3266 (see sem_variable::equals in ipa-icf which can do so for
3267 constants). */
3268 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3269 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3270 return 0;
3272 /* Be sure that vectors constructed have the same representation.
3273 So far we have only checked that element precisions and modes match.
3274 Vectors may be BLKmode and thus also check that the number of
3275 parts match. */
3276 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3277 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3278 return 0;
3280 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3281 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3282 unsigned int len = vec_safe_length (v0);
3284 if (len != vec_safe_length (v1))
3285 return 0;
3287 for (unsigned int i = 0; i < len; i++)
3289 constructor_elt *c0 = &(*v0)[i];
3290 constructor_elt *c1 = &(*v1)[i];
3292 if (!operand_equal_p (c0->value, c1->value, flags)
3293 /* In GIMPLE the indexes can be either NULL or matching i.
3294 Double check this so we won't get false
3295 positives for GENERIC. */
3296 || (c0->index
3297 && (TREE_CODE (c0->index) != INTEGER_CST
3298 || !compare_tree_int (c0->index, i)))
3299 || (c1->index
3300 && (TREE_CODE (c1->index) != INTEGER_CST
3301 || !compare_tree_int (c1->index, i))))
3302 return 0;
3304 return 1;
3306 else if (TREE_CODE (arg0) == STATEMENT_LIST
3307 && (flags & OEP_LEXICOGRAPHIC))
3309 /* Compare the STATEMENT_LISTs. */
3310 tree_stmt_iterator tsi1, tsi2;
3311 tree body1 = CONST_CAST_TREE (arg0);
3312 tree body2 = CONST_CAST_TREE (arg1);
3313 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3314 tsi_next (&tsi1), tsi_next (&tsi2))
3316 /* The lists don't have the same number of statements. */
3317 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3318 return 0;
3319 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3320 return 1;
3321 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3322 OEP_LEXICOGRAPHIC))
3323 return 0;
3326 return 0;
3328 case tcc_statement:
3329 switch (TREE_CODE (arg0))
3331 case RETURN_EXPR:
3332 if (flags & OEP_LEXICOGRAPHIC)
3333 return OP_SAME_WITH_NULL (0);
3334 return 0;
3335 default:
3336 return 0;
3339 default:
3340 return 0;
3343 #undef OP_SAME
3344 #undef OP_SAME_WITH_NULL
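/* For example (illustrative): for a VAR_DECL A, operand_equal_p (A, A, 0)
   returns nonzero because A trivially evaluates to the same value in
   both positions, while operand_equal_p (A, A, OEP_ONLY_CONST) returns
   zero because A is not a constant.  Two distinct NaN REAL_CSTs with
   identical bit patterns also compare equal here, matching the
   "indistinguishable, but NaN != NaN" note above.  */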
3347 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3348 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3350 When in doubt, return 0. */
3352 static int
3353 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3355 int unsignedp1, unsignedpo;
3356 tree primarg0, primarg1, primother;
3357 unsigned int correct_width;
3359 if (operand_equal_p (arg0, arg1, 0))
3360 return 1;
3362 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3363 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3364 return 0;
3366 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3367 and see if the inner values are the same. This removes any
3368 signedness comparison, which doesn't matter here. */
3369 primarg0 = arg0, primarg1 = arg1;
3370 STRIP_NOPS (primarg0);
3371 STRIP_NOPS (primarg1);
3372 if (operand_equal_p (primarg0, primarg1, 0))
3373 return 1;
3375 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3376 actual comparison operand, ARG0.
3378 First throw away any conversions to wider types
3379 already present in the operands. */
3381 primarg1 = get_narrower (arg1, &unsignedp1);
3382 primother = get_narrower (other, &unsignedpo);
3384 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3385 if (unsignedp1 == unsignedpo
3386 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3387 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3389 tree type = TREE_TYPE (arg0);
3391 /* Make sure shorter operand is extended the right way
3392 to match the longer operand. */
3393 primarg1 = fold_convert (signed_or_unsigned_type_for
3394 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3396 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3397 return 1;
3400 return 0;
3403 /* See if ARG is an expression that is either a comparison or is performing
3404 arithmetic on comparisons. The comparisons must only be comparing
3405 two different values, which will be stored in *CVAL1 and *CVAL2; if
3406 they are nonzero it means that some operands have already been found.
3407 No variables may be used anywhere else in the expression except in the
3408 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3409 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3411 If this is true, return 1. Otherwise, return zero. */
3413 static int
3414 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3416 enum tree_code code = TREE_CODE (arg);
3417 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3419 /* We can handle some of the tcc_expression cases here. */
3420 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3421 tclass = tcc_unary;
3422 else if (tclass == tcc_expression
3423 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3424 || code == COMPOUND_EXPR))
3425 tclass = tcc_binary;
3427 else if (tclass == tcc_expression && code == SAVE_EXPR
3428 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3430 /* If we've already found a CVAL1 or CVAL2, this expression is
3431 too complex to handle. */
3432 if (*cval1 || *cval2)
3433 return 0;
3435 tclass = tcc_unary;
3436 *save_p = 1;
3439 switch (tclass)
3441 case tcc_unary:
3442 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3444 case tcc_binary:
3445 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3446 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3447 cval1, cval2, save_p));
3449 case tcc_constant:
3450 return 1;
3452 case tcc_expression:
3453 if (code == COND_EXPR)
3454 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3455 cval1, cval2, save_p)
3456 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3457 cval1, cval2, save_p)
3458 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3459 cval1, cval2, save_p));
3460 return 0;
3462 case tcc_comparison:
3463 /* First see if we can handle the first operand, then the second. For
3464 the second operand, we know *CVAL1 can't be zero. It must be that
3465 one side of the comparison is each of the values; test for the
3466 case where this isn't true by failing if the two operands
3467 are the same. */
3469 if (operand_equal_p (TREE_OPERAND (arg, 0),
3470 TREE_OPERAND (arg, 1), 0))
3471 return 0;
3473 if (*cval1 == 0)
3474 *cval1 = TREE_OPERAND (arg, 0);
3475 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3477 else if (*cval2 == 0)
3478 *cval2 = TREE_OPERAND (arg, 0);
3479 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3481 else
3482 return 0;
3484 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3486 else if (*cval2 == 0)
3487 *cval2 = TREE_OPERAND (arg, 1);
3488 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3490 else
3491 return 0;
3493 return 1;
3495 default:
3496 return 0;
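/* For example (illustrative): for ARG = (a < b) | (a == b), the walk
   above succeeds with *CVAL1 = a and *CVAL2 = b, because every leaf
   of the expression is one of those two values; (a < b) | (c == d)
   fails, since it compares more than two distinct values.  */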
3500 /* ARG is a tree that is known to contain just arithmetic operations and
3501 comparisons. Evaluate the operations in the tree substituting NEW0 for
3502 any occurrence of OLD0 as an operand of a comparison and likewise for
3503 NEW1 and OLD1. */
3505 static tree
3506 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3507 tree old1, tree new1)
3509 tree type = TREE_TYPE (arg);
3510 enum tree_code code = TREE_CODE (arg);
3511 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3513 /* We can handle some of the tcc_expression cases here. */
3514 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3515 tclass = tcc_unary;
3516 else if (tclass == tcc_expression
3517 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3518 tclass = tcc_binary;
3520 switch (tclass)
3522 case tcc_unary:
3523 return fold_build1_loc (loc, code, type,
3524 eval_subst (loc, TREE_OPERAND (arg, 0),
3525 old0, new0, old1, new1));
3527 case tcc_binary:
3528 return fold_build2_loc (loc, code, type,
3529 eval_subst (loc, TREE_OPERAND (arg, 0),
3530 old0, new0, old1, new1),
3531 eval_subst (loc, TREE_OPERAND (arg, 1),
3532 old0, new0, old1, new1));
3534 case tcc_expression:
3535 switch (code)
3537 case SAVE_EXPR:
3538 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3539 old1, new1);
3541 case COMPOUND_EXPR:
3542 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3543 old1, new1);
3545 case COND_EXPR:
3546 return fold_build3_loc (loc, code, type,
3547 eval_subst (loc, TREE_OPERAND (arg, 0),
3548 old0, new0, old1, new1),
3549 eval_subst (loc, TREE_OPERAND (arg, 1),
3550 old0, new0, old1, new1),
3551 eval_subst (loc, TREE_OPERAND (arg, 2),
3552 old0, new0, old1, new1));
3553 default:
3554 break;
3556 /* Fall through - ??? */
3558 case tcc_comparison:
3560 tree arg0 = TREE_OPERAND (arg, 0);
3561 tree arg1 = TREE_OPERAND (arg, 1);
3563 /* We need to check both for exact equality and tree equality. The
3564 former will be true if the operand has a side-effect. In that
3565 case, we know the operand occurred exactly once. */
3567 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3568 arg0 = new0;
3569 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3570 arg0 = new1;
3572 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3573 arg1 = new0;
3574 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3575 arg1 = new1;
3577 return fold_build2_loc (loc, code, type, arg0, arg1);
3580 default:
3581 return arg;
3585 /* Return a tree for the case when the result of an expression is RESULT
3586 converted to TYPE and OMITTED was previously an operand of the expression
3587 but is now not needed (e.g., we folded OMITTED * 0).
3589 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3590 the conversion of RESULT to TYPE. */
3592 tree
3593 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3595 tree t = fold_convert_loc (loc, type, result);
3597 /* If the resulting operand is an empty statement, just return the omitted
3598 statement cast to void. */
3599 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3600 return build1_loc (loc, NOP_EXPR, void_type_node,
3601 fold_ignored_result (omitted));
3603 if (TREE_SIDE_EFFECTS (omitted))
3604 return build2_loc (loc, COMPOUND_EXPR, type,
3605 fold_ignored_result (omitted), t);
3607 return non_lvalue_loc (loc, t);
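/* For example (illustrative): when folding f () * 0 to 0, the call
   f () cannot simply be dropped if it has side effects, so
   omit_one_operand_loc (loc, type, integer_zero_node, call) produces
   the COMPOUND_EXPR (f (), 0), which still evaluates the call.  */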
3610 /* Return a tree for the case when the result of an expression is RESULT
3611 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3612 of the expression but are now not needed.
3614 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3615 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3616 evaluated before OMITTED2. Otherwise, if neither has side effects,
3617 just do the conversion of RESULT to TYPE. */
3619 tree
3620 omit_two_operands_loc (location_t loc, tree type, tree result,
3621 tree omitted1, tree omitted2)
3623 tree t = fold_convert_loc (loc, type, result);
3625 if (TREE_SIDE_EFFECTS (omitted2))
3626 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3627 if (TREE_SIDE_EFFECTS (omitted1))
3628 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3630 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3634 /* Return a simplified tree node for the truth-negation of ARG. This
3635 never alters ARG itself. We assume that ARG is an operation that
3636 returns a truth value (0 or 1).
3638 FIXME: one would think we would fold the result, but it causes
3639 problems with the dominator optimizer. */
3641 static tree
3642 fold_truth_not_expr (location_t loc, tree arg)
3644 tree type = TREE_TYPE (arg);
3645 enum tree_code code = TREE_CODE (arg);
3646 location_t loc1, loc2;
3648 /* If this is a comparison, we can simply invert it, except for
3649 floating-point non-equality comparisons, in which case we just
3650 enclose a TRUTH_NOT_EXPR around what we have. */
3652 if (TREE_CODE_CLASS (code) == tcc_comparison)
3654 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3655 if (FLOAT_TYPE_P (op_type)
3656 && flag_trapping_math
3657 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3658 && code != NE_EXPR && code != EQ_EXPR)
3659 return NULL_TREE;
3661 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3662 if (code == ERROR_MARK)
3663 return NULL_TREE;
3665 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3666 TREE_OPERAND (arg, 1));
3667 if (TREE_NO_WARNING (arg))
3668 TREE_NO_WARNING (ret) = 1;
3669 return ret;
3672 switch (code)
3674 case INTEGER_CST:
3675 return constant_boolean_node (integer_zerop (arg), type);
3677 case TRUTH_AND_EXPR:
3678 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3679 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3680 return build2_loc (loc, TRUTH_OR_EXPR, type,
3681 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3682 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3684 case TRUTH_OR_EXPR:
3685 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3686 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3687 return build2_loc (loc, TRUTH_AND_EXPR, type,
3688 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3689 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3691 case TRUTH_XOR_EXPR:
3692 /* Here we can invert either operand. We invert the first operand
3693 unless the second operand is a TRUTH_NOT_EXPR in which case our
3694 result is the XOR of the first operand with the inside of the
3695 negation of the second operand. */
3697 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3698 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3699 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3700 else
3701 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3702 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3703 TREE_OPERAND (arg, 1));
3705 case TRUTH_ANDIF_EXPR:
3706 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3707 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3708 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3709 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3710 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3712 case TRUTH_ORIF_EXPR:
3713 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3714 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3715 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3716 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3717 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3719 case TRUTH_NOT_EXPR:
3720 return TREE_OPERAND (arg, 0);
3722 case COND_EXPR:
3724 tree arg1 = TREE_OPERAND (arg, 1);
3725 tree arg2 = TREE_OPERAND (arg, 2);
3727 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3728 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3730 /* A COND_EXPR may have a throw as one operand, which
3731 then has void type. Just leave void operands
3732 as they are. */
3733 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3734 VOID_TYPE_P (TREE_TYPE (arg1))
3735 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3736 VOID_TYPE_P (TREE_TYPE (arg2))
3737 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3740 case COMPOUND_EXPR:
3741 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3742 return build2_loc (loc, COMPOUND_EXPR, type,
3743 TREE_OPERAND (arg, 0),
3744 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3746 case NON_LVALUE_EXPR:
3747 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3748 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3750 CASE_CONVERT:
3751 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3752 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3754 /* fall through */
3756 case FLOAT_EXPR:
3757 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3758 return build1_loc (loc, TREE_CODE (arg), type,
3759 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3761 case BIT_AND_EXPR:
3762 if (!integer_onep (TREE_OPERAND (arg, 1)))
3763 return NULL_TREE;
3764 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3766 case SAVE_EXPR:
3767 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3769 case CLEANUP_POINT_EXPR:
3770 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3771 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3772 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3774 default:
3775 return NULL_TREE;
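/* For example (illustrative): negating a && b yields !a || !b via the
   TRUTH_AND_EXPR case above, while negating a floating-point a < b
   under -ftrapping-math returns NULL_TREE rather than the NaN-unsafe
   a >= b.  */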
3779 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3780 assume that ARG is an operation that returns a truth value (0 or 1
3781 for scalars, 0 or -1 for vectors). Return the folded expression if
3782 folding is successful. Otherwise, return NULL_TREE. */
3784 static tree
3785 fold_invert_truthvalue (location_t loc, tree arg)
3787 tree type = TREE_TYPE (arg);
3788 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3789 ? BIT_NOT_EXPR
3790 : TRUTH_NOT_EXPR,
3791 type, arg);
3794 /* Return a simplified tree node for the truth-negation of ARG. This
3795 never alters ARG itself. We assume that ARG is an operation that
3796 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3798 tree
3799 invert_truthvalue_loc (location_t loc, tree arg)
3801 if (TREE_CODE (arg) == ERROR_MARK)
3802 return arg;
3804 tree type = TREE_TYPE (arg);
3805 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3806 ? BIT_NOT_EXPR
3807 : TRUTH_NOT_EXPR,
3808 type, arg);
3811 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3812 with code CODE. This optimization is unsafe. */
3813 static tree
3814 distribute_real_division (location_t loc, enum tree_code code, tree type,
3815 tree arg0, tree arg1)
3817 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3818 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3820 /* (A / C) +- (B / C) -> (A +- B) / C. */
3821 if (mul0 == mul1
3822 && operand_equal_p (TREE_OPERAND (arg0, 1),
3823 TREE_OPERAND (arg1, 1), 0))
3824 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3825 fold_build2_loc (loc, code, type,
3826 TREE_OPERAND (arg0, 0),
3827 TREE_OPERAND (arg1, 0)),
3828 TREE_OPERAND (arg0, 1));
3830 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3831 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3832 TREE_OPERAND (arg1, 0), 0)
3833 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3834 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3836 REAL_VALUE_TYPE r0, r1;
3837 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3838 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3839 if (!mul0)
3840 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3841 if (!mul1)
3842 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3843 real_arithmetic (&r0, code, &r0, &r1);
3844 return fold_build2_loc (loc, MULT_EXPR, type,
3845 TREE_OPERAND (arg0, 0),
3846 build_real (type, r0));
3849 return NULL_TREE;
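/* For example (illustrative): the transformations above rewrite
   a/c + b/c as (a + b)/c and a/2.0 + a/4.0 as a * 0.75.  Both can
   change rounding and overflow behavior under IEEE arithmetic, which
   is why the comment above calls the optimization unsafe.  */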
3852 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3853 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3854 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
3855 is the original memory reference used to preserve the alias set of
3856 the access. */
3858 static tree
3859 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3860 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3861 int unsignedp, int reversep)
3863 tree result, bftype;
3865 /* Attempt not to lose the access path if possible. */
3866 if (TREE_CODE (orig_inner) == COMPONENT_REF)
3868 tree ninner = TREE_OPERAND (orig_inner, 0);
3869 machine_mode nmode;
3870 HOST_WIDE_INT nbitsize, nbitpos;
3871 tree noffset;
3872 int nunsignedp, nreversep, nvolatilep = 0;
3873 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
3874 &noffset, &nmode, &nunsignedp,
3875 &nreversep, &nvolatilep);
3876 if (base == inner
3877 && noffset == NULL_TREE
3878 && nbitsize >= bitsize
3879 && nbitpos <= bitpos
3880 && bitpos + bitsize <= nbitpos + nbitsize
3881 && !reversep
3882 && !nreversep
3883 && !nvolatilep)
3885 inner = ninner;
3886 bitpos -= nbitpos;
3890 alias_set_type iset = get_alias_set (orig_inner);
3891 if (iset == 0 && get_alias_set (inner) != iset)
3892 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
3893 build_fold_addr_expr (inner),
3894 build_int_cst (ptr_type_node, 0));
3896 if (bitpos == 0 && !reversep)
3898 tree size = TYPE_SIZE (TREE_TYPE (inner));
3899 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3900 || POINTER_TYPE_P (TREE_TYPE (inner)))
3901 && tree_fits_shwi_p (size)
3902 && tree_to_shwi (size) == bitsize)
3903 return fold_convert_loc (loc, type, inner);
3906 bftype = type;
3907 if (TYPE_PRECISION (bftype) != bitsize
3908 || TYPE_UNSIGNED (bftype) == !unsignedp)
3909 bftype = build_nonstandard_integer_type (bitsize, 0);
3911 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3912 size_int (bitsize), bitsize_int (bitpos));
3913 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3915 if (bftype != type)
3916 result = fold_convert_loc (loc, type, result);
3918 return result;
3921 /* Optimize a bit-field compare.
3923 There are two cases: First is a compare against a constant and the
3924 second is a comparison of two items where the fields are at the same
3925 bit position relative to the start of a chunk (byte, halfword, word)
3926 large enough to contain it. In these cases we can avoid the shift
3927 implicit in bitfield extractions.
3929 For constants, we emit a compare of the shifted constant with the
3930 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3931 compared. For two fields at the same position, we do the ANDs with the
3932 similar mask and compare the result of the ANDs.
3934 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3935 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3936 are the left and right operands of the comparison, respectively.
3938 If the optimization described above can be done, we return the resulting
3939 tree. Otherwise we return zero. */
3941 static tree
3942 optimize_bit_field_compare (location_t loc, enum tree_code code,
3943 tree compare_type, tree lhs, tree rhs)
3945 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3946 tree type = TREE_TYPE (lhs);
3947 tree unsigned_type;
3948 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3949 machine_mode lmode, rmode, nmode;
3950 int lunsignedp, runsignedp;
3951 int lreversep, rreversep;
3952 int lvolatilep = 0, rvolatilep = 0;
3953 tree linner, rinner = NULL_TREE;
3954 tree mask;
3955 tree offset;
3957 /* Get all the information about the extractions being done. If the bit size
3958 is the same as the size of the underlying object, we aren't doing an
3959 extraction at all and so can do nothing. We also don't want to
3960 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3961 then will no longer be able to replace it. */
3962 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3963 &lunsignedp, &lreversep, &lvolatilep);
3964 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3965 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3966 return 0;
3968 if (const_p)
3969 rreversep = lreversep;
3970 else
3972 /* If this is not a constant, we can only do something if bit positions,
3973 sizes, signedness and storage order are the same. */
3974 rinner
3975 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3976 &runsignedp, &rreversep, &rvolatilep);
3978 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3979 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3980 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3981 return 0;
3984 /* Honor the C++ memory model and mimic what RTL expansion does. */
3985 unsigned HOST_WIDE_INT bitstart = 0;
3986 unsigned HOST_WIDE_INT bitend = 0;
3987 if (TREE_CODE (lhs) == COMPONENT_REF)
3989 get_bit_range (&bitstart, &bitend, lhs, &lbitpos, &offset);
3990 if (offset != NULL_TREE)
3991 return 0;
3994 /* See if we can find a mode to refer to this field. We should be able to,
3995 but fail if we can't. */
3996 nmode = get_best_mode (lbitsize, lbitpos, bitstart, bitend,
3997 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3998 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3999 TYPE_ALIGN (TREE_TYPE (rinner))),
4000 word_mode, false);
4001 if (nmode == VOIDmode)
4002 return 0;
4004 /* Set signed and unsigned types of the precision of this mode for the
4005 shifts below. */
4006 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4008 /* Compute the bit position and size for the new reference and our offset
4009 within it. If the new reference is the same size as the original, we
4010 won't optimize anything, so return zero. */
4011 nbitsize = GET_MODE_BITSIZE (nmode);
4012 nbitpos = lbitpos & ~ (nbitsize - 1);
4013 lbitpos -= nbitpos;
4014 if (nbitsize == lbitsize)
4015 return 0;
4017 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4018 lbitpos = nbitsize - lbitsize - lbitpos;
4020 /* Make the mask to be used against the extracted field. */
4021 mask = build_int_cst_type (unsigned_type, -1);
4022 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4023 mask = const_binop (RSHIFT_EXPR, mask,
4024 size_int (nbitsize - lbitsize - lbitpos));
4026 if (! const_p)
4027 /* If not comparing with constant, just rework the comparison
4028 and return. */
4029 return fold_build2_loc (loc, code, compare_type,
4030 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4031 make_bit_field_ref (loc, linner, lhs,
4032 unsigned_type,
4033 nbitsize, nbitpos,
4034 1, lreversep),
4035 mask),
4036 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4037 make_bit_field_ref (loc, rinner, rhs,
4038 unsigned_type,
4039 nbitsize, nbitpos,
4040 1, rreversep),
4041 mask));
4043 /* Otherwise, we are handling the constant case. See if the constant is too
4044 big for the field. Warn and return a tree for 0 (false) if so. We do
4045 this not only for its own sake, but to avoid having to test for this
4046 error case below. If we didn't, we might generate wrong code.
4048 For unsigned fields, the constant shifted right by the field length should
4049 be all zero. For signed fields, the high-order bits should agree with
4050 the sign bit. */
4052 if (lunsignedp)
4054 if (wi::lrshift (rhs, lbitsize) != 0)
4056 warning (0, "comparison is always %d due to width of bit-field",
4057 code == NE_EXPR);
4058 return constant_boolean_node (code == NE_EXPR, compare_type);
4061 else
4063 wide_int tem = wi::arshift (rhs, lbitsize - 1);
4064 if (tem != 0 && tem != -1)
4066 warning (0, "comparison is always %d due to width of bit-field",
4067 code == NE_EXPR);
4068 return constant_boolean_node (code == NE_EXPR, compare_type);
4072 /* Single-bit compares should always be against zero. */
4073 if (lbitsize == 1 && ! integer_zerop (rhs))
4075 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4076 rhs = build_int_cst (type, 0);
4079 /* Make a new bitfield reference, shift the constant over the
4080 appropriate number of bits and mask it with the computed mask
4081 (in case this was a signed field). If we changed it, make a new one. */
4082 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4083 nbitsize, nbitpos, 1, lreversep);
4085 rhs = const_binop (BIT_AND_EXPR,
4086 const_binop (LSHIFT_EXPR,
4087 fold_convert_loc (loc, unsigned_type, rhs),
4088 size_int (lbitpos)),
4089 mask);
4091 lhs = build2_loc (loc, code, compare_type,
4092 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4093 return lhs;
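
/* A standalone sketch (not part of this file) of the constant case
   above, written out by hand: the bit-field test is done by masking
   the containing word and comparing it against the shifted constant,
   with no shift of the field itself.  The layout (a 3-bit field at
   bit 4 of a 32-bit word) is purely illustrative.  */

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t word = 0x50u;                /* field value 5 in bits 4..6 */
  unsigned bitpos = 4, bitsize = 3;
  uint32_t mask = ((1u << bitsize) - 1) << bitpos;

  /* Extract-then-compare: shift the field down, then test.  */
  int extracted = ((word >> bitpos) & ((1u << bitsize) - 1)) == 5;

  /* Folded form: mask in place and compare the shifted constant.  */
  int folded = (word & mask) == (5u << bitpos);

  assert (extracted == folded);
  return 0;
}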
4096 /* Subroutine for fold_truth_andor_1: decode a field reference.
4098 If EXP is a comparison reference, we return the innermost reference.
4100 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4101 set to the starting bit number.
4103 If the innermost field can be completely contained in a mode-sized
4104 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4106 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4107 otherwise it is not changed.
4109 *PUNSIGNEDP is set to the signedness of the field.
4111 *PREVERSEP is set to the storage order of the field.
4113 *PMASK is set to the mask used. This is either contained in a
4114 BIT_AND_EXPR or derived from the width of the field.
4116 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4118 Return 0 if this is not a component reference or is one that we can't
4119 do anything with. */
4121 static tree
4122 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4123 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4124 int *punsignedp, int *preversep, int *pvolatilep,
4125 tree *pmask, tree *pand_mask)
4127 tree exp = *exp_;
4128 tree outer_type = 0;
4129 tree and_mask = 0;
4130 tree mask, inner, offset;
4131 tree unsigned_type;
4132 unsigned int precision;
4134 /* All the optimizations using this function assume integer fields.
4135 There are problems with FP fields since the type_for_size call
4136 below can fail for, e.g., XFmode. */
4137 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4138 return 0;
4140 /* We are interested in the bare arrangement of bits, so strip everything
4141 that doesn't affect the machine mode. However, record the type of the
4142 outermost expression if it may matter below. */
4143 if (CONVERT_EXPR_P (exp)
4144 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4145 outer_type = TREE_TYPE (exp);
4146 STRIP_NOPS (exp);
4148 if (TREE_CODE (exp) == BIT_AND_EXPR)
4150 and_mask = TREE_OPERAND (exp, 1);
4151 exp = TREE_OPERAND (exp, 0);
4152 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4153 if (TREE_CODE (and_mask) != INTEGER_CST)
4154 return 0;
4157 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4158 punsignedp, preversep, pvolatilep);
4159 if ((inner == exp && and_mask == 0)
4160 || *pbitsize < 0 || offset != 0
4161 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4162 /* Reject out-of-bound accesses (PR79731). */
4163 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4164 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4165 *pbitpos + *pbitsize) < 0))
4166 return 0;
4168 *exp_ = exp;
4170 /* If the number of bits in the reference is the same as the bitsize of
4171 the outer type, then the outer type gives the signedness. Otherwise
4172 (in case of a small bitfield) the signedness is unchanged. */
4173 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4174 *punsignedp = TYPE_UNSIGNED (outer_type);
4176 /* Compute the mask to access the bitfield. */
4177 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4178 precision = TYPE_PRECISION (unsigned_type);
4180 mask = build_int_cst_type (unsigned_type, -1);
4182 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4183 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4185 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4186 if (and_mask != 0)
4187 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4188 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4190 *pmask = mask;
4191 *pand_mask = and_mask;
4192 return inner;
4195 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4196 bit positions and the type of MASK is signed. */
4198 static int
4199 all_ones_mask_p (const_tree mask, unsigned int size)
4201 tree type = TREE_TYPE (mask);
4202 unsigned int precision = TYPE_PRECISION (type);
4204 /* If this function returns true when the type of the mask is
4205 UNSIGNED, then there will be errors. In particular see
4206 gcc.c-torture/execute/990326-1.c. There does not appear to be
4207 any documentation paper trail as to why this is so. But the pre
4208 wide-int code worked with that restriction and it has been preserved
4209 here. */
4210 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4211 return false;
4213 return wi::mask (size, false, precision) == mask;
4216 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4217 represents the sign bit of EXP's type. If EXP represents a sign
4218 or zero extension, also test VAL against the unextended type.
4219 The return value is the (sub)expression whose sign bit is VAL,
4220 or NULL_TREE otherwise. */
4222 tree
4223 sign_bit_p (tree exp, const_tree val)
4225 int width;
4226 tree t;
4228 /* Tree EXP must have an integral type. */
4229 t = TREE_TYPE (exp);
4230 if (! INTEGRAL_TYPE_P (t))
4231 return NULL_TREE;
4233 /* Tree VAL must be an integer constant. */
4234 if (TREE_CODE (val) != INTEGER_CST
4235 || TREE_OVERFLOW (val))
4236 return NULL_TREE;
4238 width = TYPE_PRECISION (t);
4239 if (wi::only_sign_bit_p (val, width))
4240 return exp;
4242 /* Handle extension from a narrower type. */
4243 if (TREE_CODE (exp) == NOP_EXPR
4244 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4245 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4247 return NULL_TREE;
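
/* A standalone sketch (not part of this file) of one folding that a
   sign-bit test like this enables: for an 8-bit value, testing the
   sign bit with a mask agrees with a signed comparison against zero
   (assuming the usual two's-complement conversion).  */

#include <assert.h>

int
main (void)
{
  for (int x = 0; x < 256; x++)
    assert (((x & 0x80) != 0) == ((signed char) (unsigned char) x < 0));
  return 0;
}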
4250 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4251 to be evaluated unconditionally. */
4253 static int
4254 simple_operand_p (const_tree exp)
4256 /* Strip any conversions that don't change the machine mode. */
4257 STRIP_NOPS (exp);
4259 return (CONSTANT_CLASS_P (exp)
4260 || TREE_CODE (exp) == SSA_NAME
4261 || (DECL_P (exp)
4262 && ! TREE_ADDRESSABLE (exp)
4263 && ! TREE_THIS_VOLATILE (exp)
4264 && ! DECL_NONLOCAL (exp)
4265 /* Don't regard global variables as simple. They may be
4266 allocated in ways unknown to the compiler (shared memory,
4267 #pragma weak, etc). */
4268 && ! TREE_PUBLIC (exp)
4269 && ! DECL_EXTERNAL (exp)
4270 /* Weakrefs are not safe to be read, since they can be NULL.
4271 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4272 have DECL_WEAK flag set. */
4273 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4274 /* Loading a static variable is unduly expensive, but global
4275 registers aren't expensive. */
4276 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4279 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4280 to be evaluated unconditionally.
4281 In addition to simple_operand_p, we assume that comparisons, conversions,
4282 and logic-not operations are simple, if their operands are simple, too. */
4284 static bool
4285 simple_operand_p_2 (tree exp)
4287 enum tree_code code;
4289 if (TREE_SIDE_EFFECTS (exp)
4290 || tree_could_trap_p (exp))
4291 return false;
4293 while (CONVERT_EXPR_P (exp))
4294 exp = TREE_OPERAND (exp, 0);
4296 code = TREE_CODE (exp);
4298 if (TREE_CODE_CLASS (code) == tcc_comparison)
4299 return (simple_operand_p (TREE_OPERAND (exp, 0))
4300 && simple_operand_p (TREE_OPERAND (exp, 1)));
4302 if (code == TRUTH_NOT_EXPR)
4303 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4305 return simple_operand_p (exp);
4309 /* The following functions are subroutines to fold_range_test and allow it to
4310 try to change a logical combination of comparisons into a range test.
4312 For example, both
4313 X == 2 || X == 3 || X == 4 || X == 5
4314 and
4315 X >= 2 && X <= 5
4316 are converted to
4317 (unsigned) (X - 2) <= 3
4319 We describe each set of comparisons as being either inside or outside
4320 a range, using a variable named like IN_P, and then describe the
4321 range with a lower and upper bound. If one of the bounds is omitted,
4322 it represents either the highest or lowest value of the type.
4324 In the comments below, we represent a range by two numbers in brackets
4325 preceded by a "+" to designate being inside that range, or a "-" to
4326 designate being outside that range, so the condition can be inverted by
4327 flipping the prefix. An omitted bound is represented by a "-". For
4328 example, "- [-, 10]" means being outside the range starting at the lowest
4329 possible value and ending at 10, in other words, being greater than 10.
4330 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4331 always false.
4333 We set up things so that the missing bounds are handled in a consistent
4334 manner so neither a missing bound nor "true" and "false" need to be
4335 handled using a special case. */
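
/* A standalone sketch (not part of this file) checking the rewrite
   quoted above exhaustively: both the equality chain and the pair of
   ordered comparisons agree with the single unsigned range test.  */

#include <assert.h>

int
main (void)
{
  for (int x = -1000; x <= 1000; x++)
    {
      int chain = (x == 2 || x == 3 || x == 4 || x == 5);
      int pair = (x >= 2 && x <= 5);
      int range = (unsigned) (x - 2) <= 3;
      assert (chain == range && pair == range);
    }
  return 0;
}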
4337 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4338 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4339 and UPPER1_P are nonzero if the respective argument is an upper bound
4340 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4341 must be specified for a comparison. ARG1 will be converted to ARG0's
4342 type if both are specified. */
4344 static tree
4345 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4346 tree arg1, int upper1_p)
4348 tree tem;
4349 int result;
4350 int sgn0, sgn1;
4352 /* If neither arg represents infinity, do the normal operation.
4353 Else, if not a comparison, return infinity. Else handle the special
4354 comparison rules. Note that most of the cases below won't occur, but
4355 are handled for consistency. */
4357 if (arg0 != 0 && arg1 != 0)
4359 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4360 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4361 STRIP_NOPS (tem);
4362 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4365 if (TREE_CODE_CLASS (code) != tcc_comparison)
4366 return 0;
4368 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4369 for neither. In real maths, we cannot assume open ended ranges are
4370 the same. But, this is computer arithmetic, where numbers are finite.
4371 We can therefore make the transformation of any unbounded range with
4372 the value Z, Z being greater than any representable number. This permits
4373 us to treat unbounded ranges as equal. */
4374 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4375 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4376 switch (code)
4378 case EQ_EXPR:
4379 result = sgn0 == sgn1;
4380 break;
4381 case NE_EXPR:
4382 result = sgn0 != sgn1;
4383 break;
4384 case LT_EXPR:
4385 result = sgn0 < sgn1;
4386 break;
4387 case LE_EXPR:
4388 result = sgn0 <= sgn1;
4389 break;
4390 case GT_EXPR:
4391 result = sgn0 > sgn1;
4392 break;
4393 case GE_EXPR:
4394 result = sgn0 >= sgn1;
4395 break;
4396 default:
4397 gcc_unreachable ();
4400 return constant_boolean_node (result, type);
4403 /* Helper routine for make_range. Perform one step for it, return
4404 new expression if the loop should continue or NULL_TREE if it should
4405 stop. */
4407 tree
4408 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4409 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4410 bool *strict_overflow_p)
4412 tree arg0_type = TREE_TYPE (arg0);
4413 tree n_low, n_high, low = *p_low, high = *p_high;
4414 int in_p = *p_in_p, n_in_p;
4416 switch (code)
4418 case TRUTH_NOT_EXPR:
4419 /* We can only do something if the range is testing for zero. */
4420 if (low == NULL_TREE || high == NULL_TREE
4421 || ! integer_zerop (low) || ! integer_zerop (high))
4422 return NULL_TREE;
4423 *p_in_p = ! in_p;
4424 return arg0;
4426 case EQ_EXPR: case NE_EXPR:
4427 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4428 /* We can only do something if the range is testing for zero
4429 and if the second operand is an integer constant. Note that
4430 saying something is "in" the range we make is done by
4431 complementing IN_P, since IN_P is set in the initial case of
4432 being not equal to zero; "out" is leaving it alone. */
4433 if (low == NULL_TREE || high == NULL_TREE
4434 || ! integer_zerop (low) || ! integer_zerop (high)
4435 || TREE_CODE (arg1) != INTEGER_CST)
4436 return NULL_TREE;
4438 switch (code)
4440 case NE_EXPR: /* - [c, c] */
4441 low = high = arg1;
4442 break;
4443 case EQ_EXPR: /* + [c, c] */
4444 in_p = ! in_p, low = high = arg1;
4445 break;
4446 case GT_EXPR: /* - [-, c] */
4447 low = 0, high = arg1;
4448 break;
4449 case GE_EXPR: /* + [c, -] */
4450 in_p = ! in_p, low = arg1, high = 0;
4451 break;
4452 case LT_EXPR: /* - [c, -] */
4453 low = arg1, high = 0;
4454 break;
4455 case LE_EXPR: /* + [-, c] */
4456 in_p = ! in_p, low = 0, high = arg1;
4457 break;
4458 default:
4459 gcc_unreachable ();
4462 /* If this is an unsigned comparison, we also know that EXP is
4463 greater than or equal to zero. We base the range tests we make
4464 on that fact, so we record it here so we can parse existing
4465 range tests. We test arg0_type since often the return type
4466 of, e.g. EQ_EXPR, is boolean. */
4467 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4469 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4470 in_p, low, high, 1,
4471 build_int_cst (arg0_type, 0),
4472 NULL_TREE))
4473 return NULL_TREE;
4475 in_p = n_in_p, low = n_low, high = n_high;
4477 /* If the high bound is missing, but we have a nonzero low
4478 bound, reverse the range so it goes from zero to the low bound
4479 minus 1. */
4480 if (high == 0 && low && ! integer_zerop (low))
4482 in_p = ! in_p;
4483 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4484 build_int_cst (TREE_TYPE (low), 1), 0);
4485 low = build_int_cst (arg0_type, 0);
4489 *p_low = low;
4490 *p_high = high;
4491 *p_in_p = in_p;
4492 return arg0;
4494 case NEGATE_EXPR:
4495 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4496 low and high are non-NULL, then normalize will DTRT. */
4497 if (!TYPE_UNSIGNED (arg0_type)
4498 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4500 if (low == NULL_TREE)
4501 low = TYPE_MIN_VALUE (arg0_type);
4502 if (high == NULL_TREE)
4503 high = TYPE_MAX_VALUE (arg0_type);
4506 /* (-x) IN [a,b] -> x in [-b, -a] */
4507 n_low = range_binop (MINUS_EXPR, exp_type,
4508 build_int_cst (exp_type, 0),
4509 0, high, 1);
4510 n_high = range_binop (MINUS_EXPR, exp_type,
4511 build_int_cst (exp_type, 0),
4512 0, low, 0);
4513 if (n_high != 0 && TREE_OVERFLOW (n_high))
4514 return NULL_TREE;
4515 goto normalize;
4517 case BIT_NOT_EXPR:
4518 /* ~ X -> -X - 1 */
4519 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4520 build_int_cst (exp_type, 1));
4522 case PLUS_EXPR:
4523 case MINUS_EXPR:
4524 if (TREE_CODE (arg1) != INTEGER_CST)
4525 return NULL_TREE;
4527 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4528 move a constant to the other side. */
4529 if (!TYPE_UNSIGNED (arg0_type)
4530 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4531 return NULL_TREE;
4533 /* If EXP is signed, any overflow in the computation is undefined,
4534 so we don't worry about it so long as our computations on
4535 the bounds don't overflow. For unsigned, overflow is defined
4536 and this is exactly the right thing. */
4537 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4538 arg0_type, low, 0, arg1, 0);
4539 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4540 arg0_type, high, 1, arg1, 0);
4541 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4542 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4543 return NULL_TREE;
4545 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4546 *strict_overflow_p = true;
4548 normalize:
4549 /* Check for an unsigned range which has wrapped around the maximum
4550 value thus making n_high < n_low, and normalize it. */
4551 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4553 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4554 build_int_cst (TREE_TYPE (n_high), 1), 0);
4555 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4556 build_int_cst (TREE_TYPE (n_low), 1), 0);
4558 /* If the range is of the form +/- [ x+1, x ], we won't
4559 be able to normalize it. But then, it represents the
4560 whole range or the empty set, so make it
4561 +/- [ -, - ]. */
4562 if (tree_int_cst_equal (n_low, low)
4563 && tree_int_cst_equal (n_high, high))
4564 low = high = 0;
4565 else
4566 in_p = ! in_p;
4568 else
4569 low = n_low, high = n_high;
4571 *p_low = low;
4572 *p_high = high;
4573 *p_in_p = in_p;
4574 return arg0;
4576 CASE_CONVERT:
4577 case NON_LVALUE_EXPR:
4578 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4579 return NULL_TREE;
4581 if (! INTEGRAL_TYPE_P (arg0_type)
4582 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4583 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4584 return NULL_TREE;
4586 n_low = low, n_high = high;
4588 if (n_low != 0)
4589 n_low = fold_convert_loc (loc, arg0_type, n_low);
4591 if (n_high != 0)
4592 n_high = fold_convert_loc (loc, arg0_type, n_high);
4594 /* If we're converting arg0 from an unsigned type to exp,
4595 a signed type, we will be doing the comparison as unsigned.
4596 The tests above have already verified that LOW and HIGH
4597 are both positive.
4599 So we have to ensure that we will handle large unsigned
4600 values the same way that the current signed bounds treat
4601 negative values. */
4603 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4605 tree high_positive;
4606 tree equiv_type;
4607 /* For fixed-point modes, we need to pass the saturating flag
4608 as the 2nd parameter. */
4609 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4610 equiv_type
4611 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4612 TYPE_SATURATING (arg0_type));
4613 else
4614 equiv_type
4615 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4617 /* A range without an upper bound is, naturally, unbounded.
4618 Since convert would have cropped a very large value, use
4619 the max value for the destination type. */
4620 high_positive
4621 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4622 : TYPE_MAX_VALUE (arg0_type);
4624 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4625 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4626 fold_convert_loc (loc, arg0_type,
4627 high_positive),
4628 build_int_cst (arg0_type, 1));
4630 /* If the low bound is specified, "and" the range with the
4631 range for which the original unsigned value will be
4632 positive. */
4633 if (low != 0)
4635 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4636 1, fold_convert_loc (loc, arg0_type,
4637 integer_zero_node),
4638 high_positive))
4639 return NULL_TREE;
4641 in_p = (n_in_p == in_p);
4643 else
4645 /* Otherwise, "or" the range with the range of the input
4646 that will be interpreted as negative. */
4647 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4648 1, fold_convert_loc (loc, arg0_type,
4649 integer_zero_node),
4650 high_positive))
4651 return NULL_TREE;
4653 in_p = (in_p != n_in_p);
4657 *p_low = n_low;
4658 *p_high = n_high;
4659 *p_in_p = in_p;
4660 return arg0;
4662 default:
4663 return NULL_TREE;
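
/* A standalone sketch (not part of this file) of two of the steps
   above, checked exhaustively over 8-bit values: the BIT_NOT_EXPR
   step uses the two's-complement identity ~X == -X - 1, and the
   PLUS_EXPR step moves the constant into the bounds.  */

#include <assert.h>

int
main (void)
{
  for (int x = -128; x < 128; x++)
    {
      /* BIT_NOT_EXPR: ~X is rewritten as -X - 1.  */
      assert (~x == -x - 1);

      /* PLUS_EXPR: (X + 10) in [0, 20] iff X in [-10, 10].  */
      assert (((x + 10 >= 0) && (x + 10 <= 20))
	      == ((x >= -10) && (x <= 10)));
    }
  return 0;
}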
4667 /* Given EXP, a logical expression, set the range it is testing into
4668 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4669 actually being tested. *PLOW and *PHIGH will be made of the same
4670 type as the returned expression. If EXP is not a comparison, we
4671 will most likely not be returning a useful value and range. Set
4672 *STRICT_OVERFLOW_P to true if the return value is only valid
4673 because signed overflow is undefined; otherwise, do not change
4674 *STRICT_OVERFLOW_P. */
4676 tree
4677 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4678 bool *strict_overflow_p)
4680 enum tree_code code;
4681 tree arg0, arg1 = NULL_TREE;
4682 tree exp_type, nexp;
4683 int in_p;
4684 tree low, high;
4685 location_t loc = EXPR_LOCATION (exp);
4687 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4688 and see if we can refine the range. Some of the cases below may not
4689 happen, but it doesn't seem worth worrying about this. We "continue"
4690 the outer loop when we've changed something; otherwise we "break"
4691 the switch, which will "break" the while. */
4693 in_p = 0;
4694 low = high = build_int_cst (TREE_TYPE (exp), 0);
4696 while (1)
4698 code = TREE_CODE (exp);
4699 exp_type = TREE_TYPE (exp);
4700 arg0 = NULL_TREE;
4702 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4704 if (TREE_OPERAND_LENGTH (exp) > 0)
4705 arg0 = TREE_OPERAND (exp, 0);
4706 if (TREE_CODE_CLASS (code) == tcc_binary
4707 || TREE_CODE_CLASS (code) == tcc_comparison
4708 || (TREE_CODE_CLASS (code) == tcc_expression
4709 && TREE_OPERAND_LENGTH (exp) > 1))
4710 arg1 = TREE_OPERAND (exp, 1);
4712 if (arg0 == NULL_TREE)
4713 break;
4715 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4716 &high, &in_p, strict_overflow_p);
4717 if (nexp == NULL_TREE)
4718 break;
4719 exp = nexp;
4722 /* If EXP is a constant, we can evaluate whether this is true or false. */
4723 if (TREE_CODE (exp) == INTEGER_CST)
4725 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4726 exp, 0, low, 0))
4727 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4728 exp, 1, high, 1)));
4729 low = high = 0;
4730 exp = 0;
4733 *pin_p = in_p, *plow = low, *phigh = high;
4734 return exp;
4737 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4738 type, TYPE, return an expression to test if EXP is in (or out of, depending
4739 on IN_P) the range. Return 0 if the test couldn't be created. */
4741 tree
4742 build_range_check (location_t loc, tree type, tree exp, int in_p,
4743 tree low, tree high)
4745 tree etype = TREE_TYPE (exp), value;
4747 /* Disable this optimization for function pointer expressions
4748 on targets that require function pointer canonicalization. */
4749 if (targetm.have_canonicalize_funcptr_for_compare ()
4750 && TREE_CODE (etype) == POINTER_TYPE
4751 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4752 return NULL_TREE;
4754 if (! in_p)
4756 value = build_range_check (loc, type, exp, 1, low, high);
4757 if (value != 0)
4758 return invert_truthvalue_loc (loc, value);
4760 return 0;
4763 if (low == 0 && high == 0)
4764 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4766 if (low == 0)
4767 return fold_build2_loc (loc, LE_EXPR, type, exp,
4768 fold_convert_loc (loc, etype, high));
4770 if (high == 0)
4771 return fold_build2_loc (loc, GE_EXPR, type, exp,
4772 fold_convert_loc (loc, etype, low));
4774 if (operand_equal_p (low, high, 0))
4775 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4776 fold_convert_loc (loc, etype, low));
4778 if (integer_zerop (low))
4780 if (! TYPE_UNSIGNED (etype))
4782 etype = unsigned_type_for (etype);
4783 high = fold_convert_loc (loc, etype, high);
4784 exp = fold_convert_loc (loc, etype, exp);
4786 return build_range_check (loc, type, exp, 1, 0, high);
4789 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4790 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4792 int prec = TYPE_PRECISION (etype);
4794 if (wi::mask (prec - 1, false, prec) == high)
4796 if (TYPE_UNSIGNED (etype))
4798 tree signed_etype = signed_type_for (etype);
4799 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4800 etype
4801 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4802 else
4803 etype = signed_etype;
4804 exp = fold_convert_loc (loc, etype, exp);
4806 return fold_build2_loc (loc, GT_EXPR, type, exp,
4807 build_int_cst (etype, 0));
4811 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4812 This requires wrap-around arithmetic for the type of the expression.
4813 First make sure that arithmetic in this type is valid, then make sure
4814 that it wraps around. */
4815 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4816 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4817 TYPE_UNSIGNED (etype));
4819 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4821 tree utype, minv, maxv;
4823 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4824 for the type in question, as we rely on this here. */
4825 utype = unsigned_type_for (etype);
4826 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4827 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4828 build_int_cst (TREE_TYPE (maxv), 1), 1);
4829 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4831 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4832 minv, 1, maxv, 1)))
4833 etype = utype;
4834 else
4835 return 0;
4838 high = fold_convert_loc (loc, etype, high);
4839 low = fold_convert_loc (loc, etype, low);
4840 exp = fold_convert_loc (loc, etype, exp);
4842 value = const_binop (MINUS_EXPR, high, low);
4845 if (POINTER_TYPE_P (etype))
4847 if (value != 0 && !TREE_OVERFLOW (value))
4849 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4850 return build_range_check (loc, type,
4851 fold_build_pointer_plus_loc (loc, exp, low),
4852 1, build_int_cst (etype, 0), value);
4854 return 0;
4857 if (value != 0 && !TREE_OVERFLOW (value))
4858 return build_range_check (loc, type,
4859 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4860 1, build_int_cst (etype, 0), value);
4862 return 0;
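
/* A standalone sketch (not part of this file) of the
   (c >= 1) && (c <= 127) -> (signed char) c > 0 rewrite mentioned
   above, checked for every 8-bit value (assuming the usual
   two's-complement conversion to signed char).  */

#include <assert.h>

int
main (void)
{
  for (int c = 0; c < 256; c++)
    assert ((c >= 1 && c <= 127)
	    == ((signed char) (unsigned char) c > 0));
  return 0;
}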
4865 /* Return the predecessor of VAL in its type, handling the infinite case. */
4867 static tree
4868 range_predecessor (tree val)
4870 tree type = TREE_TYPE (val);
4872 if (INTEGRAL_TYPE_P (type)
4873 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4874 return 0;
4875 else
4876 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4877 build_int_cst (TREE_TYPE (val), 1), 0);
4880 /* Return the successor of VAL in its type, handling the infinite case. */
4882 static tree
4883 range_successor (tree val)
4885 tree type = TREE_TYPE (val);
4887 if (INTEGRAL_TYPE_P (type)
4888 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4889 return 0;
4890 else
4891 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4892 build_int_cst (TREE_TYPE (val), 1), 0);
4895 /* Given two ranges, see if we can merge them into one. Return 1 if we
4896 can, 0 if we can't. Set the output range into the specified parameters. */
4898 bool
4899 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4900 tree high0, int in1_p, tree low1, tree high1)
4902 int no_overlap;
4903 int subset;
4904 int temp;
4905 tree tem;
4906 int in_p;
4907 tree low, high;
4908 int lowequal = ((low0 == 0 && low1 == 0)
4909 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4910 low0, 0, low1, 0)));
4911 int highequal = ((high0 == 0 && high1 == 0)
4912 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4913 high0, 1, high1, 1)));
4915 /* Make range 0 be the range that starts first, or ends last if they
4916 start at the same value. Swap them if that is not the case. */
4917 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4918 low0, 0, low1, 0))
4919 || (lowequal
4920 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4921 high1, 1, high0, 1))))
4923 temp = in0_p, in0_p = in1_p, in1_p = temp;
4924 tem = low0, low0 = low1, low1 = tem;
4925 tem = high0, high0 = high1, high1 = tem;
4928 /* Now flag two cases, whether the ranges are disjoint or whether the
4929 second range is totally subsumed in the first. Note that the tests
4930 below are simplified by the ones above. */
4931 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4932 high0, 1, low1, 0));
4933 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4934 high1, 1, high0, 1));
4936 /* We now have four cases, depending on whether we are including or
4937 excluding the two ranges. */
4938 if (in0_p && in1_p)
4940 /* If they don't overlap, the result is false. If the second range
4941 is a subset it is the result. Otherwise, the range is from the start
4942 of the second to the end of the first. */
4943 if (no_overlap)
4944 in_p = 0, low = high = 0;
4945 else if (subset)
4946 in_p = 1, low = low1, high = high1;
4947 else
4948 in_p = 1, low = low1, high = high0;
4951 else if (in0_p && ! in1_p)
4953 /* If they don't overlap, the result is the first range. If they are
4954 equal, the result is false. If the second range is a subset of the
4955 first, and the ranges begin at the same place, we go from just after
4956 the end of the second range to the end of the first. If the second
4957 range is not a subset of the first, or if it is a subset and both
4958 ranges end at the same place, the range starts at the start of the
4959 first range and ends just before the second range.
4960 Otherwise, we can't describe this as a single range. */
4961 if (no_overlap)
4962 in_p = 1, low = low0, high = high0;
4963 else if (lowequal && highequal)
4964 in_p = 0, low = high = 0;
4965 else if (subset && lowequal)
4967 low = range_successor (high1);
4968 high = high0;
4969 in_p = 1;
4970 if (low == 0)
4972 /* We are in the weird situation where high0 > high1 but
4973 high1 has no successor. Punt. */
4974 return 0;
4977 else if (! subset || highequal)
4979 low = low0;
4980 high = range_predecessor (low1);
4981 in_p = 1;
4982 if (high == 0)
4984 /* low0 < low1 but low1 has no predecessor. Punt. */
4985 return 0;
4988 else
4989 return 0;
4992 else if (! in0_p && in1_p)
4994 /* If they don't overlap, the result is the second range. If the second
4995 is a subset of the first, the result is false. Otherwise,
4996 the range starts just after the first range and ends at the
4997 end of the second. */
4998 if (no_overlap)
4999 in_p = 1, low = low1, high = high1;
5000 else if (subset || highequal)
5001 in_p = 0, low = high = 0;
5002 else
5004 low = range_successor (high0);
5005 high = high1;
5006 in_p = 1;
5007 if (low == 0)
5009 /* high1 > high0 but high0 has no successor. Punt. */
5010 return 0;
5015 else
5017 /* The case where we are excluding both ranges. Here the complex case
5018 is if they don't overlap. In that case, the only time we have a
5019 range is if they are adjacent. If the second is a subset of the
5020 first, the result is the first. Otherwise, the range to exclude
5021 starts at the beginning of the first range and ends at the end of the
5022 second. */
5023 if (no_overlap)
5025 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5026 range_successor (high0),
5027 1, low1, 0)))
5028 in_p = 0, low = low0, high = high1;
5029 else
5031 /* Canonicalize - [min, x] into - [-, x]. */
5032 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5033 switch (TREE_CODE (TREE_TYPE (low0)))
5035 case ENUMERAL_TYPE:
5036 if (TYPE_PRECISION (TREE_TYPE (low0))
5037 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5038 break;
5039 /* FALLTHROUGH */
5040 case INTEGER_TYPE:
5041 if (tree_int_cst_equal (low0,
5042 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5043 low0 = 0;
5044 break;
5045 case POINTER_TYPE:
5046 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5047 && integer_zerop (low0))
5048 low0 = 0;
5049 break;
5050 default:
5051 break;
5054 /* Canonicalize - [x, max] into - [x, -]. */
5055 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5056 switch (TREE_CODE (TREE_TYPE (high1)))
5058 case ENUMERAL_TYPE:
5059 if (TYPE_PRECISION (TREE_TYPE (high1))
5060 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5061 break;
5062 /* FALLTHROUGH */
5063 case INTEGER_TYPE:
5064 if (tree_int_cst_equal (high1,
5065 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5066 high1 = 0;
5067 break;
5068 case POINTER_TYPE:
5069 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5070 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5071 high1, 1,
5072 build_int_cst (TREE_TYPE (high1), 1),
5073 1)))
5074 high1 = 0;
5075 break;
5076 default:
5077 break;
5080 /* The ranges might be also adjacent between the maximum and
5081 minimum values of the given type. For
5082 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5083 return + [x + 1, y - 1]. */
5084 if (low0 == 0 && high1 == 0)
5086 low = range_successor (high0);
5087 high = range_predecessor (low1);
5088 if (low == 0 || high == 0)
5089 return 0;
5091 in_p = 1;
5093 else
5094 return 0;
5097 else if (subset)
5098 in_p = 0, low = low0, high = high0;
5099 else
5100 in_p = 0, low = low0, high = high1;
5103 *pin_p = in_p, *plow = low, *phigh = high;
5104 return 1;
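
/* A standalone sketch (not part of this file): with both ranges "in"
   (the first of the four cases above), merging computes the
   intersection, e.g. [2, 5] and [4, 10] merge to [4, 5].  */

#include <assert.h>

int
main (void)
{
  for (int x = -100; x <= 100; x++)
    assert (((x >= 2 && x <= 5) && (x >= 4 && x <= 10))
	    == (x >= 4 && x <= 5));
  return 0;
}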
5108 /* Subroutine of fold, looking inside expressions of the form
5109 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5110 of the COND_EXPR. This function is being used also to optimize
5111 A op B ? C : A, by reversing the comparison first.
5113 Return a folded expression whose code is not a COND_EXPR
5114 anymore, or NULL_TREE if no folding opportunity is found. */
5116 static tree
5117 fold_cond_expr_with_comparison (location_t loc, tree type,
5118 tree arg0, tree arg1, tree arg2)
5120 enum tree_code comp_code = TREE_CODE (arg0);
5121 tree arg00 = TREE_OPERAND (arg0, 0);
5122 tree arg01 = TREE_OPERAND (arg0, 1);
5123 tree arg1_type = TREE_TYPE (arg1);
5124 tree tem;
5126 STRIP_NOPS (arg1);
5127 STRIP_NOPS (arg2);
5129 /* If we have A op 0 ? A : -A, consider applying the following
5130 transformations:
5132 A == 0? A : -A same as -A
5133 A != 0? A : -A same as A
5134 A >= 0? A : -A same as abs (A)
5135 A > 0? A : -A same as abs (A)
5136 A <= 0? A : -A same as -abs (A)
5137 A < 0? A : -A same as -abs (A)
5139 None of these transformations work for modes with signed
5140 zeros. If A is +/-0, the first two transformations will
5141 change the sign of the result (from +0 to -0, or vice
5142 versa). The last four will fix the sign of the result,
5143 even though the original expressions could be positive or
5144 negative, depending on the sign of A.
5146 Note that all these transformations are correct if A is
5147 NaN, since the two alternatives (A and -A) are also NaNs. */
5148 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5149 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5150 ? real_zerop (arg01)
5151 : integer_zerop (arg01))
5152 && ((TREE_CODE (arg2) == NEGATE_EXPR
5153 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5154 /* In the case that A is of the form X-Y, '-A' (arg2) may
5155 have already been folded to Y-X, check for that. */
5156 || (TREE_CODE (arg1) == MINUS_EXPR
5157 && TREE_CODE (arg2) == MINUS_EXPR
5158 && operand_equal_p (TREE_OPERAND (arg1, 0),
5159 TREE_OPERAND (arg2, 1), 0)
5160 && operand_equal_p (TREE_OPERAND (arg1, 1),
5161 TREE_OPERAND (arg2, 0), 0))))
5162 switch (comp_code)
5164 case EQ_EXPR:
5165 case UNEQ_EXPR:
5166 tem = fold_convert_loc (loc, arg1_type, arg1);
5167 return fold_convert_loc (loc, type, negate_expr (tem));
5168 case NE_EXPR:
5169 case LTGT_EXPR:
5170 return fold_convert_loc (loc, type, arg1);
5171 case UNGE_EXPR:
5172 case UNGT_EXPR:
5173 if (flag_trapping_math)
5174 break;
5175 /* Fall through. */
5176 case GE_EXPR:
5177 case GT_EXPR:
5178 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5179 break;
5180 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5181 return fold_convert_loc (loc, type, tem);
5182 case UNLE_EXPR:
5183 case UNLT_EXPR:
5184 if (flag_trapping_math)
5185 break;
5186 /* FALLTHRU */
5187 case LE_EXPR:
5188 case LT_EXPR:
5189 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5190 break;
5191 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5192 return negate_expr (fold_convert_loc (loc, type, tem));
5193 default:
5194 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5195 break;
5198 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5199 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5200 both transformations are correct when A is NaN: A != 0
5201 is then true, and A == 0 is false. */
5203 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5204 && integer_zerop (arg01) && integer_zerop (arg2))
5206 if (comp_code == NE_EXPR)
5207 return fold_convert_loc (loc, type, arg1);
5208 else if (comp_code == EQ_EXPR)
5209 return build_zero_cst (type);
5212 /* Try some transformations of A op B ? A : B.
5214 A == B? A : B same as B
5215 A != B? A : B same as A
5216 A >= B? A : B same as max (A, B)
5217 A > B? A : B same as max (B, A)
5218 A <= B? A : B same as min (A, B)
5219 A < B? A : B same as min (B, A)
5221 As above, these transformations don't work in the presence
5222 of signed zeros. For example, if A and B are zeros of
5223 opposite sign, the first two transformations will change
5224 the sign of the result. In the last four, the original
5225 expressions give different results for (A=+0, B=-0) and
5226 (A=-0, B=+0), but the transformed expressions do not.
5228 The first two transformations are correct if either A or B
5229 is a NaN. In the first transformation, the condition will
5230 be false, and B will indeed be chosen. In the case of the
5231 second transformation, the condition A != B will be true,
5232 and A will be chosen.
5234 The conversions to max() and min() are not correct if B is
5235 a number and A is not. The conditions in the original
5236 expressions will be false, so all four give B. The min()
5237 and max() versions would give a NaN instead. */
5238 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5239 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5240 /* Avoid these transformations if the COND_EXPR may be used
5241 as an lvalue in the C++ front-end. PR c++/19199. */
5242 && (in_gimple_form
5243 || VECTOR_TYPE_P (type)
5244 || (! lang_GNU_CXX ()
5245 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5246 || ! maybe_lvalue_p (arg1)
5247 || ! maybe_lvalue_p (arg2)))
5249 tree comp_op0 = arg00;
5250 tree comp_op1 = arg01;
5251 tree comp_type = TREE_TYPE (comp_op0);
5253 switch (comp_code)
5255 case EQ_EXPR:
5256 return fold_convert_loc (loc, type, arg2);
5257 case NE_EXPR:
5258 return fold_convert_loc (loc, type, arg1);
5259 case LE_EXPR:
5260 case LT_EXPR:
5261 case UNLE_EXPR:
5262 case UNLT_EXPR:
5263 /* In C++ a ?: expression can be an lvalue, so put the
5264 operand which will be used if they are equal first
5265 so that we can convert this back to the
5266 corresponding COND_EXPR. */
5267 if (!HONOR_NANS (arg1))
5269 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5270 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5271 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5272 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5273 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5274 comp_op1, comp_op0);
5275 return fold_convert_loc (loc, type, tem);
5277 break;
5278 case GE_EXPR:
5279 case GT_EXPR:
5280 case UNGE_EXPR:
5281 case UNGT_EXPR:
5282 if (!HONOR_NANS (arg1))
5284 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5285 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5286 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5287 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5288 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5289 comp_op1, comp_op0);
5290 return fold_convert_loc (loc, type, tem);
5292 break;
5293 case UNEQ_EXPR:
5294 if (!HONOR_NANS (arg1))
5295 return fold_convert_loc (loc, type, arg2);
5296 break;
5297 case LTGT_EXPR:
5298 if (!HONOR_NANS (arg1))
5299 return fold_convert_loc (loc, type, arg1);
5300 break;
5301 default:
5302 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5303 break;
5307 return NULL_TREE;
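
/* A standalone sketch (not part of this file) of two entries from
   the tables above, for integers, where signed zeros and NaNs do not
   arise: A > 0 ? A : -A is abs, and A < B ? A : B is min.  The test
   values deliberately avoid INT_MIN, whose negation overflows.  */

#include <assert.h>
#include <stdlib.h>

int
main (void)
{
  int vals[] = { -7, -1, 0, 1, 42 };
  for (unsigned i = 0; i < sizeof vals / sizeof *vals; i++)
    for (unsigned j = 0; j < sizeof vals / sizeof *vals; j++)
      {
	int a = vals[i], b = vals[j];
	int m = a < b ? a : b;
	assert ((a > 0 ? a : -a) == abs (a));
	assert (m <= a && m <= b && (m == a || m == b));
      }
  return 0;
}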
5312 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5313 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5314 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5315 false) >= 2)
5316 #endif
5318 /* EXP is some logical combination of boolean tests. See if we can
5319 merge it into some range test. Return the new tree if so. */
5321 static tree
5322 fold_range_test (location_t loc, enum tree_code code, tree type,
5323 tree op0, tree op1)
5325 int or_op = (code == TRUTH_ORIF_EXPR
5326 || code == TRUTH_OR_EXPR);
5327 int in0_p, in1_p, in_p;
5328 tree low0, low1, low, high0, high1, high;
5329 bool strict_overflow_p = false;
5330 tree tem, lhs, rhs;
5331 const char * const warnmsg = G_("assuming signed overflow does not occur "
5332 "when simplifying range test");
5334 if (!INTEGRAL_TYPE_P (type))
5335 return 0;
5337 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5338 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5340 /* If this is an OR operation, invert both sides; we will invert
5341 again at the end. */
5342 if (or_op)
5343 in0_p = ! in0_p, in1_p = ! in1_p;
5345 /* If both expressions are the same, if we can merge the ranges, and we
5346 can build the range test, return it or it inverted. If one of the
5347 ranges is always true or always false, consider it to be the same
5348 expression as the other. */
5349 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5350 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5351 in1_p, low1, high1)
5352 && 0 != (tem = (build_range_check (loc, type,
5353 lhs != 0 ? lhs
5354 : rhs != 0 ? rhs : integer_zero_node,
5355 in_p, low, high))))
5357 if (strict_overflow_p)
5358 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5359 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5362 /* On machines where the branch cost is expensive, if this is a
5363 short-circuited branch and the underlying object on both sides
5364 is the same, make a non-short-circuit operation. */
5365 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5366 && lhs != 0 && rhs != 0
5367 && (code == TRUTH_ANDIF_EXPR
5368 || code == TRUTH_ORIF_EXPR)
5369 && operand_equal_p (lhs, rhs, 0))
5371 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5372 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5373 which cases we can't do this. */
5374 if (simple_operand_p (lhs))
5375 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5376 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5377 type, op0, op1);
5379 else if (!lang_hooks.decls.global_bindings_p ()
5380 && !CONTAINS_PLACEHOLDER_P (lhs))
5382 tree common = save_expr (lhs);
5384 if (0 != (lhs = build_range_check (loc, type, common,
5385 or_op ? ! in0_p : in0_p,
5386 low0, high0))
5387 && (0 != (rhs = build_range_check (loc, type, common,
5388 or_op ? ! in1_p : in1_p,
5389 low1, high1))))
5391 if (strict_overflow_p)
5392 fold_overflow_warning (warnmsg,
5393 WARN_STRICT_OVERFLOW_COMPARISON);
5394 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5395 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5396 type, lhs, rhs);
5401 return 0;
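
/* A standalone sketch (not part of this file): when both comparisons
   are free of side effects, the short-circuit and the
   non-short-circuit (bitwise) forms agree, since each comparison
   yields 0 or 1.  That equivalence is what the rewrite above relies
   on when branches are expensive.  */

#include <assert.h>

int
main (void)
{
  for (int x = -20; x <= 20; x++)
    assert (((x > 0) && (x < 10)) == ((x > 0) & (x < 10)));
  return 0;
}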
5404 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5405 bit value. Arrange things so the extra bits will be set to zero if and
5406 only if C is sign-extended to its full width. If MASK is nonzero,
5407 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5409 static tree
5410 unextend (tree c, int p, int unsignedp, tree mask)
5412 tree type = TREE_TYPE (c);
5413 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5414 tree temp;
5416 if (p == modesize || unsignedp)
5417 return c;
5419 /* We work by getting just the sign bit into the low-order bit, then
5420 into the high-order bit, then sign-extend. We then XOR that value
5421 with C. */
5422 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5424 /* We must use a signed type in order to get an arithmetic right shift.
5425 However, we must also avoid introducing accidental overflows, so that
5426 a subsequent call to integer_zerop will work. Hence we must
5427 do the type conversion here. At this point, the constant is either
5428 zero or one, and the conversion to a signed type can never overflow.
5429 We could get an overflow if this conversion is done anywhere else. */
5430 if (TYPE_UNSIGNED (type))
5431 temp = fold_convert (signed_type_for (type), temp);
5433 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5434 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5435 if (mask != 0)
5436 temp = const_binop (BIT_AND_EXPR, temp,
5437 fold_convert (TREE_TYPE (c), mask));
5438 /* If necessary, convert the type back to match the type of C. */
5439 if (TYPE_UNSIGNED (type))
5440 temp = fold_convert (type, temp);
5442 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
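
/* A standalone sketch (not part of this file) of the underlying
   shift-up/arithmetic-shift-down sign extension that the comment
   above builds on, for a 4-bit value in a 32-bit word.  (Right
   shifting a negative signed value is arithmetic on the usual
   targets; strictly speaking it is implementation-defined C.)  */

#include <assert.h>
#include <stdint.h>

static int32_t
sign_extend (uint32_t c, int p)
{
  int shift = 32 - p;
  return (int32_t) (c << shift) >> shift;
}

int
main (void)
{
  assert (sign_extend (0xA, 4) == -6);  /* 1010b -> ...11111010b */
  assert (sign_extend (0x5, 4) == 5);   /* 0101b is unchanged */
  return 0;
}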
5445 /* For an expression that has the form
5446 (A && B) || ~B
5447 or
5448 (A || B) && ~B,
5449 we can drop one of the inner expressions and simplify to
5450 A || ~B
5451 or
5452 A && ~B
5453 LOC is the location of the resulting expression. OP is the inner
5454 logical operation; the left-hand side in the examples above, while CMPOP
5455 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5456 removing a condition that guards another, as in
5457 (A != NULL && A->...) || A == NULL
5458 which we must not transform. If RHS_ONLY is true, only eliminate the
5459 right-most operand of the inner logical operation. */
5461 static tree
5462 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5463 bool rhs_only)
5465 tree type = TREE_TYPE (cmpop);
5466 enum tree_code code = TREE_CODE (cmpop);
5467 enum tree_code truthop_code = TREE_CODE (op);
5468 tree lhs = TREE_OPERAND (op, 0);
5469 tree rhs = TREE_OPERAND (op, 1);
5470 tree orig_lhs = lhs, orig_rhs = rhs;
5471 enum tree_code rhs_code = TREE_CODE (rhs);
5472 enum tree_code lhs_code = TREE_CODE (lhs);
5473 enum tree_code inv_code;
5475 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5476 return NULL_TREE;
5478 if (TREE_CODE_CLASS (code) != tcc_comparison)
5479 return NULL_TREE;
5481 if (rhs_code == truthop_code)
5483 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5484 if (newrhs != NULL_TREE)
5486 rhs = newrhs;
5487 rhs_code = TREE_CODE (rhs);
5490 if (lhs_code == truthop_code && !rhs_only)
5492 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5493 if (newlhs != NULL_TREE)
5495 lhs = newlhs;
5496 lhs_code = TREE_CODE (lhs);
5500 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5501 if (inv_code == rhs_code
5502 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5503 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5504 return lhs;
5505 if (!rhs_only && inv_code == lhs_code
5506 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5507 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5508 return rhs;
5509 if (rhs != orig_rhs || lhs != orig_lhs)
5510 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5511 lhs, rhs);
5512 return NULL_TREE;
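
/* A standalone sketch (not part of this file): the boolean identity
   behind the simplification, checked over all combinations.  The
   RHS_ONLY restriction above is not about this identity but about
   evaluation order: rewriting (A != NULL && A->f) || A == NULL to
   (A->f || A == NULL) would dereference a null pointer.  */

#include <assert.h>

int
main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      {
	assert (((a && b) || !b) == (a || !b));
	assert (((a || b) && !b) == (a && !b));
      }
  return 0;
}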
5515 /* Find ways of folding logical expressions of LHS and RHS:
5516 Try to merge two comparisons to the same innermost item.
5517 Look for range tests like "ch >= '0' && ch <= '9'".
5518 Look for combinations of simple terms on machines with expensive branches
5519 and evaluate the RHS unconditionally.
5521 For example, if we have p->a == 2 && p->b == 4 and we can make an
5522 object large enough to span both A and B, we can do this with a comparison
5523 against the object ANDed with a mask.
5525 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5526 operations to do this with one comparison.
5528 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5529 function and the one above.
5531 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5532 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5534 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5535 two operands.
5537 We return the simplified tree or 0 if no optimization is possible. */
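
/* A standalone sketch (not part of this file) of the kind of merge
   described above: two adjacent byte-sized members tested with one
   16-bit load and compare.  The combined constant is built here with
   memcpy so the sketch is endianness-neutral; the real code must
   compute it explicitly for the target's byte order.  */

#include <assert.h>
#include <stdint.h>
#include <string.h>

struct two_bytes { uint8_t a, b; };

int
main (void)
{
  struct two_bytes v = { 2, 4 }, k = { 2, 4 };
  uint16_t vw, kw;
  memcpy (&vw, &v, sizeof vw);
  memcpy (&kw, &k, sizeof kw);

  /* One word-sized comparison replaces two member comparisons.  */
  assert ((v.a == 2 && v.b == 4) == (vw == kw));
  return 0;
}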
5539 static tree
5540 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5541 tree lhs, tree rhs)
5543 /* If this is the "or" of two comparisons, we can do something if
5544 the comparisons are NE_EXPR. If this is the "and", we can do something
5545 if the comparisons are EQ_EXPR. I.e.,
5546 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5548 WANTED_CODE is this operation code. For single bit fields, we can
5549 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5550 comparison for one-bit fields. */
5552 enum tree_code wanted_code;
5553 enum tree_code lcode, rcode;
5554 tree ll_arg, lr_arg, rl_arg, rr_arg;
5555 tree ll_inner, lr_inner, rl_inner, rr_inner;
5556 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5557 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5558 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5559 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5560 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5561 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5562 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5563 machine_mode lnmode, rnmode;
5564 tree ll_mask, lr_mask, rl_mask, rr_mask;
5565 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5566 tree l_const, r_const;
5567 tree lntype, rntype, result;
5568 HOST_WIDE_INT first_bit, end_bit;
5569 int volatilep;
5571 /* Start by getting the comparison codes. Fail if anything is volatile.
5572 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5573 it were surrounded with a NE_EXPR. */
5575 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5576 return 0;
5578 lcode = TREE_CODE (lhs);
5579 rcode = TREE_CODE (rhs);
5581 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5583 lhs = build2 (NE_EXPR, truth_type, lhs,
5584 build_int_cst (TREE_TYPE (lhs), 0));
5585 lcode = NE_EXPR;
5588 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5590 rhs = build2 (NE_EXPR, truth_type, rhs,
5591 build_int_cst (TREE_TYPE (rhs), 0));
5592 rcode = NE_EXPR;
5595 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5596 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5597 return 0;
5599 ll_arg = TREE_OPERAND (lhs, 0);
5600 lr_arg = TREE_OPERAND (lhs, 1);
5601 rl_arg = TREE_OPERAND (rhs, 0);
5602 rr_arg = TREE_OPERAND (rhs, 1);
5604 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5605 if (simple_operand_p (ll_arg)
5606 && simple_operand_p (lr_arg))
5608 if (operand_equal_p (ll_arg, rl_arg, 0)
5609 && operand_equal_p (lr_arg, rr_arg, 0))
5611 result = combine_comparisons (loc, code, lcode, rcode,
5612 truth_type, ll_arg, lr_arg);
5613 if (result)
5614 return result;
5616 else if (operand_equal_p (ll_arg, rr_arg, 0)
5617 && operand_equal_p (lr_arg, rl_arg, 0))
5619 result = combine_comparisons (loc, code, lcode,
5620 swap_tree_comparison (rcode),
5621 truth_type, ll_arg, lr_arg);
5622 if (result)
5623 return result;
5627 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5628 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5630 /* If the RHS can be evaluated unconditionally and its operands are
5631 simple, it wins to evaluate the RHS unconditionally on machines
5632 with expensive branches. In this case, this isn't a comparison
5633 that can be merged. */
5635 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5636 false) >= 2
5637 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5638 && simple_operand_p (rl_arg)
5639 && simple_operand_p (rr_arg))
5641 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5642 if (code == TRUTH_OR_EXPR
5643 && lcode == NE_EXPR && integer_zerop (lr_arg)
5644 && rcode == NE_EXPR && integer_zerop (rr_arg)
5645 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5646 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5647 return build2_loc (loc, NE_EXPR, truth_type,
5648 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5649 ll_arg, rl_arg),
5650 build_int_cst (TREE_TYPE (ll_arg), 0));
5652 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5653 if (code == TRUTH_AND_EXPR
5654 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5655 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5656 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5657 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5658 return build2_loc (loc, EQ_EXPR, truth_type,
5659 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5660 ll_arg, rl_arg),
5661 build_int_cst (TREE_TYPE (ll_arg), 0));
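	  /* Illustrative example (not part of the original source): for
	     `int a, b;' the two transforms above rewrite
	         (a != 0) || (b != 0)  into  (a | b) != 0
	         (a == 0) && (b == 0)  into  (a | b) == 0
	     trading the second test and branch for a single bitwise OR.  */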
5664 /* See if the comparisons can be merged. Then get all the parameters for
5665 each side. */
5667 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5668 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5669 return 0;
5671 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5672 volatilep = 0;
5673 ll_inner = decode_field_reference (loc, &ll_arg,
5674 &ll_bitsize, &ll_bitpos, &ll_mode,
5675 &ll_unsignedp, &ll_reversep, &volatilep,
5676 &ll_mask, &ll_and_mask);
5677 lr_inner = decode_field_reference (loc, &lr_arg,
5678 &lr_bitsize, &lr_bitpos, &lr_mode,
5679 &lr_unsignedp, &lr_reversep, &volatilep,
5680 &lr_mask, &lr_and_mask);
5681 rl_inner = decode_field_reference (loc, &rl_arg,
5682 &rl_bitsize, &rl_bitpos, &rl_mode,
5683 &rl_unsignedp, &rl_reversep, &volatilep,
5684 &rl_mask, &rl_and_mask);
5685 rr_inner = decode_field_reference (loc, &rr_arg,
5686 &rr_bitsize, &rr_bitpos, &rr_mode,
5687 &rr_unsignedp, &rr_reversep, &volatilep,
5688 &rr_mask, &rr_and_mask);
5690 /* The inner operation on the lhs of each comparison must be the same
5691 if we are to be able to do anything.
5692 Then see if we have constants. If not, the same must be true for
5693 the rhs's. */
5694 if (volatilep
5695 || ll_reversep != rl_reversep
5696 || ll_inner == 0 || rl_inner == 0
5697 || ! operand_equal_p (ll_inner, rl_inner, 0))
5698 return 0;
5700 if (TREE_CODE (lr_arg) == INTEGER_CST
5701 && TREE_CODE (rr_arg) == INTEGER_CST)
5703 l_const = lr_arg, r_const = rr_arg;
5704 lr_reversep = ll_reversep;
5706 else if (lr_reversep != rr_reversep
5707 || lr_inner == 0 || rr_inner == 0
5708 || ! operand_equal_p (lr_inner, rr_inner, 0))
5709 return 0;
5710 else
5711 l_const = r_const = 0;
5713 /* If either comparison code is not correct for our logical operation,
5714 fail. However, we can convert a one-bit comparison against zero into
5715 the opposite comparison against that bit being set in the field. */
5717 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5718 if (lcode != wanted_code)
5720 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5722 /* Make the left operand unsigned, since we are only interested
5723 in the value of one bit. Otherwise we are doing the wrong
5724 thing below. */
5725 ll_unsignedp = 1;
5726 l_const = ll_mask;
5728 else
5729 return 0;
5732 /* This is analogous to the code for l_const above. */
5733 if (rcode != wanted_code)
5735 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5737 rl_unsignedp = 1;
5738 r_const = rl_mask;
5740 else
5741 return 0;
5744 /* See if we can find a mode that contains both fields being compared on
5745 the left. If we can't, fail. Otherwise, update all constants and masks
5746 to be relative to a field of that size. */
5747 first_bit = MIN (ll_bitpos, rl_bitpos);
5748 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5749 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5750 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5751 volatilep);
5752 if (lnmode == VOIDmode)
5753 return 0;
5755 lnbitsize = GET_MODE_BITSIZE (lnmode);
5756 lnbitpos = first_bit & ~ (lnbitsize - 1);
5757 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5758 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5760 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5762 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5763 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5766 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5767 size_int (xll_bitpos));
5768 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5769 size_int (xrl_bitpos));
5771 if (l_const)
5773 l_const = fold_convert_loc (loc, lntype, l_const);
5774 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5775 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5776 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5777 fold_build1_loc (loc, BIT_NOT_EXPR,
5778 lntype, ll_mask))))
5780 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5782 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5785 if (r_const)
5787 r_const = fold_convert_loc (loc, lntype, r_const);
5788 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5789 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5790 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5791 fold_build1_loc (loc, BIT_NOT_EXPR,
5792 lntype, rl_mask))))
5794 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5796 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5800 /* If the right sides are not constant, do the same for them. Also,
5801 disallow this optimization if a size or signedness mismatch occurs
5802 between the left and right sides. */
5803 if (l_const == 0)
5805 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5806 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5807 /* Make sure the two fields on the right
5808 correspond to the left without being swapped. */
5809 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5810 return 0;
5812 first_bit = MIN (lr_bitpos, rr_bitpos);
5813 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5814 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5815 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5816 volatilep);
5817 if (rnmode == VOIDmode)
5818 return 0;
5820 rnbitsize = GET_MODE_BITSIZE (rnmode);
5821 rnbitpos = first_bit & ~ (rnbitsize - 1);
5822 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5823 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5825 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5827 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5828 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5831 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5832 rntype, lr_mask),
5833 size_int (xlr_bitpos));
5834 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5835 rntype, rr_mask),
5836 size_int (xrr_bitpos));
5838 /* Make a mask that corresponds to both fields being compared.
5839 Do this for both items being compared. If the operands are the
5840 same size and the bits being compared are in the same position
5841 then we can do this by masking both and comparing the masked
5842 results. */
5843 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5844 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5845 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5847 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
5848 lntype, lnbitsize, lnbitpos,
5849 ll_unsignedp || rl_unsignedp, ll_reversep);
5850 if (! all_ones_mask_p (ll_mask, lnbitsize))
5851 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5853 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
5854 rntype, rnbitsize, rnbitpos,
5855 lr_unsignedp || rr_unsignedp, lr_reversep);
5856 if (! all_ones_mask_p (lr_mask, rnbitsize))
5857 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5859 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5862 /* There is still another way we can do something: If both pairs of
5863 fields being compared are adjacent, we may be able to make a wider
5864 field containing them both.
5866 Note that we still must mask the lhs/rhs expressions. Furthermore,
5867 the mask must be shifted to account for the shift done by
5868 make_bit_field_ref. */
5869 if ((ll_bitsize + ll_bitpos == rl_bitpos
5870 && lr_bitsize + lr_bitpos == rr_bitpos)
5871 || (ll_bitpos == rl_bitpos + rl_bitsize
5872 && lr_bitpos == rr_bitpos + rr_bitsize))
5874 tree type;
5876 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
5877 ll_bitsize + rl_bitsize,
5878 MIN (ll_bitpos, rl_bitpos),
5879 ll_unsignedp, ll_reversep);
5880 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
5881 lr_bitsize + rr_bitsize,
5882 MIN (lr_bitpos, rr_bitpos),
5883 lr_unsignedp, lr_reversep);
5885 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5886 size_int (MIN (xll_bitpos, xrl_bitpos)));
5887 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5888 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5890 /* Convert to the smaller type before masking out unwanted bits. */
5891 type = lntype;
5892 if (lntype != rntype)
5894 if (lnbitsize > rnbitsize)
5896 lhs = fold_convert_loc (loc, rntype, lhs);
5897 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5898 type = rntype;
5900 else if (lnbitsize < rnbitsize)
5902 rhs = fold_convert_loc (loc, lntype, rhs);
5903 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5904 type = lntype;
5908 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5909 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5911 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5912 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5914 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5917 return 0;
5920 /* Handle the case of comparisons with constants. If there is something in
5921 common between the masks, those bits of the constants must be the same.
5922 If not, the condition is always false. Test for this to avoid generating
5923 incorrect code below. */
5924 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5925 if (! integer_zerop (result)
5926 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5927 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5929 if (wanted_code == NE_EXPR)
5931 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5932 return constant_boolean_node (true, truth_type);
5934 else
5936 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5937 return constant_boolean_node (false, truth_type);
5941 /* Construct the expression we will return. First get the component
5942 reference we will make. Unless the mask is all ones the width of
5943 that field, perform the mask operation. Then compare with the
5944 merged constant. */
5945 result = make_bit_field_ref (loc, ll_inner, ll_arg,
5946 lntype, lnbitsize, lnbitpos,
5947 ll_unsignedp || rl_unsignedp, ll_reversep);
5949 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5950 if (! all_ones_mask_p (ll_mask, lnbitsize))
5951 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5953 return build2_loc (loc, wanted_code, truth_type, result,
5954 const_binop (BIT_IOR_EXPR, l_const, r_const));
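/* Illustrative sketch (not part of the original source): the effect of the
   merging above can be seen on plain masks.  With both "fields" packed into
   one byte, low nibble and high nibble, the pair of tests collapses to a
   single masked compare:

       int forms_agree (unsigned x)
       {
         int before = (x & 0x0f) == 0x02 && ((x >> 4) & 0x0f) == 0x04;
         int after  = (x & 0xff) == 0x42;
         return before == after;   returns 1 for every x
       }

   which is the spirit of "p->a == 2 && p->b == 4" becoming one comparison
   against the containing word ANDed with a mask.  */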
5957 /* T is an integer expression that is being multiplied by, divided by, or
5958 taken modulo a constant C (CODE says which operation and what kind of
5959 division or modulus). See if we can eliminate that operation by folding it with
5960 other operations already in T. WIDE_TYPE, if non-null, is a type that
5961 should be used for the computation if wider than our type.
5963 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5964 (X * 2) + (Y * 4). We must, however, be assured that either the original
5965 expression would not overflow or that overflow is undefined for the type
5966 in the language in question.
5968 If we return a non-null expression, it is an equivalent form of the
5969 original computation, but need not be in the original type.
5971 We set *STRICT_OVERFLOW_P to true if the return value depends on
5972 signed overflow being undefined. Otherwise we do not change
5973 *STRICT_OVERFLOW_P. */
5975 static tree
5976 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5977 bool *strict_overflow_p)
5979 /* To avoid exponential search depth, refuse to allow recursion past
5980 three levels. Beyond that (1) it's highly unlikely that we'll find
5981 something interesting and (2) we've probably processed it before
5982 when we built the inner expression. */
5984 static int depth;
5985 tree ret;
5987 if (depth > 3)
5988 return NULL;
5990 depth++;
5991 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5992 depth--;
5994 return ret;
5997 static tree
5998 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5999 bool *strict_overflow_p)
6001 tree type = TREE_TYPE (t);
6002 enum tree_code tcode = TREE_CODE (t);
6003 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6004 > GET_MODE_SIZE (TYPE_MODE (type)))
6005 ? wide_type : type);
6006 tree t1, t2;
6007 int same_p = tcode == code;
6008 tree op0 = NULL_TREE, op1 = NULL_TREE;
6009 bool sub_strict_overflow_p;
6011 /* Don't deal with constants of zero here; they confuse the code below. */
6012 if (integer_zerop (c))
6013 return NULL_TREE;
6015 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6016 op0 = TREE_OPERAND (t, 0);
6018 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6019 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6021 /* Note that we need not handle conditional operations here since fold
6022 already handles those cases. So just do arithmetic here. */
6023 switch (tcode)
6025 case INTEGER_CST:
6026 /* For a constant, we can always simplify if we are a multiply
6027 or (for divide and modulus) if it is a multiple of our constant. */
6028 if (code == MULT_EXPR
6029 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6031 tree tem = const_binop (code, fold_convert (ctype, t),
6032 fold_convert (ctype, c));
6033 /* If the multiplication overflowed, we lost information on it.
6034 See PR68142 and PR69845. */
6035 if (TREE_OVERFLOW (tem))
6036 return NULL_TREE;
6037 return tem;
6039 break;
6041 CASE_CONVERT: case NON_LVALUE_EXPR:
6042 /* If op0 is an expression ... */
6043 if ((COMPARISON_CLASS_P (op0)
6044 || UNARY_CLASS_P (op0)
6045 || BINARY_CLASS_P (op0)
6046 || VL_EXP_CLASS_P (op0)
6047 || EXPRESSION_CLASS_P (op0))
6048 /* ... and has wrapping overflow, and its type is smaller
6049 than ctype, then we cannot pass through as widening. */
6050 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6051 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6052 && (TYPE_PRECISION (ctype)
6053 > TYPE_PRECISION (TREE_TYPE (op0))))
6054 /* ... or this is a truncation (t is narrower than op0),
6055 then we cannot pass through this narrowing. */
6056 || (TYPE_PRECISION (type)
6057 < TYPE_PRECISION (TREE_TYPE (op0)))
6058 /* ... or signedness changes for division or modulus,
6059 then we cannot pass through this conversion. */
6060 || (code != MULT_EXPR
6061 && (TYPE_UNSIGNED (ctype)
6062 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6063 /* ... or the inner type has undefined overflow while the type
6064 converted to has not, in which case we cannot do the operation
6065 in the inner type as that would introduce undefined overflow. */
6066 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6067 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6068 && !TYPE_OVERFLOW_UNDEFINED (type))))
6069 break;
6071 /* Pass the constant down and see if we can make a simplification. If
6072 we can, replace this expression with the inner simplification for
6073 possible later conversion to our or some other type. */
6074 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6075 && TREE_CODE (t2) == INTEGER_CST
6076 && !TREE_OVERFLOW (t2)
6077 && (0 != (t1 = extract_muldiv (op0, t2, code,
6078 code == MULT_EXPR
6079 ? ctype : NULL_TREE,
6080 strict_overflow_p))))
6081 return t1;
6082 break;
6084 case ABS_EXPR:
6085 /* If widening the type changes it from signed to unsigned, then we
6086 must avoid building ABS_EXPR itself as unsigned. */
6087 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6089 tree cstype = (*signed_type_for) (ctype);
6090 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6091 != 0)
6093 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6094 return fold_convert (ctype, t1);
6096 break;
6098 /* If the constant is negative, we cannot simplify this. */
6099 if (tree_int_cst_sgn (c) == -1)
6100 break;
6101 /* FALLTHROUGH */
6102 case NEGATE_EXPR:
6103 /* For division and modulus, type can't be unsigned, as e.g.
6104 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6105 For signed types, even with wrapping overflow, this is fine. */
6106 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6107 break;
6108 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6109 != 0)
6110 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6111 break;
6113 case MIN_EXPR: case MAX_EXPR:
6114 /* If widening the type changes the signedness, then we can't perform
6115 this optimization as that changes the result. */
6116 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6117 break;
6119 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6120 sub_strict_overflow_p = false;
6121 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6122 &sub_strict_overflow_p)) != 0
6123 && (t2 = extract_muldiv (op1, c, code, wide_type,
6124 &sub_strict_overflow_p)) != 0)
6126 if (tree_int_cst_sgn (c) < 0)
6127 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6128 if (sub_strict_overflow_p)
6129 *strict_overflow_p = true;
6130 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6131 fold_convert (ctype, t2));
6133 break;
6135 case LSHIFT_EXPR: case RSHIFT_EXPR:
6136 /* If the second operand is constant, this is a multiplication
6137 or floor division by a power of two, so we can treat it that
6138 way unless the multiplier or divisor overflows. Signed
6139 left-shift overflow is implementation-defined rather than
6140 undefined in C90, so do not convert signed left shift into
6141 multiplication. */
6142 if (TREE_CODE (op1) == INTEGER_CST
6143 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6144 /* const_binop may not detect overflow correctly,
6145 so check for it explicitly here. */
6146 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6147 && 0 != (t1 = fold_convert (ctype,
6148 const_binop (LSHIFT_EXPR,
6149 size_one_node,
6150 op1)))
6151 && !TREE_OVERFLOW (t1))
6152 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6153 ? MULT_EXPR : FLOOR_DIV_EXPR,
6154 ctype,
6155 fold_convert (ctype, op0),
6156 t1),
6157 c, code, wide_type, strict_overflow_p);
6158 break;
6160 case PLUS_EXPR: case MINUS_EXPR:
6161 /* See if we can eliminate the operation on both sides. If we can, we
6162 can return a new PLUS or MINUS. If we can't, the only remaining
6163 cases where we can do anything are if the second operand is a
6164 constant. */
6165 sub_strict_overflow_p = false;
6166 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6167 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6168 if (t1 != 0 && t2 != 0
6169 && (code == MULT_EXPR
6170 /* If not multiplication, we can only do this if both operands
6171 are divisible by c. */
6172 || (multiple_of_p (ctype, op0, c)
6173 && multiple_of_p (ctype, op1, c))))
6175 if (sub_strict_overflow_p)
6176 *strict_overflow_p = true;
6177 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6178 fold_convert (ctype, t2));
6181 /* If this was a subtraction, negate OP1 and set it to be an addition.
6182 This simplifies the logic below. */
6183 if (tcode == MINUS_EXPR)
6185 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6186 /* If OP1 was not easily negatable, the constant may be OP0. */
6187 if (TREE_CODE (op0) == INTEGER_CST)
6189 std::swap (op0, op1);
6190 std::swap (t1, t2);
6194 if (TREE_CODE (op1) != INTEGER_CST)
6195 break;
6197 /* If either OP1 or C is negative, this optimization is not safe for
6198 some of the division and remainder types while for others we need
6199 to change the code. */
6200 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6202 if (code == CEIL_DIV_EXPR)
6203 code = FLOOR_DIV_EXPR;
6204 else if (code == FLOOR_DIV_EXPR)
6205 code = CEIL_DIV_EXPR;
6206 else if (code != MULT_EXPR
6207 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6208 break;
6211 /* If it's a multiply or a division/modulus operation of a multiple
6212 of our constant, do the operation and verify it doesn't overflow. */
6213 if (code == MULT_EXPR
6214 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6216 op1 = const_binop (code, fold_convert (ctype, op1),
6217 fold_convert (ctype, c));
6218 /* We allow the constant to overflow with wrapping semantics. */
6219 if (op1 == 0
6220 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6221 break;
6223 else
6224 break;
6226 /* If we have an unsigned type, we cannot widen the operation since it
6227 will change the result if the original computation overflowed. */
6228 if (TYPE_UNSIGNED (ctype) && ctype != type)
6229 break;
6231 /* If we were able to eliminate our operation from the first side,
6232 apply our operation to the second side and reform the PLUS. */
6233 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6234 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6236 /* The last case is if we are a multiply. In that case, we can
6237 apply the distributive law to commute the multiply and addition
6238 if the multiplication of the constants doesn't overflow
6239 and overflow is defined. With undefined overflow
6240 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6241 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6242 return fold_build2 (tcode, ctype,
6243 fold_build2 (code, ctype,
6244 fold_convert (ctype, op0),
6245 fold_convert (ctype, c)),
6246 op1);
6248 break;
6250 case MULT_EXPR:
6251 /* We have a special case here if we are doing something like
6252 (C * 8) % 4 since we know that's zero. */
6253 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6254 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6255 /* If the multiplication can overflow we cannot optimize this. */
6256 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6257 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6258 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6260 *strict_overflow_p = true;
6261 return omit_one_operand (type, integer_zero_node, op0);
6264 /* ... fall through ... */
6266 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6267 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6268 /* If we can extract our operation from the LHS, do so and return a
6269 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6270 do something only if the second operand is a constant. */
6271 if (same_p
6272 && (t1 = extract_muldiv (op0, c, code, wide_type,
6273 strict_overflow_p)) != 0)
6274 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6275 fold_convert (ctype, op1));
6276 else if (tcode == MULT_EXPR && code == MULT_EXPR
6277 && (t1 = extract_muldiv (op1, c, code, wide_type,
6278 strict_overflow_p)) != 0)
6279 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6280 fold_convert (ctype, t1));
6281 else if (TREE_CODE (op1) != INTEGER_CST)
6282 return 0;
6284 /* If these are the same operation types, we can associate them
6285 assuming no overflow. */
6286 if (tcode == code)
6288 bool overflow_p = false;
6289 bool overflow_mul_p;
6290 signop sign = TYPE_SIGN (ctype);
6291 unsigned prec = TYPE_PRECISION (ctype);
6292 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6293 wi::to_wide (c, prec),
6294 sign, &overflow_mul_p);
6295 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6296 if (overflow_mul_p
6297 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6298 overflow_p = true;
6299 if (!overflow_p)
6300 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6301 wide_int_to_tree (ctype, mul));
6304 /* If these operations "cancel" each other, we have the main
6305 optimizations of this pass, which occur when either constant is a
6306 multiple of the other, in which case we replace this with an operation
6307 of either CODE or TCODE.
6309 If we have an unsigned type, we cannot do this since it will change
6310 the result if the original computation overflowed. */
6311 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6312 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6313 || (tcode == MULT_EXPR
6314 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6315 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6316 && code != MULT_EXPR)))
6318 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6320 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6321 *strict_overflow_p = true;
6322 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6323 fold_convert (ctype,
6324 const_binop (TRUNC_DIV_EXPR,
6325 op1, c)));
6327 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6329 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6330 *strict_overflow_p = true;
6331 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6332 fold_convert (ctype,
6333 const_binop (TRUNC_DIV_EXPR,
6334 c, op1)));
6337 break;
6339 default:
6340 break;
6343 return 0;
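/* Illustrative sketch (not part of the original source): the PLUS_EXPR case
   above enables rewrites like the one described in the function comment:

       (x * 8 + y * 16) / 4   -->   x * 2 + y * 4

       long folded (long x, long y) { return x * 2 + y * 4; }

   which is only valid because either the original expression cannot
   overflow or signed overflow is undefined for the type in question;
   *STRICT_OVERFLOW_P records when the result relies on the latter.  */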
6346 /* Return a node which has the indicated constant VALUE (either 0 or
6347 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6348 and is of the indicated TYPE. */
6350 tree
6351 constant_boolean_node (bool value, tree type)
6353 if (type == integer_type_node)
6354 return value ? integer_one_node : integer_zero_node;
6355 else if (type == boolean_type_node)
6356 return value ? boolean_true_node : boolean_false_node;
6357 else if (TREE_CODE (type) == VECTOR_TYPE)
6358 return build_vector_from_val (type,
6359 build_int_cst (TREE_TYPE (type),
6360 value ? -1 : 0));
6361 else
6362 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6366 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6367 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6368 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6369 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6370 COND is the first argument to CODE; otherwise (as in the example
6371 given here), it is the second argument. TYPE is the type of the
6372 original expression. Return NULL_TREE if no simplification is
6373 possible. */
6375 static tree
6376 fold_binary_op_with_conditional_arg (location_t loc,
6377 enum tree_code code,
6378 tree type, tree op0, tree op1,
6379 tree cond, tree arg, int cond_first_p)
6381 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6382 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6383 tree test, true_value, false_value;
6384 tree lhs = NULL_TREE;
6385 tree rhs = NULL_TREE;
6386 enum tree_code cond_code = COND_EXPR;
6388 if (TREE_CODE (cond) == COND_EXPR
6389 || TREE_CODE (cond) == VEC_COND_EXPR)
6391 test = TREE_OPERAND (cond, 0);
6392 true_value = TREE_OPERAND (cond, 1);
6393 false_value = TREE_OPERAND (cond, 2);
6394 /* If this operand throws an exception, then it does not make
6395 sense to try to perform a logical or arithmetic operation
6396 involving it. */
6397 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6398 lhs = true_value;
6399 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6400 rhs = false_value;
6402 else if (!(TREE_CODE (type) != VECTOR_TYPE
6403 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6405 tree testtype = TREE_TYPE (cond);
6406 test = cond;
6407 true_value = constant_boolean_node (true, testtype);
6408 false_value = constant_boolean_node (false, testtype);
6410 else
6411 /* Detect the case of mixing vector and scalar types - bail out. */
6412 return NULL_TREE;
6414 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6415 cond_code = VEC_COND_EXPR;
6417 /* This transformation is only worthwhile if we don't have to wrap ARG
6418 in a SAVE_EXPR and the operation can be simplified without recursing
6419 on at least one of the branches once it's pushed inside the COND_EXPR. */
6420 if (!TREE_CONSTANT (arg)
6421 && (TREE_SIDE_EFFECTS (arg)
6422 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6423 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6424 return NULL_TREE;
6426 arg = fold_convert_loc (loc, arg_type, arg);
6427 if (lhs == 0)
6429 true_value = fold_convert_loc (loc, cond_type, true_value);
6430 if (cond_first_p)
6431 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6432 else
6433 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6435 if (rhs == 0)
6437 false_value = fold_convert_loc (loc, cond_type, false_value);
6438 if (cond_first_p)
6439 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6440 else
6441 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6444 /* Check that we have simplified at least one of the branches. */
6445 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6446 return NULL_TREE;
6448 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
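/* Illustrative sketch (not part of the original source): at the source
   level the transformation above distributes an operation over a
   conditional when at least one branch then simplifies:

       a + (b ? 1 : 0)   -->   b ? (a + 1) : (a + 0)   -->   b ? a + 1 : a

   A check that the two forms agree:

       int forms_agree (int a, int b)
       {
         return (a + (b ? 1 : 0)) == (b ? a + 1 : a);   always 1
       }
*/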
6452 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6454 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6455 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6456 ADDEND is the same as X.
6458 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6459 and finite. The problematic cases are when X is zero, and its mode
6460 has signed zeros. In the case of rounding towards -infinity,
6461 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6462 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6464 bool
6465 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6467 if (!real_zerop (addend))
6468 return false;
6470 /* Don't allow the fold with -fsignaling-nans. */
6471 if (HONOR_SNANS (element_mode (type)))
6472 return false;
6474 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6475 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6476 return true;
6478 /* In a vector or complex, we would need to check the sign of all zeros. */
6479 if (TREE_CODE (addend) != REAL_CST)
6480 return false;
6482 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6483 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6484 negate = !negate;
6486 /* The mode has signed zeros, and we have to honor their sign.
6487 In this situation, there is only one case we can return true for.
6488 X - 0 is the same as X unless rounding towards -infinity is
6489 supported. */
6490 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
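/* Illustrative sketch (not part of the original source): why signed zeros
   block the X + 0.0 -> X fold.  Under the default round-to-nearest mode,
   -0.0 + 0.0 is +0.0, so the addition is not a no-op when X is -0.0:

       #include <math.h>

       int sign_is_lost (void)
       {
         double x = -0.0;
         return signbit (x) && !signbit (x + 0.0);   returns 1: sign changed
       }

   X - 0.0, by contrast, preserves -0.0 except under rounding toward
   -infinity, which is exactly the distinction the function tests.  */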
6493 /* Subroutine of fold() that optimizes comparisons of a division by
6494 a nonzero integer constant against an integer constant, i.e.
6495 X/C1 op C2.
6497 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6498 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6499 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6501 The function returns the constant folded tree if a simplification
6502 can be made, and NULL_TREE otherwise. */
6504 static tree
6505 fold_div_compare (location_t loc,
6506 enum tree_code code, tree type, tree arg0, tree arg1)
6508 tree prod, tmp, hi, lo;
6509 tree arg00 = TREE_OPERAND (arg0, 0);
6510 tree arg01 = TREE_OPERAND (arg0, 1);
6511 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6512 bool neg_overflow = false;
6513 bool overflow;
6515 /* We have to do this the hard way to detect unsigned overflow.
6516 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6517 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6518 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6519 neg_overflow = false;
6521 if (sign == UNSIGNED)
6523 tmp = int_const_binop (MINUS_EXPR, arg01,
6524 build_int_cst (TREE_TYPE (arg01), 1));
6525 lo = prod;
6527 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6528 val = wi::add (prod, tmp, sign, &overflow);
6529 hi = force_fit_type (TREE_TYPE (arg00), val,
6530 -1, overflow | TREE_OVERFLOW (prod));
6532 else if (tree_int_cst_sgn (arg01) >= 0)
6534 tmp = int_const_binop (MINUS_EXPR, arg01,
6535 build_int_cst (TREE_TYPE (arg01), 1));
6536 switch (tree_int_cst_sgn (arg1))
6538 case -1:
6539 neg_overflow = true;
6540 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6541 hi = prod;
6542 break;
6544 case 0:
6545 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6546 hi = tmp;
6547 break;
6549 case 1:
6550 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6551 lo = prod;
6552 break;
6554 default:
6555 gcc_unreachable ();
6558 else
6560 /* A negative divisor reverses the relational operators. */
6561 code = swap_tree_comparison (code);
6563 tmp = int_const_binop (PLUS_EXPR, arg01,
6564 build_int_cst (TREE_TYPE (arg01), 1));
6565 switch (tree_int_cst_sgn (arg1))
6567 case -1:
6568 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6569 lo = prod;
6570 break;
6572 case 0:
6573 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6574 lo = tmp;
6575 break;
6577 case 1:
6578 neg_overflow = true;
6579 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6580 hi = prod;
6581 break;
6583 default:
6584 gcc_unreachable ();
6588 switch (code)
6590 case EQ_EXPR:
6591 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6592 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6593 if (TREE_OVERFLOW (hi))
6594 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6595 if (TREE_OVERFLOW (lo))
6596 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6597 return build_range_check (loc, type, arg00, 1, lo, hi);
6599 case NE_EXPR:
6600 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6601 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6602 if (TREE_OVERFLOW (hi))
6603 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6604 if (TREE_OVERFLOW (lo))
6605 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6606 return build_range_check (loc, type, arg00, 0, lo, hi);
6608 case LT_EXPR:
6609 if (TREE_OVERFLOW (lo))
6611 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6612 return omit_one_operand_loc (loc, type, tmp, arg00);
6614 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6616 case LE_EXPR:
6617 if (TREE_OVERFLOW (hi))
6619 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6620 return omit_one_operand_loc (loc, type, tmp, arg00);
6622 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6624 case GT_EXPR:
6625 if (TREE_OVERFLOW (hi))
6627 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6628 return omit_one_operand_loc (loc, type, tmp, arg00);
6630 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6632 case GE_EXPR:
6633 if (TREE_OVERFLOW (lo))
6635 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6636 return omit_one_operand_loc (loc, type, tmp, arg00);
6638 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6640 default:
6641 break;
6644 return NULL_TREE;
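/* Illustrative sketch (not part of the original source): for unsigned
   operands the transform above turns a division-based equality into a
   range check:

       int forms_agree (unsigned x)
       {
         return (x / 4 == 3) == (x >= 12 && x <= 15);   always 1
       }

   i.e. lo = 3 * 4 and hi = lo + (4 - 1), with overflow of either bound
   handled by the TREE_OVERFLOW cases in the switch.  */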
6648 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6649 equality/inequality test, then return a simplified form of the test
6650 using a sign test. Otherwise return NULL. TYPE is the desired
6651 result type. */
6653 static tree
6654 fold_single_bit_test_into_sign_test (location_t loc,
6655 enum tree_code code, tree arg0, tree arg1,
6656 tree result_type)
6658 /* If this is testing a single bit, we can optimize the test. */
6659 if ((code == NE_EXPR || code == EQ_EXPR)
6660 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6661 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6663 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6664 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6665 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6667 if (arg00 != NULL_TREE
6668 /* This is only a win if casting to a signed type is cheap,
6669 i.e. when arg00's type is not a partial mode. */
6670 && TYPE_PRECISION (TREE_TYPE (arg00))
6671 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6673 tree stype = signed_type_for (TREE_TYPE (arg00));
6674 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6675 result_type,
6676 fold_convert_loc (loc, stype, arg00),
6677 build_int_cst (stype, 0));
6681 return NULL_TREE;
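/* Illustrative sketch (not part of the original source): when the tested
   bit is the sign bit, the transform above reduces the masking test to a
   signed comparison against zero:

       #include <limits.h>

       int forms_agree (int a)
       {
         return ((a & INT_MIN) != 0) == (a < 0);   always 1
       }
*/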
6684 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6685 equality/inequality test, then return a simplified form of
6686 the test using shifts and logical operations. Otherwise return
6687 NULL. TYPE is the desired result type. */
6689 tree
6690 fold_single_bit_test (location_t loc, enum tree_code code,
6691 tree arg0, tree arg1, tree result_type)
6693 /* If this is testing a single bit, we can optimize the test. */
6694 if ((code == NE_EXPR || code == EQ_EXPR)
6695 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6696 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6698 tree inner = TREE_OPERAND (arg0, 0);
6699 tree type = TREE_TYPE (arg0);
6700 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6701 machine_mode operand_mode = TYPE_MODE (type);
6702 int ops_unsigned;
6703 tree signed_type, unsigned_type, intermediate_type;
6704 tree tem, one;
6706 /* First, see if we can fold the single bit test into a sign-bit
6707 test. */
6708 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6709 result_type);
6710 if (tem)
6711 return tem;
6713 /* Otherwise we have (A & C) != 0 where C is a single bit,
6714 convert that into ((A >> C2) & 1), where C2 = log2(C).
6715 Similarly for (A & C) == 0. */
6717 /* If INNER is a right shift of a constant and it plus BITNUM does
6718 not overflow, adjust BITNUM and INNER. */
6719 if (TREE_CODE (inner) == RSHIFT_EXPR
6720 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6721 && bitnum < TYPE_PRECISION (type)
6722 && wi::ltu_p (TREE_OPERAND (inner, 1),
6723 TYPE_PRECISION (type) - bitnum))
6725 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6726 inner = TREE_OPERAND (inner, 0);
6729 /* If we are going to be able to omit the AND below, we must do our
6730 operations as unsigned. If we must use the AND, we have a choice.
6731 Normally unsigned is faster, but for some machines signed is. */
6732 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
6733 && !flag_syntax_only) ? 0 : 1;
6735 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6736 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6737 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6738 inner = fold_convert_loc (loc, intermediate_type, inner);
6740 if (bitnum != 0)
6741 inner = build2 (RSHIFT_EXPR, intermediate_type,
6742 inner, size_int (bitnum));
6744 one = build_int_cst (intermediate_type, 1);
6746 if (code == EQ_EXPR)
6747 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6749 /* Put the AND last so it can combine with more things. */
6750 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6752 /* Make sure to return the proper type. */
6753 inner = fold_convert_loc (loc, result_type, inner);
6755 return inner;
6757 return NULL_TREE;
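/* Illustrative sketch (not part of the original source): for a non-sign
   bit the shift form built above looks like

       int forms_agree (unsigned a)
       {
         int before = (a & 8) != 0;
         int after  = (a >> 3) & 1;    bitnum = log2 (8)
         return before == after;       always 1
       }

   and the == 0 flavour XORs in a constant 1 before the final AND.  */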
6760 /* Test whether it is preferable to swap two operands, ARG0 and
6761 ARG1, for example because ARG0 is an integer constant and ARG1
6762 isn't. */
6764 bool
6765 tree_swap_operands_p (const_tree arg0, const_tree arg1)
6767 if (CONSTANT_CLASS_P (arg1))
6768 return 0;
6769 if (CONSTANT_CLASS_P (arg0))
6770 return 1;
6772 STRIP_NOPS (arg0);
6773 STRIP_NOPS (arg1);
6775 if (TREE_CONSTANT (arg1))
6776 return 0;
6777 if (TREE_CONSTANT (arg0))
6778 return 1;
6780 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6781 for commutative and comparison operators. Ensuring a canonical
6782 form allows the optimizers to find additional redundancies without
6783 having to explicitly check for both orderings. */
6784 if (TREE_CODE (arg0) == SSA_NAME
6785 && TREE_CODE (arg1) == SSA_NAME
6786 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6787 return 1;
6789 /* Put SSA_NAMEs last. */
6790 if (TREE_CODE (arg1) == SSA_NAME)
6791 return 0;
6792 if (TREE_CODE (arg0) == SSA_NAME)
6793 return 1;
6795 /* Put variables last. */
6796 if (DECL_P (arg1))
6797 return 0;
6798 if (DECL_P (arg0))
6799 return 1;
6801 return 0;
6805 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6806 means A >= Y && A != MAX, but in this case we know that
6807 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6809 static tree
6810 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6812 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6814 if (TREE_CODE (bound) == LT_EXPR)
6815 a = TREE_OPERAND (bound, 0);
6816 else if (TREE_CODE (bound) == GT_EXPR)
6817 a = TREE_OPERAND (bound, 1);
6818 else
6819 return NULL_TREE;
6821 typea = TREE_TYPE (a);
6822 if (!INTEGRAL_TYPE_P (typea)
6823 && !POINTER_TYPE_P (typea))
6824 return NULL_TREE;
6826 if (TREE_CODE (ineq) == LT_EXPR)
6828 a1 = TREE_OPERAND (ineq, 1);
6829 y = TREE_OPERAND (ineq, 0);
6831 else if (TREE_CODE (ineq) == GT_EXPR)
6833 a1 = TREE_OPERAND (ineq, 0);
6834 y = TREE_OPERAND (ineq, 1);
6836 else
6837 return NULL_TREE;
6839 if (TREE_TYPE (a1) != typea)
6840 return NULL_TREE;
6842 if (POINTER_TYPE_P (typea))
6844 /* Convert the pointer types into integer before taking the difference. */
6845 tree ta = fold_convert_loc (loc, ssizetype, a);
6846 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6847 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6849 else
6850 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6852 if (!diff || !integer_onep (diff))
6853 return NULL_TREE;
6855 return fold_build2_loc (loc, GE_EXPR, type, a, y);
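/* Illustrative sketch (not part of the original source): the fold above is
   justified because A < X already rules out A == MAX, so A + 1 cannot
   wrap.  Under that side condition the two inequalities agree:

       int forms_agree (int a, int y)
       {
         assumes a < INT_MAX, which "A < X" guarantees
         return (a + 1 > y) == (a >= y);   1 whenever a + 1 does not overflow
       }
*/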
6858 /* Fold a sum or difference of at least one multiplication.
6859 Returns the folded tree or NULL if no simplification could be made. */
6861 static tree
6862 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6863 tree arg0, tree arg1)
6865 tree arg00, arg01, arg10, arg11;
6866 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6868 /* (A * C) +- (B * C) -> (A+-B) * C.
6869 (A * C) +- A -> A * (C+-1).
6870 We are most concerned about the case where C is a constant,
6871 but other combinations show up during loop reduction. Since
6872 it is not difficult, try all four possibilities. */
6874 if (TREE_CODE (arg0) == MULT_EXPR)
6876 arg00 = TREE_OPERAND (arg0, 0);
6877 arg01 = TREE_OPERAND (arg0, 1);
6879 else if (TREE_CODE (arg0) == INTEGER_CST)
6881 arg00 = build_one_cst (type);
6882 arg01 = arg0;
6884 else
6886 /* We cannot generate constant 1 for fract. */
6887 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6888 return NULL_TREE;
6889 arg00 = arg0;
6890 arg01 = build_one_cst (type);
6892 if (TREE_CODE (arg1) == MULT_EXPR)
6894 arg10 = TREE_OPERAND (arg1, 0);
6895 arg11 = TREE_OPERAND (arg1, 1);
6897 else if (TREE_CODE (arg1) == INTEGER_CST)
6899 arg10 = build_one_cst (type);
6900 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6901 the purpose of this canonicalization. */
6902 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6903 && negate_expr_p (arg1)
6904 && code == PLUS_EXPR)
6906 arg11 = negate_expr (arg1);
6907 code = MINUS_EXPR;
6909 else
6910 arg11 = arg1;
6912 else
6914 /* We cannot generate constant 1 for fract. */
6915 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6916 return NULL_TREE;
6917 arg10 = arg1;
6918 arg11 = build_one_cst (type);
6920 same = NULL_TREE;
6922 if (operand_equal_p (arg01, arg11, 0))
6923 same = arg01, alt0 = arg00, alt1 = arg10;
6924 else if (operand_equal_p (arg00, arg10, 0))
6925 same = arg00, alt0 = arg01, alt1 = arg11;
6926 else if (operand_equal_p (arg00, arg11, 0))
6927 same = arg00, alt0 = arg01, alt1 = arg10;
6928 else if (operand_equal_p (arg01, arg10, 0))
6929 same = arg01, alt0 = arg00, alt1 = arg11;
6931 /* No identical multiplicands; see if we can find a common
6932 power-of-two factor in non-power-of-two multiplies. This
6933 can help in multi-dimensional array access. */
6934 else if (tree_fits_shwi_p (arg01)
6935 && tree_fits_shwi_p (arg11))
6937 HOST_WIDE_INT int01, int11, tmp;
6938 bool swap = false;
6939 tree maybe_same;
6940 int01 = tree_to_shwi (arg01);
6941 int11 = tree_to_shwi (arg11);
6943 /* Move min of absolute values to int11. */
6944 if (absu_hwi (int01) < absu_hwi (int11))
6946 tmp = int01, int01 = int11, int11 = tmp;
6947 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6948 maybe_same = arg01;
6949 swap = true;
6951 else
6952 maybe_same = arg11;
6954 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6955 /* The remainder should not be a constant, otherwise we
6956 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6957 increased the number of multiplications necessary. */
6958 && TREE_CODE (arg10) != INTEGER_CST)
6960 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6961 build_int_cst (TREE_TYPE (arg00),
6962 int01 / int11));
6963 alt1 = arg10;
6964 same = maybe_same;
6965 if (swap)
6966 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6970 if (same)
6971 return fold_build2_loc (loc, MULT_EXPR, type,
6972 fold_build2_loc (loc, code, type,
6973 fold_convert_loc (loc, type, alt0),
6974 fold_convert_loc (loc, type, alt1)),
6975 fold_convert_loc (loc, type, same));
6977 return NULL_TREE;
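/* Illustrative sketch (not part of the original source): the two cases
   handled above, factoring an identical multiplicand and factoring a
   common power-of-two factor, look like

       long factor_same (long a, long b, long c)
       { return (a + b) * c; }            from a * c + b * c

       long factor_pow2 (long i, long j)
       { return (i * 7 + j) * 4; }        from i * 28 + j * 4

   the latter being the multi-dimensional array-indexing shape the comment
   in the function mentions.  */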
6980 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6981 specified by EXPR into the buffer PTR of length LEN bytes.
6982 Return the number of bytes placed in the buffer, or zero
6983 upon failure. */
6985 static int
6986 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
6988 tree type = TREE_TYPE (expr);
6989 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6990 int byte, offset, word, words;
6991 unsigned char value;
6993 if ((off == -1 && total_bytes > len)
6994 || off >= total_bytes)
6995 return 0;
6996 if (off == -1)
6997 off = 0;
6998 words = total_bytes / UNITS_PER_WORD;
7000 for (byte = 0; byte < total_bytes; byte++)
7002 int bitpos = byte * BITS_PER_UNIT;
7003 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7004 number of bytes. */
7005 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7007 if (total_bytes > UNITS_PER_WORD)
7009 word = byte / UNITS_PER_WORD;
7010 if (WORDS_BIG_ENDIAN)
7011 word = (words - 1) - word;
7012 offset = word * UNITS_PER_WORD;
7013 if (BYTES_BIG_ENDIAN)
7014 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7015 else
7016 offset += byte % UNITS_PER_WORD;
7018 else
7019 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7020 if (offset >= off
7021 && offset - off < len)
7022 ptr[offset - off] = value;
7024 return MIN (len, total_bytes - off);
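/* Illustrative sketch (not part of the original source): the byte scatter
   above mirrors how an ordinary store lays out an integer.  A host-side
   analogue for a little-endian 32-bit value:

       void encode_u32_le (unsigned int v, unsigned char *ptr)
       {
         for (int byte = 0; byte < 4; byte++)
           ptr[byte] = (v >> (byte * 8)) & 0xff;   LSB first
       }

   native_encode_int additionally honors the target's WORDS_BIG_ENDIAN and
   BYTES_BIG_ENDIAN settings and the optional OFF window.  */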
7028 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7029 specified by EXPR into the buffer PTR of length LEN bytes.
7030 Return the number of bytes placed in the buffer, or zero
7031 upon failure. */
7033 static int
7034 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7036 tree type = TREE_TYPE (expr);
7037 machine_mode mode = TYPE_MODE (type);
7038 int total_bytes = GET_MODE_SIZE (mode);
7039 FIXED_VALUE_TYPE value;
7040 tree i_value, i_type;
7042 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7043 return 0;
7045 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7047 if (NULL_TREE == i_type
7048 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7049 return 0;
7051 value = TREE_FIXED_CST (expr);
7052 i_value = double_int_to_tree (i_type, value.data);
7054 return native_encode_int (i_value, ptr, len, off);
7058 /* Subroutine of native_encode_expr. Encode the REAL_CST
7059 specified by EXPR into the buffer PTR of length LEN bytes.
7060 Return the number of bytes placed in the buffer, or zero
7061 upon failure. */
7063 static int
7064 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7066 tree type = TREE_TYPE (expr);
7067 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7068 int byte, offset, word, words, bitpos;
7069 unsigned char value;
7071 /* There are always 32 bits in each long, no matter the size of
7072 the host's long. We handle floating point representations with
7073 up to 192 bits. */
7074 long tmp[6];
7076 if ((off == -1 && total_bytes > len)
7077 || off >= total_bytes)
7078 return 0;
7079 if (off == -1)
7080 off = 0;
7081 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7083 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7085 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7086 bitpos += BITS_PER_UNIT)
7088 byte = (bitpos / BITS_PER_UNIT) & 3;
7089 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7091 if (UNITS_PER_WORD < 4)
7093 word = byte / UNITS_PER_WORD;
7094 if (WORDS_BIG_ENDIAN)
7095 word = (words - 1) - word;
7096 offset = word * UNITS_PER_WORD;
7097 if (BYTES_BIG_ENDIAN)
7098 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7099 else
7100 offset += byte % UNITS_PER_WORD;
7102 else
7104 offset = byte;
7105 if (BYTES_BIG_ENDIAN)
7107 /* Reverse bytes within each long, or within the entire float
7108 if it's smaller than a long (for HFmode). */
7109 offset = MIN (3, total_bytes - 1) - offset;
7110 gcc_assert (offset >= 0);
7113 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7114 if (offset >= off
7115 && offset - off < len)
7116 ptr[offset - off] = value;
7118 return MIN (len, total_bytes - off);
7121 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7122 specified by EXPR into the buffer PTR of length LEN bytes.
7123 Return the number of bytes placed in the buffer, or zero
7124 upon failure. */
7126 static int
7127 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7129 int rsize, isize;
7130 tree part;
7132 part = TREE_REALPART (expr);
7133 rsize = native_encode_expr (part, ptr, len, off);
7134 if (off == -1
7135 && rsize == 0)
7136 return 0;
7137 part = TREE_IMAGPART (expr);
7138 if (off != -1)
7139 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7140 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7141 if (off == -1
7142 && isize != rsize)
7143 return 0;
7144 return rsize + isize;
7148 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7149 specified by EXPR into the buffer PTR of length LEN bytes.
7150 Return the number of bytes placed in the buffer, or zero
7151 upon failure. */
7153 static int
7154 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7156 unsigned i, count;
7157 int size, offset;
7158 tree itype, elem;
7160 offset = 0;
7161 count = VECTOR_CST_NELTS (expr);
7162 itype = TREE_TYPE (TREE_TYPE (expr));
7163 size = GET_MODE_SIZE (TYPE_MODE (itype));
7164 for (i = 0; i < count; i++)
7166 if (off >= size)
7168 off -= size;
7169 continue;
7171 elem = VECTOR_CST_ELT (expr, i);
7172 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7173 if ((off == -1 && res != size)
7174 || res == 0)
7175 return 0;
7176 offset += res;
7177 if (offset >= len)
7178 return offset;
7179 if (off != -1)
7180 off = 0;
7182 return offset;
7186 /* Subroutine of native_encode_expr. Encode the STRING_CST
7187 specified by EXPR into the buffer PTR of length LEN bytes.
7188 Return the number of bytes placed in the buffer, or zero
7189 upon failure. */
7191 static int
7192 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7194 tree type = TREE_TYPE (expr);
7195 HOST_WIDE_INT total_bytes;
7197 if (TREE_CODE (type) != ARRAY_TYPE
7198 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7199 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7200 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7201 return 0;
7202 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7203 if ((off == -1 && total_bytes > len)
7204 || off >= total_bytes)
7205 return 0;
7206 if (off == -1)
7207 off = 0;
7208 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7210 int written = 0;
7211 if (off < TREE_STRING_LENGTH (expr))
7213 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7214 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7216 memset (ptr + written, 0,
7217 MIN (total_bytes - written, len - written));
7219 else
7220 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7221 return MIN (total_bytes - off, len);
7225 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, REAL_CST,
7226 FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7227 buffer PTR of length LEN bytes. If OFF is not -1 then start
7228 the encoding at byte offset OFF and encode at most LEN bytes.
7229 Return the number of bytes placed in the buffer, or zero upon failure. */
7231 int
7232 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7234 /* We don't support starting at a negative offset, and -1 is special. */
7235 if (off < -1)
7236 return 0;
7238 switch (TREE_CODE (expr))
7240 case INTEGER_CST:
7241 return native_encode_int (expr, ptr, len, off);
7243 case REAL_CST:
7244 return native_encode_real (expr, ptr, len, off);
7246 case FIXED_CST:
7247 return native_encode_fixed (expr, ptr, len, off);
7249 case COMPLEX_CST:
7250 return native_encode_complex (expr, ptr, len, off);
7252 case VECTOR_CST:
7253 return native_encode_vector (expr, ptr, len, off);
7255 case STRING_CST:
7256 return native_encode_string (expr, ptr, len, off);
7258 default:
7259 return 0;
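/* Usage sketch (not part of the original source): native_encode_expr pairs
   with native_interpret_expr below to round-trip a constant through its
   target byte image:

       unsigned char buf[24];
       int n = native_encode_expr (cst, buf, sizeof buf, -1);
       tree back = n ? native_interpret_expr (TREE_TYPE (cst), buf, n)
                     : NULL_TREE;

   When both steps succeed, BACK represents the same value as CST.  Callers
   must check the return value: zero means the encoding is unsupported or
   BUF is too small.  */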
7264 /* Subroutine of native_interpret_expr. Interpret the contents of
7265 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7266 If the buffer cannot be interpreted, return NULL_TREE. */
7268 static tree
7269 native_interpret_int (tree type, const unsigned char *ptr, int len)
7271 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7273 if (total_bytes > len
7274 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7275 return NULL_TREE;
7277 wide_int result = wi::from_buffer (ptr, total_bytes);
7279 return wide_int_to_tree (type, result);
7283 /* Subroutine of native_interpret_expr. Interpret the contents of
7284 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7285 If the buffer cannot be interpreted, return NULL_TREE. */
7287 static tree
7288 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7290 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7291 double_int result;
7292 FIXED_VALUE_TYPE fixed_value;
7294 if (total_bytes > len
7295 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7296 return NULL_TREE;
7298 result = double_int::from_buffer (ptr, total_bytes);
7299 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7301 return build_fixed (type, fixed_value);
7305 /* Subroutine of native_interpret_expr. Interpret the contents of
7306 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7307 If the buffer cannot be interpreted, return NULL_TREE. */
7309 static tree
7310 native_interpret_real (tree type, const unsigned char *ptr, int len)
7312 machine_mode mode = TYPE_MODE (type);
7313 int total_bytes = GET_MODE_SIZE (mode);
7314 unsigned char value;
7315 /* There are always 32 bits in each long, no matter the size of
7316 the host's long. We handle floating point representations with
7317 up to 192 bits. */
7318 REAL_VALUE_TYPE r;
7319 long tmp[6];
7321 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7322 if (total_bytes > len || total_bytes > 24)
7323 return NULL_TREE;
7324 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7326 memset (tmp, 0, sizeof (tmp));
7327 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7328 bitpos += BITS_PER_UNIT)
7330 /* Both OFFSET and BYTE index within a long;
7331 bitpos indexes the whole float. */
7332 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7333 if (UNITS_PER_WORD < 4)
7335 int word = byte / UNITS_PER_WORD;
7336 if (WORDS_BIG_ENDIAN)
7337 word = (words - 1) - word;
7338 offset = word * UNITS_PER_WORD;
7339 if (BYTES_BIG_ENDIAN)
7340 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7341 else
7342 offset += byte % UNITS_PER_WORD;
7344 else
7346 offset = byte;
7347 if (BYTES_BIG_ENDIAN)
7349 /* Reverse bytes within each long, or within the entire float
7350 if it's smaller than a long (for HFmode). */
7351 offset = MIN (3, total_bytes - 1) - offset;
7352 gcc_assert (offset >= 0);
7355 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7357 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7360 real_from_target (&r, tmp, mode);
7361 return build_real (type, r);
7365 /* Subroutine of native_interpret_expr. Interpret the contents of
7366 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7367 If the buffer cannot be interpreted, return NULL_TREE. */
7369 static tree
7370 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7372 tree etype, rpart, ipart;
7373 int size;
7375 etype = TREE_TYPE (type);
7376 size = GET_MODE_SIZE (TYPE_MODE (etype));
7377 if (size * 2 > len)
7378 return NULL_TREE;
7379 rpart = native_interpret_expr (etype, ptr, size);
7380 if (!rpart)
7381 return NULL_TREE;
7382 ipart = native_interpret_expr (etype, ptr+size, size);
7383 if (!ipart)
7384 return NULL_TREE;
7385 return build_complex (type, rpart, ipart);
7389 /* Subroutine of native_interpret_expr. Interpret the contents of
7390 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7391 If the buffer cannot be interpreted, return NULL_TREE. */
7393 static tree
7394 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7396 tree etype, elem;
7397 int i, size, count;
7398 tree *elements;
7400 etype = TREE_TYPE (type);
7401 size = GET_MODE_SIZE (TYPE_MODE (etype));
7402 count = TYPE_VECTOR_SUBPARTS (type);
7403 if (size * count > len)
7404 return NULL_TREE;
7406 elements = XALLOCAVEC (tree, count);
7407 for (i = count - 1; i >= 0; i--)
7409 elem = native_interpret_expr (etype, ptr+(i*size), size);
7410 if (!elem)
7411 return NULL_TREE;
7412 elements[i] = elem;
7414 return build_vector (type, elements);
7418 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7419 the buffer PTR of length LEN as a constant of type TYPE. For
7420 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7421 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7422 return NULL_TREE. */
7424 tree
7425 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7427 switch (TREE_CODE (type))
7429 case INTEGER_TYPE:
7430 case ENUMERAL_TYPE:
7431 case BOOLEAN_TYPE:
7432 case POINTER_TYPE:
7433 case REFERENCE_TYPE:
7434 return native_interpret_int (type, ptr, len);
7436 case REAL_TYPE:
7437 return native_interpret_real (type, ptr, len);
7439 case FIXED_POINT_TYPE:
7440 return native_interpret_fixed (type, ptr, len);
7442 case COMPLEX_TYPE:
7443 return native_interpret_complex (type, ptr, len);
7445 case VECTOR_TYPE:
7446 return native_interpret_vector (type, ptr, len);
7448 default:
7449 return NULL_TREE;
7453 /* Returns true if we can interpret the contents of a native encoding
7454 as TYPE. */
7456 static bool
7457 can_native_interpret_type_p (tree type)
7459 switch (TREE_CODE (type))
7461 case INTEGER_TYPE:
7462 case ENUMERAL_TYPE:
7463 case BOOLEAN_TYPE:
7464 case POINTER_TYPE:
7465 case REFERENCE_TYPE:
7466 case FIXED_POINT_TYPE:
7467 case REAL_TYPE:
7468 case COMPLEX_TYPE:
7469 case VECTOR_TYPE:
7470 return true;
7471 default:
7472 return false;
7476 /* Return true iff a constant of type TYPE is accepted by
7477 native_encode_expr. */
7479 bool
7480 can_native_encode_type_p (tree type)
7482 switch (TREE_CODE (type))
7484 case INTEGER_TYPE:
7485 case REAL_TYPE:
7486 case FIXED_POINT_TYPE:
7487 case COMPLEX_TYPE:
7488 case VECTOR_TYPE:
7489 case POINTER_TYPE:
7490 return true;
7491 default:
7492 return false;
7496 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7497 TYPE at compile-time. If we're unable to perform the conversion
7498 return NULL_TREE. */
7500 static tree
7501 fold_view_convert_expr (tree type, tree expr)
7503 /* We support up to 512-bit values (for V8DFmode). */
7504 unsigned char buffer[64];
7505 int len;
7507 /* Check that the host and target are sane. */
7508 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7509 return NULL_TREE;
7511 len = native_encode_expr (expr, buffer, sizeof (buffer));
7512 if (len == 0)
7513 return NULL_TREE;
7515 return native_interpret_expr (type, buffer, len);
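/* As a concrete illustration (assuming a little-endian target with
   IEEE single-precision float): folding VIEW_CONVERT_EXPR<int>(1.0f)
   first encodes the REAL_CST as the bytes 00 00 80 3f, then
   reinterprets them as the INTEGER_CST 0x3f800000.  */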
7518 /* Build an expression for the address of T. Folds away INDIRECT_REF
7519 to avoid confusing the gimplify process. */
7521 tree
7522 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7524 /* The size of the object is not relevant when talking about its address. */
7525 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7526 t = TREE_OPERAND (t, 0);
7528 if (TREE_CODE (t) == INDIRECT_REF)
7530 t = TREE_OPERAND (t, 0);
7532 if (TREE_TYPE (t) != ptrtype)
7533 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7535 else if (TREE_CODE (t) == MEM_REF
7536 && integer_zerop (TREE_OPERAND (t, 1)))
7537 return TREE_OPERAND (t, 0);
7538 else if (TREE_CODE (t) == MEM_REF
7539 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7540 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7541 TREE_OPERAND (t, 0),
7542 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7543 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7545 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7547 if (TREE_TYPE (t) != ptrtype)
7548 t = fold_convert_loc (loc, ptrtype, t);
7550 else
7551 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7553 return t;
7556 /* Build an expression for the address of T. */
7558 tree
7559 build_fold_addr_expr_loc (location_t loc, tree t)
7561 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7563 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7566 /* Fold a unary expression of code CODE and type TYPE with operand
7567 OP0. Return the folded expression if folding is successful.
7568 Otherwise, return NULL_TREE. */
7570 tree
7571 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7573 tree tem;
7574 tree arg0;
7575 enum tree_code_class kind = TREE_CODE_CLASS (code);
7577 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7578 && TREE_CODE_LENGTH (code) == 1);
7580 arg0 = op0;
7581 if (arg0)
7583 if (CONVERT_EXPR_CODE_P (code)
7584 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7586 /* Don't use STRIP_NOPS, because signedness of argument type
7587 matters. */
7588 STRIP_SIGN_NOPS (arg0);
7590 else
7592 /* Strip any conversions that don't change the mode. This
7593 is safe for every expression, except for a comparison
7594 expression because its signedness is derived from its
7595 operands.
7597 Note that this is done as an internal manipulation within
7598 the constant folder, in order to find the simplest
7599 representation of the arguments so that their form can be
7600 studied. In any cases, the appropriate type conversions
7601 should be put back in the tree that will get out of the
7602 constant folder. */
7603 STRIP_NOPS (arg0);
7606 if (CONSTANT_CLASS_P (arg0))
7608 tree tem = const_unop (code, type, arg0);
7609 if (tem)
7611 if (TREE_TYPE (tem) != type)
7612 tem = fold_convert_loc (loc, type, tem);
7613 return tem;
7618 tem = generic_simplify (loc, code, type, op0);
7619 if (tem)
7620 return tem;
7622 if (TREE_CODE_CLASS (code) == tcc_unary)
7624 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7625 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7626 fold_build1_loc (loc, code, type,
7627 fold_convert_loc (loc, TREE_TYPE (op0),
7628 TREE_OPERAND (arg0, 1))));
7629 else if (TREE_CODE (arg0) == COND_EXPR)
7631 tree arg01 = TREE_OPERAND (arg0, 1);
7632 tree arg02 = TREE_OPERAND (arg0, 2);
7633 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7634 arg01 = fold_build1_loc (loc, code, type,
7635 fold_convert_loc (loc,
7636 TREE_TYPE (op0), arg01));
7637 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7638 arg02 = fold_build1_loc (loc, code, type,
7639 fold_convert_loc (loc,
7640 TREE_TYPE (op0), arg02));
7641 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7642 arg01, arg02);
7644 /* If this was a conversion, and all we did was to move it
7645 inside the COND_EXPR, bring it back out. But leave it if
7646 it is a conversion from integer to integer and the
7647 result precision is no wider than a word since such a
7648 conversion is cheap and may be optimized away by combine,
7649 while it couldn't if it were outside the COND_EXPR. Then return
7650 so we don't get into an infinite recursion loop taking the
7651 conversion out and then back in. */
7653 if ((CONVERT_EXPR_CODE_P (code)
7654 || code == NON_LVALUE_EXPR)
7655 && TREE_CODE (tem) == COND_EXPR
7656 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7657 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7658 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7659 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7660 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7661 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7662 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7663 && (INTEGRAL_TYPE_P
7664 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7665 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7666 || flag_syntax_only))
7667 tem = build1_loc (loc, code, type,
7668 build3 (COND_EXPR,
7669 TREE_TYPE (TREE_OPERAND
7670 (TREE_OPERAND (tem, 1), 0)),
7671 TREE_OPERAND (tem, 0),
7672 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7673 TREE_OPERAND (TREE_OPERAND (tem, 2),
7674 0)));
7675 return tem;
7679 switch (code)
7681 case NON_LVALUE_EXPR:
7682 if (!maybe_lvalue_p (op0))
7683 return fold_convert_loc (loc, type, op0);
7684 return NULL_TREE;
7686 CASE_CONVERT:
7687 case FLOAT_EXPR:
7688 case FIX_TRUNC_EXPR:
7689 if (COMPARISON_CLASS_P (op0))
7691 /* If we have (type) (a CMP b) and type is an integral type, return
7692 new expression involving the new type. Canonicalize
7693 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7694 non-integral type.
7695 Do not fold the result, as that would not simplify further;
7696 folding again also results in recursion. */
7697 if (TREE_CODE (type) == BOOLEAN_TYPE)
7698 return build2_loc (loc, TREE_CODE (op0), type,
7699 TREE_OPERAND (op0, 0),
7700 TREE_OPERAND (op0, 1));
7701 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7702 && TREE_CODE (type) != VECTOR_TYPE)
7703 return build3_loc (loc, COND_EXPR, type, op0,
7704 constant_boolean_node (true, type),
7705 constant_boolean_node (false, type));
7708 /* Handle (T *)&A.B.C for A being of type T and B and C
7709 living at offset zero. This occurs frequently in
7710 C++ upcasting and then accessing the base. */
7711 if (TREE_CODE (op0) == ADDR_EXPR
7712 && POINTER_TYPE_P (type)
7713 && handled_component_p (TREE_OPERAND (op0, 0)))
7715 HOST_WIDE_INT bitsize, bitpos;
7716 tree offset;
7717 machine_mode mode;
7718 int unsignedp, reversep, volatilep;
7719 tree base
7720 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7721 &offset, &mode, &unsignedp, &reversep,
7722 &volatilep);
7723 /* If the reference was to a (constant) zero offset, we can use
7724 the address of the base if it has the same base type
7725 as the result type and the pointer type is unqualified. */
7726 if (! offset && bitpos == 0
7727 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7728 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7729 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7730 return fold_convert_loc (loc, type,
7731 build_fold_addr_expr_loc (loc, base));
7734 if (TREE_CODE (op0) == MODIFY_EXPR
7735 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7736 /* Detect assigning a bitfield. */
7737 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7738 && DECL_BIT_FIELD
7739 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7741 /* Don't leave an assignment inside a conversion
7742 unless assigning a bitfield. */
7743 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7744 /* First do the assignment, then return converted constant. */
7745 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7746 TREE_NO_WARNING (tem) = 1;
7747 TREE_USED (tem) = 1;
7748 return tem;
7751 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7752 constant (if x has signed type, the sign bit cannot be set
7753 in c). This folds extension into the BIT_AND_EXPR.
7754 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7755 very likely don't have maximal range for their precision and this
7756 transformation effectively doesn't preserve non-maximal ranges. */
7757 if (TREE_CODE (type) == INTEGER_TYPE
7758 && TREE_CODE (op0) == BIT_AND_EXPR
7759 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7761 tree and_expr = op0;
7762 tree and0 = TREE_OPERAND (and_expr, 0);
7763 tree and1 = TREE_OPERAND (and_expr, 1);
7764 int change = 0;
7766 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7767 || (TYPE_PRECISION (type)
7768 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7769 change = 1;
7770 else if (TYPE_PRECISION (TREE_TYPE (and1))
7771 <= HOST_BITS_PER_WIDE_INT
7772 && tree_fits_uhwi_p (and1))
7774 unsigned HOST_WIDE_INT cst;
7776 cst = tree_to_uhwi (and1);
7777 cst &= HOST_WIDE_INT_M1U
7778 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7779 change = (cst == 0);
7780 if (change
7781 && !flag_syntax_only
7782 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
7783 == ZERO_EXTEND))
7785 tree uns = unsigned_type_for (TREE_TYPE (and0));
7786 and0 = fold_convert_loc (loc, uns, and0);
7787 and1 = fold_convert_loc (loc, uns, and1);
7790 if (change)
7792 tem = force_fit_type (type, wi::to_widest (and1), 0,
7793 TREE_OVERFLOW (and1));
7794 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7795 fold_convert_loc (loc, type, and0), tem);
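/* As a concrete instance of the fold above (illustrative only), with
   a 32-bit int I widened to a 64-bit type:

     (long long) (i & 0x7f)   -->   (long long) i & 0x7f

   This is valid because the mask's sign bit is clear, so sign-extending
   the BIT_AND_EXPR result equals AND-ing the sign-extended operand.  */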
7799 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7800 cast (T1)X will fold away. We assume that this happens when X itself
7801 is a cast. */
7802 if (POINTER_TYPE_P (type)
7803 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7804 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7806 tree arg00 = TREE_OPERAND (arg0, 0);
7807 tree arg01 = TREE_OPERAND (arg0, 1);
7809 return fold_build_pointer_plus_loc
7810 (loc, fold_convert_loc (loc, type, arg00), arg01);
7813 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7814 of the same precision, and X is an integer type not narrower than
7815 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7816 if (INTEGRAL_TYPE_P (type)
7817 && TREE_CODE (op0) == BIT_NOT_EXPR
7818 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7819 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7820 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7822 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7823 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7824 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7825 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7826 fold_convert_loc (loc, type, tem));
7829 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7830 type of X and Y (integer types only). */
7831 if (INTEGRAL_TYPE_P (type)
7832 && TREE_CODE (op0) == MULT_EXPR
7833 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7834 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7836 /* Be careful not to introduce new overflows. */
7837 tree mult_type;
7838 if (TYPE_OVERFLOW_WRAPS (type))
7839 mult_type = type;
7840 else
7841 mult_type = unsigned_type_for (type);
7843 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7845 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7846 fold_convert_loc (loc, mult_type,
7847 TREE_OPERAND (op0, 0)),
7848 fold_convert_loc (loc, mult_type,
7849 TREE_OPERAND (op0, 1)));
7850 return fold_convert_loc (loc, type, tem);
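/* A concrete instance of the narrowing fold above (illustrative only),
   for 32-bit int operands and a 16-bit signed short result:

     (short) (x * y)   -->   (short) ((unsigned short) x
                                      * (unsigned short) y)

   where the MULT_EXPR is rebuilt in the unsigned 16-bit type so that
   no new signed overflow is introduced.  */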
7854 return NULL_TREE;
7856 case VIEW_CONVERT_EXPR:
7857 if (TREE_CODE (op0) == MEM_REF)
7859 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
7860 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
7861 tem = fold_build2_loc (loc, MEM_REF, type,
7862 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7863 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7864 return tem;
7867 return NULL_TREE;
7869 case NEGATE_EXPR:
7870 tem = fold_negate_expr (loc, arg0);
7871 if (tem)
7872 return fold_convert_loc (loc, type, tem);
7873 return NULL_TREE;
7875 case ABS_EXPR:
7876 /* Convert fabs((double)float) into (double)fabsf(float). */
7877 if (TREE_CODE (arg0) == NOP_EXPR
7878 && TREE_CODE (type) == REAL_TYPE)
7880 tree targ0 = strip_float_extensions (arg0);
7881 if (targ0 != arg0)
7882 return fold_convert_loc (loc, type,
7883 fold_build1_loc (loc, ABS_EXPR,
7884 TREE_TYPE (targ0),
7885 targ0));
7887 return NULL_TREE;
7889 case BIT_NOT_EXPR:
7890 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7891 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7892 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7893 fold_convert_loc (loc, type,
7894 TREE_OPERAND (arg0, 0)))))
7895 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7896 fold_convert_loc (loc, type,
7897 TREE_OPERAND (arg0, 1)));
7898 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7899 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7900 fold_convert_loc (loc, type,
7901 TREE_OPERAND (arg0, 1)))))
7902 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7903 fold_convert_loc (loc, type,
7904 TREE_OPERAND (arg0, 0)), tem);
7906 return NULL_TREE;
7908 case TRUTH_NOT_EXPR:
7909 /* Note that the operand of this must be an int
7910 and its values must be 0 or 1.
7911 ("true" is a fixed value perhaps depending on the language,
7912 but we don't handle values other than 1 correctly yet.) */
7913 tem = fold_truth_not_expr (loc, arg0);
7914 if (!tem)
7915 return NULL_TREE;
7916 return fold_convert_loc (loc, type, tem);
7918 case INDIRECT_REF:
7919 /* Fold *&X to X if X is an lvalue. */
7920 if (TREE_CODE (op0) == ADDR_EXPR)
7922 tree op00 = TREE_OPERAND (op0, 0);
7923 if ((VAR_P (op00)
7924 || TREE_CODE (op00) == PARM_DECL
7925 || TREE_CODE (op00) == RESULT_DECL)
7926 && !TREE_READONLY (op00))
7927 return op00;
7929 return NULL_TREE;
7931 default:
7932 return NULL_TREE;
7933 } /* switch (code) */
7937 /* If the operation was a conversion do _not_ mark a resulting constant
7938 with TREE_OVERFLOW if the original constant was not. These conversions
7939 have implementation defined behavior and retaining the TREE_OVERFLOW
7940 flag here would confuse later passes such as VRP. */
7941 tree
7942 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7943 tree type, tree op0)
7945 tree res = fold_unary_loc (loc, code, type, op0);
7946 if (res
7947 && TREE_CODE (res) == INTEGER_CST
7948 && TREE_CODE (op0) == INTEGER_CST
7949 && CONVERT_EXPR_CODE_P (code))
7950 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7952 return res;
7955 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7956 operands OP0 and OP1. LOC is the location of the resulting expression.
7957 ARG0 and ARG1 are the NOP-stripped forms of OP0 and OP1.
7958 Return the folded expression if folding is successful. Otherwise,
7959 return NULL_TREE. */
7960 static tree
7961 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7962 tree arg0, tree arg1, tree op0, tree op1)
7964 tree tem;
7966 /* We only do these simplifications if we are optimizing. */
7967 if (!optimize)
7968 return NULL_TREE;
7970 /* Check for things like (A || B) && (A || C). We can convert this
7971 to A || (B && C). Note that either operator can be any of the four
7972 truth and/or operations and the transformation will still be
7973 valid. Also note that we only care about order for the
7974 ANDIF and ORIF operators. If B contains side effects, this
7975 might change the truth-value of A. */
7976 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7977 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7978 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7979 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7980 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7981 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7983 tree a00 = TREE_OPERAND (arg0, 0);
7984 tree a01 = TREE_OPERAND (arg0, 1);
7985 tree a10 = TREE_OPERAND (arg1, 0);
7986 tree a11 = TREE_OPERAND (arg1, 1);
7987 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7988 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7989 && (code == TRUTH_AND_EXPR
7990 || code == TRUTH_OR_EXPR));
7992 if (operand_equal_p (a00, a10, 0))
7993 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7994 fold_build2_loc (loc, code, type, a01, a11));
7995 else if (commutative && operand_equal_p (a00, a11, 0))
7996 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7997 fold_build2_loc (loc, code, type, a01, a10));
7998 else if (commutative && operand_equal_p (a01, a10, 0))
7999 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8000 fold_build2_loc (loc, code, type, a00, a11));
8002 /* This case is tricky because we must either have commutative
8003 operators or else A10 must not have side-effects. */
8005 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8006 && operand_equal_p (a01, a11, 0))
8007 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8008 fold_build2_loc (loc, code, type, a00, a10),
8009 a01);
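/* A concrete instance of the distribution above (illustrative only):

     (a || b) && (a || c)   -->   a || (b && c)

   The shared arm A is matched by operand_equal_p; the commutative
   (non-IF) forms may also match A in the second operand position, as
   handled by the cases above.  */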
8012 /* See if we can build a range comparison. */
8013 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8014 return tem;
8016 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8017 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8019 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8020 if (tem)
8021 return fold_build2_loc (loc, code, type, tem, arg1);
8024 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8025 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8027 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8028 if (tem)
8029 return fold_build2_loc (loc, code, type, arg0, tem);
8032 /* Check for the possibility of merging component references. If our
8033 lhs is another similar operation, try to merge its rhs with our
8034 rhs. Then try to merge our lhs and rhs. */
8035 if (TREE_CODE (arg0) == code
8036 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8037 TREE_OPERAND (arg0, 1), arg1)))
8038 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8040 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8041 return tem;
8043 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8044 && (code == TRUTH_AND_EXPR
8045 || code == TRUTH_ANDIF_EXPR
8046 || code == TRUTH_OR_EXPR
8047 || code == TRUTH_ORIF_EXPR))
8049 enum tree_code ncode, icode;
8051 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8052 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8053 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8055 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8056 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8057 We don't want to pack more than two leaves into a non-IF AND/OR
8058 expression.
8059 If the tree code of the left-hand operand isn't an AND/OR-IF code
8060 and isn't equal to IF-CODE, then we don't want to add the right-hand
8061 operand. If the inner right-hand side of the left-hand operand has
8062 side effects, or isn't simple, then we can't add to it, as otherwise
8063 we might destroy the if-sequence. */
8064 if (TREE_CODE (arg0) == icode
8065 && simple_operand_p_2 (arg1)
8066 /* Needed for sequence points to handle trapping operations and
8067 side effects. */
8068 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8070 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8071 arg1);
8072 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8073 tem);
8075 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8076 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8077 else if (TREE_CODE (arg1) == icode
8078 && simple_operand_p_2 (arg0)
8079 /* Needed for sequence points to handle trapping operations and
8080 side effects. */
8081 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8083 tem = fold_build2_loc (loc, ncode, type,
8084 arg0, TREE_OPERAND (arg1, 0));
8085 return fold_build2_loc (loc, icode, type, tem,
8086 TREE_OPERAND (arg1, 1));
8088 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8089 into (A OR B).
8090 For sequence point consistency, we need to check for trapping,
8091 and side-effects. */
8092 else if (code == icode && simple_operand_p_2 (arg0)
8093 && simple_operand_p_2 (arg1))
8094 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8097 return NULL_TREE;
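/* A concrete instance of the packing above (illustrative only), where
   AND denotes the non-short-circuit TRUTH_AND_EXPR:

     (a && b) && c   -->   a && (b AND c)

   provided B and C are simple and cannot trap, so at most two leaves
   end up under the non-IF operator and the outer branchy AND-IF
   structure is preserved.  */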
8100 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8101 by changing CODE to reduce the magnitude of constants involved in
8102 ARG0 of the comparison.
8103 Returns a canonicalized comparison tree if a simplification was
8104 possible, otherwise returns NULL_TREE.
8105 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8106 valid if signed overflow is undefined. */
8108 static tree
8109 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8110 tree arg0, tree arg1,
8111 bool *strict_overflow_p)
8113 enum tree_code code0 = TREE_CODE (arg0);
8114 tree t, cst0 = NULL_TREE;
8115 int sgn0;
8117 /* Match A +- CST code arg1. We can change this only if overflow
8118 is undefined. */
8119 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8120 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8121 /* In principle pointers also have undefined overflow behavior,
8122 but that causes problems elsewhere. */
8123 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8124 && (code0 == MINUS_EXPR
8125 || code0 == PLUS_EXPR)
8126 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8127 return NULL_TREE;
8129 /* Identify the constant in arg0 and its sign. */
8130 cst0 = TREE_OPERAND (arg0, 1);
8131 sgn0 = tree_int_cst_sgn (cst0);
8133 /* Overflowed constants and zero will cause problems. */
8134 if (integer_zerop (cst0)
8135 || TREE_OVERFLOW (cst0))
8136 return NULL_TREE;
8138 /* See if we can reduce the magnitude of the constant in
8139 arg0 by changing the comparison code. */
8140 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8141 if (code == LT_EXPR
8142 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8143 code = LE_EXPR;
8144 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8145 else if (code == GT_EXPR
8146 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8147 code = GE_EXPR;
8148 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8149 else if (code == LE_EXPR
8150 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8151 code = LT_EXPR;
8152 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8153 else if (code == GE_EXPR
8154 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8155 code = GT_EXPR;
8156 else
8157 return NULL_TREE;
8158 *strict_overflow_p = true;
8160 /* Now build the constant reduced in magnitude. But not if that
8161 would produce one outside of its type's range. */
8162 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8163 && ((sgn0 == 1
8164 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8165 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8166 || (sgn0 == -1
8167 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8168 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8169 return NULL_TREE;
8171 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8172 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8173 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8174 t = fold_convert (TREE_TYPE (arg1), t);
8176 return fold_build2_loc (loc, code, type, t, arg1);
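/* A concrete instance of the canonicalization above (illustrative
   only), for signed int X with undefined overflow:

     x - 10 < y   -->   x - 9 <= y

   The constant shrinks in magnitude; the transformation is invalid if
   x - 10 could wrap, which is why *STRICT_OVERFLOW_P is set.  */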
8179 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8180 overflow further. Try to decrease the magnitude of constants involved
8181 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8182 and put sole constants at the second argument position.
8183 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8185 static tree
8186 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8187 tree arg0, tree arg1)
8189 tree t;
8190 bool strict_overflow_p;
8191 const char * const warnmsg = G_("assuming signed overflow does not occur "
8192 "when reducing constant in comparison");
8194 /* Try canonicalization by simplifying arg0. */
8195 strict_overflow_p = false;
8196 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8197 &strict_overflow_p);
8198 if (t)
8200 if (strict_overflow_p)
8201 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8202 return t;
8205 /* Try canonicalization by simplifying arg1 using the swapped
8206 comparison. */
8207 code = swap_tree_comparison (code);
8208 strict_overflow_p = false;
8209 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8210 &strict_overflow_p);
8211 if (t && strict_overflow_p)
8212 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8213 return t;
8216 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8217 space. This is used to avoid issuing overflow warnings for
8218 expressions like &p->x which cannot wrap. */
8220 static bool
8221 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8223 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8224 return true;
8226 if (bitpos < 0)
8227 return true;
8229 wide_int wi_offset;
8230 int precision = TYPE_PRECISION (TREE_TYPE (base));
8231 if (offset == NULL_TREE)
8232 wi_offset = wi::zero (precision);
8233 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8234 return true;
8235 else
8236 wi_offset = offset;
8238 bool overflow;
8239 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8240 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8241 if (overflow)
8242 return true;
8244 if (!wi::fits_uhwi_p (total))
8245 return true;
8247 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8248 if (size <= 0)
8249 return true;
8251 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8252 array. */
8253 if (TREE_CODE (base) == ADDR_EXPR)
8255 HOST_WIDE_INT base_size;
8257 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8258 if (base_size > 0 && size < base_size)
8259 size = base_size;
8262 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
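/* As a concrete illustration (only a sketch): with "int a[4];"
   (16 bytes) and BASE == &a, an OFFSET/BITPOS total of 20 bytes lands
   outside the object, so the function returns true and the caller
   warns before folding a comparison that assumes no wraparound.  */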
8265 /* Return a positive integer when the symbol DECL is known to have
8266 a nonzero address, zero when it's known not to (e.g., it's a weak
8267 symbol), and a negative integer when the symbol is not yet in the
8268 symbol table and so whether or not its address is zero is unknown.
8269 For function-local objects, always return a positive integer. */
8270 static int
8271 maybe_nonzero_address (tree decl)
8273 if (DECL_P (decl) && decl_in_symtab_p (decl))
8274 if (struct symtab_node *symbol = symtab_node::get_create (decl))
8275 return symbol->nonzero_address ();
8277 /* Function local objects are never NULL. */
8278 if (DECL_P (decl)
8279 && (DECL_CONTEXT (decl)
8280 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8281 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8282 return 1;
8284 return -1;
8287 /* Subroutine of fold_binary. This routine performs all of the
8288 transformations that are common to the equality/inequality
8289 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8290 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8291 fold_binary should call fold_binary. Fold a comparison with
8292 tree code CODE and type TYPE with operands OP0 and OP1. Return
8293 the folded comparison or NULL_TREE. */
8295 static tree
8296 fold_comparison (location_t loc, enum tree_code code, tree type,
8297 tree op0, tree op1)
8299 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8300 tree arg0, arg1, tem;
8302 arg0 = op0;
8303 arg1 = op1;
8305 STRIP_SIGN_NOPS (arg0);
8306 STRIP_SIGN_NOPS (arg1);
8308 /* For comparisons of pointers we can decompose it to a compile time
8309 comparison of the base objects and the offsets into the object.
8310 This requires at least one operand being an ADDR_EXPR or a
8311 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8312 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8313 && (TREE_CODE (arg0) == ADDR_EXPR
8314 || TREE_CODE (arg1) == ADDR_EXPR
8315 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8316 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8318 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8319 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8320 machine_mode mode;
8321 int volatilep, reversep, unsignedp;
8322 bool indirect_base0 = false, indirect_base1 = false;
8324 /* Get base and offset for the access. Strip ADDR_EXPR for
8325 get_inner_reference, but put it back by stripping INDIRECT_REF
8326 off the base object if possible. indirect_baseN will be true
8327 if baseN is not an address but refers to the object itself. */
8328 base0 = arg0;
8329 if (TREE_CODE (arg0) == ADDR_EXPR)
8331 base0
8332 = get_inner_reference (TREE_OPERAND (arg0, 0),
8333 &bitsize, &bitpos0, &offset0, &mode,
8334 &unsignedp, &reversep, &volatilep);
8335 if (TREE_CODE (base0) == INDIRECT_REF)
8336 base0 = TREE_OPERAND (base0, 0);
8337 else
8338 indirect_base0 = true;
8340 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8342 base0 = TREE_OPERAND (arg0, 0);
8343 STRIP_SIGN_NOPS (base0);
8344 if (TREE_CODE (base0) == ADDR_EXPR)
8346 base0
8347 = get_inner_reference (TREE_OPERAND (base0, 0),
8348 &bitsize, &bitpos0, &offset0, &mode,
8349 &unsignedp, &reversep, &volatilep);
8350 if (TREE_CODE (base0) == INDIRECT_REF)
8351 base0 = TREE_OPERAND (base0, 0);
8352 else
8353 indirect_base0 = true;
8355 if (offset0 == NULL_TREE || integer_zerop (offset0))
8356 offset0 = TREE_OPERAND (arg0, 1);
8357 else
8358 offset0 = size_binop (PLUS_EXPR, offset0,
8359 TREE_OPERAND (arg0, 1));
8360 if (TREE_CODE (offset0) == INTEGER_CST)
8362 offset_int tem = wi::sext (wi::to_offset (offset0),
8363 TYPE_PRECISION (sizetype));
8364 tem <<= LOG2_BITS_PER_UNIT;
8365 tem += bitpos0;
8366 if (wi::fits_shwi_p (tem))
8368 bitpos0 = tem.to_shwi ();
8369 offset0 = NULL_TREE;
8374 base1 = arg1;
8375 if (TREE_CODE (arg1) == ADDR_EXPR)
8377 base1
8378 = get_inner_reference (TREE_OPERAND (arg1, 0),
8379 &bitsize, &bitpos1, &offset1, &mode,
8380 &unsignedp, &reversep, &volatilep);
8381 if (TREE_CODE (base1) == INDIRECT_REF)
8382 base1 = TREE_OPERAND (base1, 0);
8383 else
8384 indirect_base1 = true;
8386 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8388 base1 = TREE_OPERAND (arg1, 0);
8389 STRIP_SIGN_NOPS (base1);
8390 if (TREE_CODE (base1) == ADDR_EXPR)
8392 base1
8393 = get_inner_reference (TREE_OPERAND (base1, 0),
8394 &bitsize, &bitpos1, &offset1, &mode,
8395 &unsignedp, &reversep, &volatilep);
8396 if (TREE_CODE (base1) == INDIRECT_REF)
8397 base1 = TREE_OPERAND (base1, 0);
8398 else
8399 indirect_base1 = true;
8401 if (offset1 == NULL_TREE || integer_zerop (offset1))
8402 offset1 = TREE_OPERAND (arg1, 1);
8403 else
8404 offset1 = size_binop (PLUS_EXPR, offset1,
8405 TREE_OPERAND (arg1, 1));
8406 if (TREE_CODE (offset1) == INTEGER_CST)
8408 offset_int tem = wi::sext (wi::to_offset (offset1),
8409 TYPE_PRECISION (sizetype));
8410 tem <<= LOG2_BITS_PER_UNIT;
8411 tem += bitpos1;
8412 if (wi::fits_shwi_p (tem))
8414 bitpos1 = tem.to_shwi ();
8415 offset1 = NULL_TREE;
8420 /* If we have equivalent bases we might be able to simplify. */
8421 if (indirect_base0 == indirect_base1
8422 && operand_equal_p (base0, base1,
8423 indirect_base0 ? OEP_ADDRESS_OF : 0))
8425 /* We can fold this expression to a constant if the non-constant
8426 offset parts are equal. */
8427 if ((offset0 == offset1
8428 || (offset0 && offset1
8429 && operand_equal_p (offset0, offset1, 0)))
8430 && (equality_code
8431 || (indirect_base0
8432 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8433 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8436 if (!equality_code
8437 && bitpos0 != bitpos1
8438 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8439 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8440 fold_overflow_warning (("assuming pointer wraparound does not "
8441 "occur when comparing P +- C1 with "
8442 "P +- C2"),
8443 WARN_STRICT_OVERFLOW_CONDITIONAL);
8445 switch (code)
8447 case EQ_EXPR:
8448 return constant_boolean_node (bitpos0 == bitpos1, type);
8449 case NE_EXPR:
8450 return constant_boolean_node (bitpos0 != bitpos1, type);
8451 case LT_EXPR:
8452 return constant_boolean_node (bitpos0 < bitpos1, type);
8453 case LE_EXPR:
8454 return constant_boolean_node (bitpos0 <= bitpos1, type);
8455 case GE_EXPR:
8456 return constant_boolean_node (bitpos0 >= bitpos1, type);
8457 case GT_EXPR:
8458 return constant_boolean_node (bitpos0 > bitpos1, type);
8459 default:;
8462 /* We can simplify the comparison to a comparison of the variable
8463 offset parts if the constant offset parts are equal.
8464 Be careful to use signed sizetype here because otherwise we
8465 mess with array offsets in the wrong way. This is possible
8466 because pointer arithmetic is restricted to retain within an
8467 object and overflow on pointer differences is undefined as of
8468 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8469 else if (bitpos0 == bitpos1
8470 && (equality_code
8471 || (indirect_base0
8472 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8473 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8475 /* By converting to signed sizetype we cover middle-end pointer
8476 arithmetic, which operates on unsigned pointer types of sizetype
8477 size, and ARRAY_REF offsets, which are properly sign- or
8478 zero-extended from their type in case it is narrower than
8479 sizetype. */
8480 if (offset0 == NULL_TREE)
8481 offset0 = build_int_cst (ssizetype, 0);
8482 else
8483 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8484 if (offset1 == NULL_TREE)
8485 offset1 = build_int_cst (ssizetype, 0);
8486 else
8487 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8489 if (!equality_code
8490 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8491 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8492 fold_overflow_warning (("assuming pointer wraparound does not "
8493 "occur when comparing P +- C1 with "
8494 "P +- C2"),
8495 WARN_STRICT_OVERFLOW_COMPARISON);
8497 return fold_build2_loc (loc, code, type, offset0, offset1);
8500 /* For equal offsets we can simplify to a comparison of the
8501 base addresses. */
8502 else if (bitpos0 == bitpos1
8503 && (indirect_base0
8504 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8505 && (indirect_base1
8506 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8507 && ((offset0 == offset1)
8508 || (offset0 && offset1
8509 && operand_equal_p (offset0, offset1, 0))))
8511 if (indirect_base0)
8512 base0 = build_fold_addr_expr_loc (loc, base0);
8513 if (indirect_base1)
8514 base1 = build_fold_addr_expr_loc (loc, base1);
8515 return fold_build2_loc (loc, code, type, base0, base1);
8517 /* Comparison between an ordinary (non-weak) symbol and a null
8518 pointer can be eliminated since such symbols must have a non
8519 null address. In C, relational expressions between pointers
8520 to objects and null pointers are undefined. The results
8521 below follow the C++ rules with the additional property that
8522 every object pointer compares greater than a null pointer. */
8524 else if (((DECL_P (base0)
8525 && maybe_nonzero_address (base0) > 0
8526 /* Avoid folding references to struct members at offset 0 to
8527 prevent tests like '&ptr->firstmember == 0' from getting
8528 eliminated. When ptr is null, although the -> expression
8529 is strictly speaking invalid, GCC retains it as a matter
8530 of QoI. See PR c/44555. */
8531 && (offset0 == NULL_TREE && bitpos0 != 0))
8532 || CONSTANT_CLASS_P (base0))
8533 && indirect_base0
8534 /* The caller guarantees that when one of the arguments is
8535 constant (i.e., null in this case) it is second. */
8536 && integer_zerop (arg1))
8538 switch (code)
8540 case EQ_EXPR:
8541 case LE_EXPR:
8542 case LT_EXPR:
8543 return constant_boolean_node (false, type);
8544 case GE_EXPR:
8545 case GT_EXPR:
8546 case NE_EXPR:
8547 return constant_boolean_node (true, type);
8548 default:
8549 gcc_unreachable ();
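/* A concrete instance of the folds above (illustrative only), given
   "struct S { int a, b; } s;" with a non-weak S:

     &s.b == 0   -->   false          &s.b > 0   -->   true

   &s.a, being at offset zero, is deliberately not folded, per the
   PR c/44555 note above.  */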
8554 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8555 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8556 the resulting offset is smaller in absolute value than the
8557 original one and has the same sign. */
8558 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8559 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8560 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8561 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8562 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8563 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8564 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8565 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8567 tree const1 = TREE_OPERAND (arg0, 1);
8568 tree const2 = TREE_OPERAND (arg1, 1);
8569 tree variable1 = TREE_OPERAND (arg0, 0);
8570 tree variable2 = TREE_OPERAND (arg1, 0);
8571 tree cst;
8572 const char * const warnmsg = G_("assuming signed overflow does not "
8573 "occur when combining constants around "
8574 "a comparison");
8576 /* Put the constant on the side where it doesn't overflow and is
8577 of lower absolute value than before and of the same sign. */
8578 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8579 ? MINUS_EXPR : PLUS_EXPR,
8580 const2, const1);
8581 if (!TREE_OVERFLOW (cst)
8582 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8583 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8585 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8586 return fold_build2_loc (loc, code, type,
8587 variable1,
8588 fold_build2_loc (loc, TREE_CODE (arg1),
8589 TREE_TYPE (arg1),
8590 variable2, cst));
8593 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8594 ? MINUS_EXPR : PLUS_EXPR,
8595 const1, const2);
8596 if (!TREE_OVERFLOW (cst)
8597 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8598 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8600 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8601 return fold_build2_loc (loc, code, type,
8602 fold_build2_loc (loc, TREE_CODE (arg0),
8603 TREE_TYPE (arg0),
8604 variable1, cst),
8605 variable2);
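/* A concrete instance of the transformation above (illustrative
   only), for signed int X and Y:

     x + 10 < y + 12   -->   x < y + 2

   The combined constant 2 is smaller in magnitude than 12 and has the
   same sign, so it is kept on the right-hand side.  */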
8609 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8610 if (tem)
8611 return tem;
8613 /* If we are comparing an expression that just has comparisons
8614 of two integer values, arithmetic expressions of those comparisons,
8615 and constants, we can simplify it. There are only three cases
8616 to check: the two values can either be equal, the first can be
8617 greater, or the second can be greater. Fold the expression for
8618 those three values. Since each value must be 0 or 1, we have
8619 eight possibilities, each of which corresponds to the constant 0
8620 or 1 or one of the six possible comparisons.
8622 This handles common cases like (a > b) == 0 but also handles
8623 expressions like ((x > y) - (y > x)) > 0, which supposedly
8624 occur in macroized code. */
8626 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8628 tree cval1 = 0, cval2 = 0;
8629 int save_p = 0;
8631 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8632 /* Don't handle degenerate cases here; they should already
8633 have been handled anyway. */
8634 && cval1 != 0 && cval2 != 0
8635 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8636 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8637 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8638 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8639 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8640 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8641 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8643 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8644 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8646 /* We can't just pass T to eval_subst in case cval1 or cval2
8647 was the same as ARG1. */
8649 tree high_result
8650 = fold_build2_loc (loc, code, type,
8651 eval_subst (loc, arg0, cval1, maxval,
8652 cval2, minval),
8653 arg1);
8654 tree equal_result
8655 = fold_build2_loc (loc, code, type,
8656 eval_subst (loc, arg0, cval1, maxval,
8657 cval2, maxval),
8658 arg1);
8659 tree low_result
8660 = fold_build2_loc (loc, code, type,
8661 eval_subst (loc, arg0, cval1, minval,
8662 cval2, maxval),
8663 arg1);
8665 /* All three of these results should be 0 or 1. Confirm they are.
8666 Then use those values to select the proper code to use. */
8668 if (TREE_CODE (high_result) == INTEGER_CST
8669 && TREE_CODE (equal_result) == INTEGER_CST
8670 && TREE_CODE (low_result) == INTEGER_CST)
8672 /* Make a 3-bit mask with the high-order bit being the
8673 value for `>', the next for '=', and the low for '<'. */
8674 switch ((integer_onep (high_result) * 4)
8675 + (integer_onep (equal_result) * 2)
8676 + integer_onep (low_result))
8678 case 0:
8679 /* Always false. */
8680 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8681 case 1:
8682 code = LT_EXPR;
8683 break;
8684 case 2:
8685 code = EQ_EXPR;
8686 break;
8687 case 3:
8688 code = LE_EXPR;
8689 break;
8690 case 4:
8691 code = GT_EXPR;
8692 break;
8693 case 5:
8694 code = NE_EXPR;
8695 break;
8696 case 6:
8697 code = GE_EXPR;
8698 break;
8699 case 7:
8700 /* Always true. */
8701 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8704 if (save_p)
8706 tem = save_expr (build2 (code, type, cval1, cval2));
8707 SET_EXPR_LOCATION (tem, loc);
8708 return tem;
8710 return fold_build2_loc (loc, code, type, cval1, cval2);
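/* Working through the motivating example above (illustration only):
   for ((x > y) - (y > x)) > 0, substituting the three orderings gives
   high_result = 1, equal_result = 0 and low_result = 0, i.e. mask 4,
   so the whole expression folds to x > y.  */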
8715 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8716 into a single range test. */
8717 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8718 && TREE_CODE (arg1) == INTEGER_CST
8719 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8720 && !integer_zerop (TREE_OPERAND (arg0, 1))
8721 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8722 && !TREE_OVERFLOW (arg1))
8724 tem = fold_div_compare (loc, code, type, arg0, arg1);
8725 if (tem != NULL_TREE)
8726 return tem;
8729 return NULL_TREE;
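/* A concrete instance of the fold_div_compare step above (illustrative
   only), for unsigned int X:

     x / 4 == 2   -->   x >= 8 && x <= 11

   i.e. the division against a constant becomes a single range test on
   X.  */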
8733 /* Subroutine of fold_binary. Optimize complex multiplications of the
8734 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8735 argument EXPR represents the expression "z" of type TYPE. */
8737 static tree
8738 fold_mult_zconjz (location_t loc, tree type, tree expr)
8740 tree itype = TREE_TYPE (type);
8741 tree rpart, ipart, tem;
8743 if (TREE_CODE (expr) == COMPLEX_EXPR)
8745 rpart = TREE_OPERAND (expr, 0);
8746 ipart = TREE_OPERAND (expr, 1);
8748 else if (TREE_CODE (expr) == COMPLEX_CST)
8750 rpart = TREE_REALPART (expr);
8751 ipart = TREE_IMAGPART (expr);
8753 else
8755 expr = save_expr (expr);
8756 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8757 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8760 rpart = save_expr (rpart);
8761 ipart = save_expr (ipart);
8762 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8763 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8764 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8765 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8766 build_zero_cst (itype));
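/* A concrete instance (illustrative only), for _Complex float Z where
   ~z denotes the GNU C conjugate:

     z * ~z   -->   COMPLEX_EXPR <r*r + i*i, 0.0f>

   with r and i the real and imaginary parts of Z, each evaluated
   exactly once via SAVE_EXPR.  */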
8770 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8771 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8773 static bool
8774 vec_cst_ctor_to_array (tree arg, tree *elts)
8776 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8778 if (TREE_CODE (arg) == VECTOR_CST)
8780 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8781 elts[i] = VECTOR_CST_ELT (arg, i);
8783 else if (TREE_CODE (arg) == CONSTRUCTOR)
8785 constructor_elt *elt;
8787 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8788 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8789 return false;
8790 else
8791 elts[i] = elt->value;
8793 else
8794 return false;
8795 for (; i < nelts; i++)
8796 elts[i]
8797 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8798 return true;
8801 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8802 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8803 NULL_TREE otherwise. */
8805 static tree
8806 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8808 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8809 tree *elts;
8810 bool need_ctor = false;
8812 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8813 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8814 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8815 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8816 return NULL_TREE;
8818 elts = XALLOCAVEC (tree, nelts * 3);
8819 if (!vec_cst_ctor_to_array (arg0, elts)
8820 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8821 return NULL_TREE;
8823 for (i = 0; i < nelts; i++)
8825 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8826 need_ctor = true;
8827 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8830 if (need_ctor)
8832 vec<constructor_elt, va_gc> *v;
8833 vec_alloc (v, nelts);
8834 for (i = 0; i < nelts; i++)
8835 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8836 return build_constructor (type, v);
8838 else
8839 return build_vector (type, &elts[2 * nelts]);
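/* A concrete instance (illustrative only), for V4SI operands:

     arg0 = {10, 11, 12, 13},  arg1 = {20, 21, 22, 23},
     sel  = {0, 4, 1, 5}       -->   {10, 20, 11, 21}

   SEL indexes the concatenation of ARG0 and ARG1; a CONSTRUCTOR
   instead of a VECTOR_CST is returned only if some selected element
   is not a constant.  */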
8842 /* Try to fold a pointer difference of type TYPE between two address
8843 expressions of array references AREF0 and AREF1 using location LOC.
8844 Return a simplified expression for the difference or NULL_TREE. */
8846 static tree
8847 fold_addr_of_array_ref_difference (location_t loc, tree type,
8848 tree aref0, tree aref1)
8850 tree base0 = TREE_OPERAND (aref0, 0);
8851 tree base1 = TREE_OPERAND (aref1, 0);
8852 tree base_offset = build_int_cst (type, 0);
8854 /* If the bases are array references as well, recurse. If the bases
8855 are pointer indirections compute the difference of the pointers.
8856 If the bases are equal, we are set. */
8857 if ((TREE_CODE (base0) == ARRAY_REF
8858 && TREE_CODE (base1) == ARRAY_REF
8859 && (base_offset
8860 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8861 || (INDIRECT_REF_P (base0)
8862 && INDIRECT_REF_P (base1)
8863 && (base_offset
8864 = fold_binary_loc (loc, MINUS_EXPR, type,
8865 fold_convert (type, TREE_OPERAND (base0, 0)),
8866 fold_convert (type,
8867 TREE_OPERAND (base1, 0)))))
8868 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8870 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8871 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8872 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8873 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8874 return fold_build2_loc (loc, PLUS_EXPR, type,
8875 base_offset,
8876 fold_build2_loc (loc, MULT_EXPR, type,
8877 diff, esz));
8879 return NULL_TREE;
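/* A concrete instance of the recursion above (illustrative only),
   for "int a[4][8];":

     &a[i][j] - &a[k][l]   -->   (i - k) * 32 + (j - l) * 4

   Each ARRAY_REF level contributes its index difference times its
   element size in bytes; the row term comes from the recursive call
   on the bases.  */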
8882 /* If the real or vector real constant CST of type TYPE has an exact
8883 inverse, return it, else return NULL. */
8885 tree
8886 exact_inverse (tree type, tree cst)
8888 REAL_VALUE_TYPE r;
8889 tree unit_type, *elts;
8890 machine_mode mode;
8891 unsigned vec_nelts, i;
8893 switch (TREE_CODE (cst))
8895 case REAL_CST:
8896 r = TREE_REAL_CST (cst);
8898 if (exact_real_inverse (TYPE_MODE (type), &r))
8899 return build_real (type, r);
8901 return NULL_TREE;
8903 case VECTOR_CST:
8904 vec_nelts = VECTOR_CST_NELTS (cst);
8905 elts = XALLOCAVEC (tree, vec_nelts);
8906 unit_type = TREE_TYPE (type);
8907 mode = TYPE_MODE (unit_type);
8909 for (i = 0; i < vec_nelts; i++)
8911 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8912 if (!exact_real_inverse (mode, &r))
8913 return NULL_TREE;
8914 elts[i] = build_real (unit_type, r);
8917 return build_vector (type, elts);
8919 default:
8920 return NULL_TREE;
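/* For illustration: exact_inverse of the REAL_CST 4.0 is 0.25, which
   is exactly representable, whereas 3.0 yields NULL_TREE because 1/3
   has no finite binary representation; a VECTOR_CST succeeds only if
   every element has an exact inverse.  */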
8924 /* Mask out the tz least significant bits of X of type TYPE where
8925 tz is the number of trailing zeroes in Y. */
8926 static wide_int
8927 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8929 int tz = wi::ctz (y);
8930 if (tz > 0)
8931 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8932 return x;
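/* For illustration: with Y == 24 (binary 11000, three trailing
   zeros), mask_with_tz returns X & ~7, i.e. X with its three low
   bits cleared.  */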
8935 /* Return true when T is an address and is known to be nonzero.
8936 For floating point we further ensure that T is not denormal.
8937 Similar logic is present in nonzero_address in rtlanal.c.
8939 If the return value is based on the assumption that signed overflow
8940 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8941 change *STRICT_OVERFLOW_P. */
8943 static bool
8944 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8946 tree type = TREE_TYPE (t);
8947 enum tree_code code;
8949 /* Doing something useful for floating point would need more work. */
8950 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8951 return false;
8953 code = TREE_CODE (t);
8954 switch (TREE_CODE_CLASS (code))
8956 case tcc_unary:
8957 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8958 strict_overflow_p);
8959 case tcc_binary:
8960 case tcc_comparison:
8961 return tree_binary_nonzero_warnv_p (code, type,
8962 TREE_OPERAND (t, 0),
8963 TREE_OPERAND (t, 1),
8964 strict_overflow_p);
8965 case tcc_constant:
8966 case tcc_declaration:
8967 case tcc_reference:
8968 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8970 default:
8971 break;
8974 switch (code)
8976 case TRUTH_NOT_EXPR:
8977 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8978 strict_overflow_p);
8980 case TRUTH_AND_EXPR:
8981 case TRUTH_OR_EXPR:
8982 case TRUTH_XOR_EXPR:
8983 return tree_binary_nonzero_warnv_p (code, type,
8984 TREE_OPERAND (t, 0),
8985 TREE_OPERAND (t, 1),
8986 strict_overflow_p);
8988 case COND_EXPR:
8989 case CONSTRUCTOR:
8990 case OBJ_TYPE_REF:
8991 case ASSERT_EXPR:
8992 case ADDR_EXPR:
8993 case WITH_SIZE_EXPR:
8994 case SSA_NAME:
8995 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8997 case COMPOUND_EXPR:
8998 case MODIFY_EXPR:
8999 case BIND_EXPR:
9000 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9001 strict_overflow_p);
9003 case SAVE_EXPR:
9004 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9005 strict_overflow_p);
9007 case CALL_EXPR:
9009 tree fndecl = get_callee_fndecl (t);
9010 if (!fndecl) return false;
9011 if (flag_delete_null_pointer_checks && !flag_check_new
9012 && DECL_IS_OPERATOR_NEW (fndecl)
9013 && !TREE_NOTHROW (fndecl))
9014 return true;
9015 if (flag_delete_null_pointer_checks
9016 && lookup_attribute ("returns_nonnull",
9017 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9018 return true;
9019 return alloca_call_p (t);
9022 default:
9023 break;
9025 return false;
9028 /* Return true when T is an address and is known to be nonzero.
9029 Handle warnings about undefined signed overflow. */
9031 bool
9032 tree_expr_nonzero_p (tree t)
9034 bool ret, strict_overflow_p;
9036 strict_overflow_p = false;
9037 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9038 if (strict_overflow_p)
9039 fold_overflow_warning (("assuming signed overflow does not occur when "
9040 "determining that expression is always "
9041 "non-zero"),
9042 WARN_STRICT_OVERFLOW_MISC);
9043 return ret;
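/* For illustration: under -fdelete-null-pointer-checks the result of
   a call to a function declared with __attribute__((returns_nonnull)),
   or to a throwing operator new, satisfies this predicate, so tests
   of such results against zero can fold to constants.  */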
9046 /* Return true if T is known not to be equal to an integer W. */
9048 bool
9049 expr_not_equal_to (tree t, const wide_int &w)
9051 wide_int min, max, nz;
9052 value_range_type rtype;
9053 switch (TREE_CODE (t))
9055 case INTEGER_CST:
9056 return wi::ne_p (t, w);
9058 case SSA_NAME:
9059 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9060 return false;
9061 rtype = get_range_info (t, &min, &max);
9062 if (rtype == VR_RANGE)
9064 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9065 return true;
9066 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9067 return true;
9069 else if (rtype == VR_ANTI_RANGE
9070 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9071 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9072 return true;
9073 /* If T has some known zero bits and W has any of those bits set,
9074 then T is known not to be equal to W. */
9075 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9076 TYPE_PRECISION (TREE_TYPE (t))), 0))
9077 return true;
9078 return false;
9080 default:
9081 return false;
9085 /* Fold a binary expression of code CODE and type TYPE with operands
9086 OP0 and OP1. LOC is the location of the resulting expression.
9087 Return the folded expression if folding is successful. Otherwise,
9088 return NULL_TREE. */
9090 tree
9091 fold_binary_loc (location_t loc,
9092 enum tree_code code, tree type, tree op0, tree op1)
9094 enum tree_code_class kind = TREE_CODE_CLASS (code);
9095 tree arg0, arg1, tem;
9096 tree t1 = NULL_TREE;
9097 bool strict_overflow_p;
9098 unsigned int prec;
9100 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9101 && TREE_CODE_LENGTH (code) == 2
9102 && op0 != NULL_TREE
9103 && op1 != NULL_TREE);
9105 arg0 = op0;
9106 arg1 = op1;
9108 /* Strip any conversions that don't change the mode. This is
9109 safe for every expression, except for a comparison expression
9110 because its signedness is derived from its operands. So, in
9111 the latter case, only strip conversions that don't change the
9112 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9113 preserved.
9115 Note that this is done as an internal manipulation within the
9116 constant folder, in order to find the simplest representation
9117 of the arguments so that their form can be studied. In any
9118 case, the appropriate type conversions should be put back in
9119 the tree that will get out of the constant folder. */
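/* For instance (illustrative): given unsigned int u, stripping the
   cast in (int) u < 0 would turn a signed comparison into an unsigned
   one, which is why only sign-preserving conversions are stripped for
   comparisons.  */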
9121 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9123 STRIP_SIGN_NOPS (arg0);
9124 STRIP_SIGN_NOPS (arg1);
9126 else
9128 STRIP_NOPS (arg0);
9129 STRIP_NOPS (arg1);
9132 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9133 constant but we can't do arithmetic on them. */
9134 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9136 tem = const_binop (code, type, arg0, arg1);
9137 if (tem != NULL_TREE)
9139 if (TREE_TYPE (tem) != type)
9140 tem = fold_convert_loc (loc, type, tem);
9141 return tem;
9145 /* If this is a commutative operation, and ARG0 is a constant, move it
9146 to ARG1 to reduce the number of tests below. */
9147 if (commutative_tree_code (code)
9148 && tree_swap_operands_p (arg0, arg1))
9149 return fold_build2_loc (loc, code, type, op1, op0);
9151 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9152 to ARG1 to reduce the number of tests below. */
9153 if (kind == tcc_comparison
9154 && tree_swap_operands_p (arg0, arg1))
9155 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9157 tem = generic_simplify (loc, code, type, op0, op1);
9158 if (tem)
9159 return tem;
9161 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9163 First check for cases where an arithmetic operation is applied to a
9164 compound, conditional, or comparison operation. Push the arithmetic
9165 operation inside the compound or conditional to see if any folding
9166 can then be done. Convert comparison to conditional for this purpose.
9167 This also optimizes non-constant cases that used to be done in
9168 expand_expr.
9170 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9171 where one of the operands is a truth value and the other is a truth
9172 value or a BIT_AND_EXPR with the constant 1. In that case, the
9173 code below would make the expression more complex. Change it to a
9174 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9175 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
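/* For example (illustrative): (a < b) & (c != 0) becomes
   (a < b) && (c != 0), and (a < b) == (c != 0) becomes
   !((a < b) ^ (c != 0)).  */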
9177 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9178 || code == EQ_EXPR || code == NE_EXPR)
9179 && TREE_CODE (type) != VECTOR_TYPE
9180 && ((truth_value_p (TREE_CODE (arg0))
9181 && (truth_value_p (TREE_CODE (arg1))
9182 || (TREE_CODE (arg1) == BIT_AND_EXPR
9183 && integer_onep (TREE_OPERAND (arg1, 1)))))
9184 || (truth_value_p (TREE_CODE (arg1))
9185 && (truth_value_p (TREE_CODE (arg0))
9186 || (TREE_CODE (arg0) == BIT_AND_EXPR
9187 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9189 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9190 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9191 : TRUTH_XOR_EXPR,
9192 boolean_type_node,
9193 fold_convert_loc (loc, boolean_type_node, arg0),
9194 fold_convert_loc (loc, boolean_type_node, arg1));
9196 if (code == EQ_EXPR)
9197 tem = invert_truthvalue_loc (loc, tem);
9199 return fold_convert_loc (loc, type, tem);
9202 if (TREE_CODE_CLASS (code) == tcc_binary
9203 || TREE_CODE_CLASS (code) == tcc_comparison)
9205 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9207 tem = fold_build2_loc (loc, code, type,
9208 fold_convert_loc (loc, TREE_TYPE (op0),
9209 TREE_OPERAND (arg0, 1)), op1);
9210 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9211 tem);
9213 if (TREE_CODE (arg1) == COMPOUND_EXPR)
9215 tem = fold_build2_loc (loc, code, type, op0,
9216 fold_convert_loc (loc, TREE_TYPE (op1),
9217 TREE_OPERAND (arg1, 1)));
9218 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9219 tem);
9222 if (TREE_CODE (arg0) == COND_EXPR
9223 || TREE_CODE (arg0) == VEC_COND_EXPR
9224 || COMPARISON_CLASS_P (arg0))
9226 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9227 arg0, arg1,
9228 /*cond_first_p=*/1);
9229 if (tem != NULL_TREE)
9230 return tem;
9233 if (TREE_CODE (arg1) == COND_EXPR
9234 || TREE_CODE (arg1) == VEC_COND_EXPR
9235 || COMPARISON_CLASS_P (arg1))
9237 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9238 arg1, arg0,
9239 /*cond_first_p=*/0);
9240 if (tem != NULL_TREE)
9241 return tem;
9245 switch (code)
9247 case MEM_REF:
9248 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9249 if (TREE_CODE (arg0) == ADDR_EXPR
9250 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9252 tree iref = TREE_OPERAND (arg0, 0);
9253 return fold_build2 (MEM_REF, type,
9254 TREE_OPERAND (iref, 0),
9255 int_const_binop (PLUS_EXPR, arg1,
9256 TREE_OPERAND (iref, 1)));
9259 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9260 if (TREE_CODE (arg0) == ADDR_EXPR
9261 && handled_component_p (TREE_OPERAND (arg0, 0)))
9263 tree base;
9264 HOST_WIDE_INT coffset;
9265 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9266 &coffset);
9267 if (!base)
9268 return NULL_TREE;
9269 return fold_build2 (MEM_REF, type,
9270 build_fold_addr_expr (base),
9271 int_const_binop (PLUS_EXPR, arg1,
9272 size_int (coffset)));
9275 return NULL_TREE;
9277 case POINTER_PLUS_EXPR:
9278 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9279 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9280 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9281 return fold_convert_loc (loc, type,
9282 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9283 fold_convert_loc (loc, sizetype,
9284 arg1),
9285 fold_convert_loc (loc, sizetype,
9286 arg0)));
9288 return NULL_TREE;
9290 case PLUS_EXPR:
9291 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9293 /* X + (X / CST) * -CST is X % CST. */
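/* For instance (illustrative): X + (X / 4) * -4 folds to X % 4; the
   code below only needs to check that the two constants sum to
   zero.  */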
9294 if (TREE_CODE (arg1) == MULT_EXPR
9295 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9296 && operand_equal_p (arg0,
9297 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9299 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9300 tree cst1 = TREE_OPERAND (arg1, 1);
9301 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9302 cst1, cst0);
9303 if (sum && integer_zerop (sum))
9304 return fold_convert_loc (loc, type,
9305 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9306 TREE_TYPE (arg0), arg0,
9307 cst0));
9311 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9312 the constant 1. Make sure the type is not saturating and has the signedness of
9313 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9314 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9315 if ((TREE_CODE (arg0) == MULT_EXPR
9316 || TREE_CODE (arg1) == MULT_EXPR)
9317 && !TYPE_SATURATING (type)
9318 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9319 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9320 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9322 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9323 if (tem)
9324 return tem;
9327 if (! FLOAT_TYPE_P (type))
9329 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9330 (plus (plus (mult) (mult)) (foo)) so that we can
9331 take advantage of the factoring cases below. */
9332 if (ANY_INTEGRAL_TYPE_P (type)
9333 && TYPE_OVERFLOW_WRAPS (type)
9334 && (((TREE_CODE (arg0) == PLUS_EXPR
9335 || TREE_CODE (arg0) == MINUS_EXPR)
9336 && TREE_CODE (arg1) == MULT_EXPR)
9337 || ((TREE_CODE (arg1) == PLUS_EXPR
9338 || TREE_CODE (arg1) == MINUS_EXPR)
9339 && TREE_CODE (arg0) == MULT_EXPR)))
9341 tree parg0, parg1, parg, marg;
9342 enum tree_code pcode;
9344 if (TREE_CODE (arg1) == MULT_EXPR)
9345 parg = arg0, marg = arg1;
9346 else
9347 parg = arg1, marg = arg0;
9348 pcode = TREE_CODE (parg);
9349 parg0 = TREE_OPERAND (parg, 0);
9350 parg1 = TREE_OPERAND (parg, 1);
9351 STRIP_NOPS (parg0);
9352 STRIP_NOPS (parg1);
9354 if (TREE_CODE (parg0) == MULT_EXPR
9355 && TREE_CODE (parg1) != MULT_EXPR)
9356 return fold_build2_loc (loc, pcode, type,
9357 fold_build2_loc (loc, PLUS_EXPR, type,
9358 fold_convert_loc (loc, type,
9359 parg0),
9360 fold_convert_loc (loc, type,
9361 marg)),
9362 fold_convert_loc (loc, type, parg1));
9363 if (TREE_CODE (parg0) != MULT_EXPR
9364 && TREE_CODE (parg1) == MULT_EXPR)
9365 return
9366 fold_build2_loc (loc, PLUS_EXPR, type,
9367 fold_convert_loc (loc, type, parg0),
9368 fold_build2_loc (loc, pcode, type,
9369 fold_convert_loc (loc, type, marg),
9370 fold_convert_loc (loc, type,
9371 parg1)));
9374 else
9376 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9377 to __complex__ ( x, y ). This is not the same for SNaNs or
9378 if signed zeros are involved. */
9379 if (!HONOR_SNANS (element_mode (arg0))
9380 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9381 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9383 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9384 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9385 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9386 bool arg0rz = false, arg0iz = false;
9387 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9388 || (arg0i && (arg0iz = real_zerop (arg0i))))
9390 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9391 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9392 if (arg0rz && arg1i && real_zerop (arg1i))
9394 tree rp = arg1r ? arg1r
9395 : build1 (REALPART_EXPR, rtype, arg1);
9396 tree ip = arg0i ? arg0i
9397 : build1 (IMAGPART_EXPR, rtype, arg0);
9398 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9400 else if (arg0iz && arg1r && real_zerop (arg1r))
9402 tree rp = arg0r ? arg0r
9403 : build1 (REALPART_EXPR, rtype, arg0);
9404 tree ip = arg1i ? arg1i
9405 : build1 (IMAGPART_EXPR, rtype, arg1);
9406 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9411 if (flag_unsafe_math_optimizations
9412 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9413 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9414 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9415 return tem;
9417 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9418 We associate floats only if the user has specified
9419 -fassociative-math. */
9420 if (flag_associative_math
9421 && TREE_CODE (arg1) == PLUS_EXPR
9422 && TREE_CODE (arg0) != MULT_EXPR)
9424 tree tree10 = TREE_OPERAND (arg1, 0);
9425 tree tree11 = TREE_OPERAND (arg1, 1);
9426 if (TREE_CODE (tree11) == MULT_EXPR
9427 && TREE_CODE (tree10) == MULT_EXPR)
9429 tree tree0;
9430 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9431 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9434 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9435 We associate floats only if the user has specified
9436 -fassociative-math. */
9437 if (flag_associative_math
9438 && TREE_CODE (arg0) == PLUS_EXPR
9439 && TREE_CODE (arg1) != MULT_EXPR)
9441 tree tree00 = TREE_OPERAND (arg0, 0);
9442 tree tree01 = TREE_OPERAND (arg0, 1);
9443 if (TREE_CODE (tree01) == MULT_EXPR
9444 && TREE_CODE (tree00) == MULT_EXPR)
9446 tree tree0;
9447 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9448 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9453 bit_rotate:
9454 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9455 is a rotate of A by C1 bits. */
9456 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9457 is a rotate of A by B bits. */
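/* For example (illustrative): for a 32-bit unsigned A,
   (A << 8) + (A >> 24) becomes A left-rotated by 8, since the two
   shift counts sum to the precision of A.  */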
9459 enum tree_code code0, code1;
9460 tree rtype;
9461 code0 = TREE_CODE (arg0);
9462 code1 = TREE_CODE (arg1);
9463 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9464 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9465 && operand_equal_p (TREE_OPERAND (arg0, 0),
9466 TREE_OPERAND (arg1, 0), 0)
9467 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9468 TYPE_UNSIGNED (rtype))
9469 /* Only create rotates in complete modes. Other cases are not
9470 expanded properly. */
9471 && (element_precision (rtype)
9472 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9474 tree tree01, tree11;
9475 enum tree_code code01, code11;
9477 tree01 = TREE_OPERAND (arg0, 1);
9478 tree11 = TREE_OPERAND (arg1, 1);
9479 STRIP_NOPS (tree01);
9480 STRIP_NOPS (tree11);
9481 code01 = TREE_CODE (tree01);
9482 code11 = TREE_CODE (tree11);
9483 if (code01 == INTEGER_CST
9484 && code11 == INTEGER_CST
9485 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9486 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9488 tem = build2_loc (loc, LROTATE_EXPR,
9489 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9490 TREE_OPERAND (arg0, 0),
9491 code0 == LSHIFT_EXPR
9492 ? TREE_OPERAND (arg0, 1)
9493 : TREE_OPERAND (arg1, 1));
9494 return fold_convert_loc (loc, type, tem);
9496 else if (code11 == MINUS_EXPR)
9498 tree tree110, tree111;
9499 tree110 = TREE_OPERAND (tree11, 0);
9500 tree111 = TREE_OPERAND (tree11, 1);
9501 STRIP_NOPS (tree110);
9502 STRIP_NOPS (tree111);
9503 if (TREE_CODE (tree110) == INTEGER_CST
9504 && 0 == compare_tree_int (tree110,
9505 element_precision
9506 (TREE_TYPE (TREE_OPERAND
9507 (arg0, 0))))
9508 && operand_equal_p (tree01, tree111, 0))
9509 return
9510 fold_convert_loc (loc, type,
9511 build2 ((code0 == LSHIFT_EXPR
9512 ? LROTATE_EXPR
9513 : RROTATE_EXPR),
9514 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9515 TREE_OPERAND (arg0, 0),
9516 TREE_OPERAND (arg0, 1)));
9518 else if (code01 == MINUS_EXPR)
9520 tree tree010, tree011;
9521 tree010 = TREE_OPERAND (tree01, 0);
9522 tree011 = TREE_OPERAND (tree01, 1);
9523 STRIP_NOPS (tree010);
9524 STRIP_NOPS (tree011);
9525 if (TREE_CODE (tree010) == INTEGER_CST
9526 && 0 == compare_tree_int (tree010,
9527 element_precision
9528 (TREE_TYPE (TREE_OPERAND
9529 (arg0, 0))))
9530 && operand_equal_p (tree11, tree011, 0))
9531 return fold_convert_loc
9532 (loc, type,
9533 build2 ((code0 != LSHIFT_EXPR
9534 ? LROTATE_EXPR
9535 : RROTATE_EXPR),
9536 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9537 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9542 associate:
9543 /* In most languages, we can't associate operations on floats through
9544 parentheses. Rather than remember where the parentheses were, we
9545 don't associate floats at all, unless the user has specified
9546 -fassociative-math.
9547 And, we need to make sure type is not saturating. */
9549 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9550 && !TYPE_SATURATING (type))
9552 tree var0, con0, lit0, minus_lit0;
9553 tree var1, con1, lit1, minus_lit1;
9554 tree atype = type;
9555 bool ok = true;
9557 /* Split both trees into variables, constants, and literals. Then
9558 associate each group together, the constants with literals,
9559 then the result with variables. This increases the chances of
9560 literals being recombined later and of generating relocatable
9561 expressions for the sum of a constant and literal. */
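/* For instance (illustrative): in (x + 3) + (y + 5) the variables
   x, y and the literals 3, 5 are grouped separately, so the literals
   can combine into (x + y) + 8.  */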
9562 var0 = split_tree (loc, arg0, type, code,
9563 &con0, &lit0, &minus_lit0, 0);
9564 var1 = split_tree (loc, arg1, type, code,
9565 &con1, &lit1, &minus_lit1, code == MINUS_EXPR);
9567 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9568 if (code == MINUS_EXPR)
9569 code = PLUS_EXPR;
9571 /* With undefined overflow prefer doing association in a type
9572 which wraps on overflow, if that is one of the operand types. */
9573 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9574 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9576 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9577 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9578 atype = TREE_TYPE (arg0);
9579 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9580 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9581 atype = TREE_TYPE (arg1);
9582 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9585 /* With undefined overflow we can only associate constants with one
9586 variable, and constants whose association doesn't overflow. */
9587 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9588 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9590 if (var0 && var1)
9592 tree tmp0 = var0;
9593 tree tmp1 = var1;
9594 bool one_neg = false;
9596 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9598 tmp0 = TREE_OPERAND (tmp0, 0);
9599 one_neg = !one_neg;
9601 if (CONVERT_EXPR_P (tmp0)
9602 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9603 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9604 <= TYPE_PRECISION (atype)))
9605 tmp0 = TREE_OPERAND (tmp0, 0);
9606 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9608 tmp1 = TREE_OPERAND (tmp1, 0);
9609 one_neg = !one_neg;
9611 if (CONVERT_EXPR_P (tmp1)
9612 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9613 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9614 <= TYPE_PRECISION (atype)))
9615 tmp1 = TREE_OPERAND (tmp1, 0);
9616 /* The only case we can still associate with two variables
9617 is if they cancel out. */
9618 if (!one_neg
9619 || !operand_equal_p (tmp0, tmp1, 0))
9620 ok = false;
9624 /* Only do something if we found more than two objects. Otherwise,
9625 nothing has changed and we risk infinite recursion. */
9626 if (ok
9627 && (2 < ((var0 != 0) + (var1 != 0)
9628 + (con0 != 0) + (con1 != 0)
9629 + (lit0 != 0) + (lit1 != 0)
9630 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9632 bool any_overflows = false;
9633 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9634 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9635 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9636 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9637 var0 = associate_trees (loc, var0, var1, code, atype);
9638 con0 = associate_trees (loc, con0, con1, code, atype);
9639 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9640 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9641 code, atype);
9643 /* Preserve the MINUS_EXPR if the negative part of the literal is
9644 greater than the positive part. Otherwise, the multiplicative
9645 folding code (i.e. extract_muldiv) may be fooled in case
9646 unsigned constants are subtracted, like in the following
9647 example: ((X*2 + 4) - 8U)/2. */
9648 if (minus_lit0 && lit0)
9650 if (TREE_CODE (lit0) == INTEGER_CST
9651 && TREE_CODE (minus_lit0) == INTEGER_CST
9652 && tree_int_cst_lt (lit0, minus_lit0))
9654 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9655 MINUS_EXPR, atype);
9656 lit0 = 0;
9658 else
9660 lit0 = associate_trees (loc, lit0, minus_lit0,
9661 MINUS_EXPR, atype);
9662 minus_lit0 = 0;
9666 /* Don't introduce overflows through reassociation. */
9667 if (!any_overflows
9668 && ((lit0 && TREE_OVERFLOW_P (lit0))
9669 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9670 return NULL_TREE;
9672 if (minus_lit0)
9674 if (con0 == 0)
9675 return
9676 fold_convert_loc (loc, type,
9677 associate_trees (loc, var0, minus_lit0,
9678 MINUS_EXPR, atype));
9679 else
9681 con0 = associate_trees (loc, con0, minus_lit0,
9682 MINUS_EXPR, atype);
9683 return
9684 fold_convert_loc (loc, type,
9685 associate_trees (loc, var0, con0,
9686 PLUS_EXPR, atype));
9690 con0 = associate_trees (loc, con0, lit0, code, atype);
9691 return
9692 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9693 code, atype));
9697 return NULL_TREE;
9699 case MINUS_EXPR:
9700 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9701 if (TREE_CODE (arg0) == NEGATE_EXPR
9702 && negate_expr_p (op1))
9703 return fold_build2_loc (loc, MINUS_EXPR, type,
9704 negate_expr (op1),
9705 fold_convert_loc (loc, type,
9706 TREE_OPERAND (arg0, 0)));
9708 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9709 __complex__ ( x, -y ). This is not the same for SNaNs or if
9710 signed zeros are involved. */
9711 if (!HONOR_SNANS (element_mode (arg0))
9712 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9713 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9715 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9716 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9717 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9718 bool arg0rz = false, arg0iz = false;
9719 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9720 || (arg0i && (arg0iz = real_zerop (arg0i))))
9722 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9723 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9724 if (arg0rz && arg1i && real_zerop (arg1i))
9726 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9727 arg1r ? arg1r
9728 : build1 (REALPART_EXPR, rtype, arg1));
9729 tree ip = arg0i ? arg0i
9730 : build1 (IMAGPART_EXPR, rtype, arg0);
9731 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9733 else if (arg0iz && arg1r && real_zerop (arg1r))
9735 tree rp = arg0r ? arg0r
9736 : build1 (REALPART_EXPR, rtype, arg0);
9737 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9738 arg1i ? arg1i
9739 : build1 (IMAGPART_EXPR, rtype, arg1));
9740 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9745 /* A - B -> A + (-B) if B is easily negatable. */
9746 if (negate_expr_p (op1)
9747 && ! TYPE_OVERFLOW_SANITIZED (type)
9748 && ((FLOAT_TYPE_P (type)
9749 /* Avoid this transformation if B is a positive REAL_CST. */
9750 && (TREE_CODE (op1) != REAL_CST
9751 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9752 || INTEGRAL_TYPE_P (type)))
9753 return fold_build2_loc (loc, PLUS_EXPR, type,
9754 fold_convert_loc (loc, type, arg0),
9755 negate_expr (op1));
9757 /* Fold &a[i] - &a[j] to i-j. */
9758 if (TREE_CODE (arg0) == ADDR_EXPR
9759 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9760 && TREE_CODE (arg1) == ADDR_EXPR
9761 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9763 tree tem = fold_addr_of_array_ref_difference (loc, type,
9764 TREE_OPERAND (arg0, 0),
9765 TREE_OPERAND (arg1, 0));
9766 if (tem)
9767 return tem;
9770 if (FLOAT_TYPE_P (type)
9771 && flag_unsafe_math_optimizations
9772 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9773 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9774 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9775 return tem;
9777 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9778 the constant 1. Make sure the type is not saturating and has the signedness of
9779 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9780 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9781 if ((TREE_CODE (arg0) == MULT_EXPR
9782 || TREE_CODE (arg1) == MULT_EXPR)
9783 && !TYPE_SATURATING (type)
9784 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9785 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9786 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9788 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9789 if (tem)
9790 return tem;
9793 goto associate;
9795 case MULT_EXPR:
9796 if (! FLOAT_TYPE_P (type))
9798 /* Transform x * -C into -x * C if x is easily negatable. */
9799 if (TREE_CODE (op1) == INTEGER_CST
9800 && tree_int_cst_sgn (op1) == -1
9801 && negate_expr_p (op0)
9802 && (tem = negate_expr (op1)) != op1
9803 && ! TREE_OVERFLOW (tem))
9804 return fold_build2_loc (loc, MULT_EXPR, type,
9805 fold_convert_loc (loc, type,
9806 negate_expr (op0)), tem);
9808 strict_overflow_p = false;
9809 if (TREE_CODE (arg1) == INTEGER_CST
9810 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9811 &strict_overflow_p)))
9813 if (strict_overflow_p)
9814 fold_overflow_warning (("assuming signed overflow does not "
9815 "occur when simplifying "
9816 "multiplication"),
9817 WARN_STRICT_OVERFLOW_MISC);
9818 return fold_convert_loc (loc, type, tem);
9821 /* Optimize z * conj(z) for integer complex numbers. */
9822 if (TREE_CODE (arg0) == CONJ_EXPR
9823 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9824 return fold_mult_zconjz (loc, type, arg1);
9825 if (TREE_CODE (arg1) == CONJ_EXPR
9826 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9827 return fold_mult_zconjz (loc, type, arg0);
9829 else
9831 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9832 This is not the same for NaNs or if signed zeros are
9833 involved. */
9834 if (!HONOR_NANS (arg0)
9835 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9836 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9837 && TREE_CODE (arg1) == COMPLEX_CST
9838 && real_zerop (TREE_REALPART (arg1)))
9840 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9841 if (real_onep (TREE_IMAGPART (arg1)))
9842 return
9843 fold_build2_loc (loc, COMPLEX_EXPR, type,
9844 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9845 rtype, arg0)),
9846 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9847 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9848 return
9849 fold_build2_loc (loc, COMPLEX_EXPR, type,
9850 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9851 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9852 rtype, arg0)));
9855 /* Optimize z * conj(z) for floating point complex numbers.
9856 Guarded by flag_unsafe_math_optimizations as non-finite
9857 imaginary components don't produce scalar results. */
9858 if (flag_unsafe_math_optimizations
9859 && TREE_CODE (arg0) == CONJ_EXPR
9860 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9861 return fold_mult_zconjz (loc, type, arg1);
9862 if (flag_unsafe_math_optimizations
9863 && TREE_CODE (arg1) == CONJ_EXPR
9864 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9865 return fold_mult_zconjz (loc, type, arg0);
9867 goto associate;
9869 case BIT_IOR_EXPR:
9870 /* Canonicalize (X & C1) | C2. */
9871 if (TREE_CODE (arg0) == BIT_AND_EXPR
9872 && TREE_CODE (arg1) == INTEGER_CST
9873 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9875 int width = TYPE_PRECISION (type), w;
9876 wide_int c1 = TREE_OPERAND (arg0, 1);
9877 wide_int c2 = arg1;
9879 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9880 if ((c1 & c2) == c1)
9881 return omit_one_operand_loc (loc, type, arg1,
9882 TREE_OPERAND (arg0, 0));
9884 wide_int msk = wi::mask (width, false,
9885 TYPE_PRECISION (TREE_TYPE (arg1)));
9887 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9888 if (msk.and_not (c1 | c2) == 0)
9889 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9890 TREE_OPERAND (arg0, 0), arg1);
9892 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9893 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9894 mode which allows further optimizations. */
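/* For instance (illustrative): in (x & 3) | 1, bit 0 of C1 is
   already supplied by C2, so the expression is rewritten as
   (x & 2) | 1.  */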
9895 c1 &= msk;
9896 c2 &= msk;
9897 wide_int c3 = c1.and_not (c2);
9898 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9900 wide_int mask = wi::mask (w, false,
9901 TYPE_PRECISION (type));
9902 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
9904 c3 = mask;
9905 break;
9909 if (c3 != c1)
9910 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9911 fold_build2_loc (loc, BIT_AND_EXPR, type,
9912 TREE_OPERAND (arg0, 0),
9913 wide_int_to_tree (type,
9914 c3)),
9915 arg1);
9918 /* See if this can be simplified into a rotate first. If that
9919 is unsuccessful continue in the association code. */
9920 goto bit_rotate;
9922 case BIT_XOR_EXPR:
9923 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9924 if (TREE_CODE (arg0) == BIT_AND_EXPR
9925 && INTEGRAL_TYPE_P (type)
9926 && integer_onep (TREE_OPERAND (arg0, 1))
9927 && integer_onep (arg1))
9928 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9929 build_zero_cst (TREE_TYPE (arg0)));
9931 /* See if this can be simplified into a rotate first. If that
9932 is unsuccessful continue in the association code. */
9933 goto bit_rotate;
9935 case BIT_AND_EXPR:
9936 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9937 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9938 && INTEGRAL_TYPE_P (type)
9939 && integer_onep (TREE_OPERAND (arg0, 1))
9940 && integer_onep (arg1))
9942 tree tem2;
9943 tem = TREE_OPERAND (arg0, 0);
9944 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9945 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9946 tem, tem2);
9947 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9948 build_zero_cst (TREE_TYPE (tem)));
9950 /* Fold ~X & 1 as (X & 1) == 0. */
9951 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9952 && INTEGRAL_TYPE_P (type)
9953 && integer_onep (arg1))
9955 tree tem2;
9956 tem = TREE_OPERAND (arg0, 0);
9957 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9958 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9959 tem, tem2);
9960 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9961 build_zero_cst (TREE_TYPE (tem)));
9963 /* Fold !X & 1 as X == 0. */
9964 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9965 && integer_onep (arg1))
9967 tem = TREE_OPERAND (arg0, 0);
9968 return fold_build2_loc (loc, EQ_EXPR, type, tem,
9969 build_zero_cst (TREE_TYPE (tem)));
9972 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
9973 multiple of 1 << CST. */
9974 if (TREE_CODE (arg1) == INTEGER_CST)
9976 wide_int cst1 = arg1;
9977 wide_int ncst1 = -cst1;
9978 if ((cst1 & ncst1) == ncst1
9979 && multiple_of_p (type, arg0,
9980 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
9981 return fold_convert_loc (loc, type, arg0);
9984 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
9985 bits from CST2. */
9986 if (TREE_CODE (arg1) == INTEGER_CST
9987 && TREE_CODE (arg0) == MULT_EXPR
9988 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9990 wide_int warg1 = arg1;
9991 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
9993 if (masked == 0)
9994 return omit_two_operands_loc (loc, type, build_zero_cst (type),
9995 arg0, arg1);
9996 else if (masked != warg1)
9998 /* Avoid the transform if arg1 is a mask of some
9999 mode which allows further optimizations. */
10000 int pop = wi::popcount (warg1);
10001 if (!(pop >= BITS_PER_UNIT
10002 && pow2p_hwi (pop)
10003 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10004 return fold_build2_loc (loc, code, type, op0,
10005 wide_int_to_tree (type, masked));
10009 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10010 ((A & N) + B) & M -> (A + B) & M
10011 Similarly if (N & M) == 0,
10012 ((A | N) + B) & M -> (A + B) & M
10013 and for - instead of + (or unary - instead of +)
10014 and/or ^ instead of |.
10015 If B is constant and (B & M) == 0, fold into A & M. */
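/* For instance (illustrative): with M == 0xff and N == 0x1ff,
   ((a & 0x1ff) + b) & 0xff folds to (a + b) & 0xff, because bits
   above the mask cannot affect the masked sum.  */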
10016 if (TREE_CODE (arg1) == INTEGER_CST)
10018 wide_int cst1 = arg1;
10019 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10020 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10021 && (TREE_CODE (arg0) == PLUS_EXPR
10022 || TREE_CODE (arg0) == MINUS_EXPR
10023 || TREE_CODE (arg0) == NEGATE_EXPR)
10024 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10025 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10027 tree pmop[2];
10028 int which = 0;
10029 wide_int cst0;
10031 /* Now we know that arg0 is (C + D) or (C - D) or
10032 -C and arg1 (M) is == (1LL << cst) - 1.
10033 Store C into PMOP[0] and D into PMOP[1]. */
10034 pmop[0] = TREE_OPERAND (arg0, 0);
10035 pmop[1] = NULL;
10036 if (TREE_CODE (arg0) != NEGATE_EXPR)
10038 pmop[1] = TREE_OPERAND (arg0, 1);
10039 which = 1;
10042 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10043 which = -1;
10045 for (; which >= 0; which--)
10046 switch (TREE_CODE (pmop[which]))
10048 case BIT_AND_EXPR:
10049 case BIT_IOR_EXPR:
10050 case BIT_XOR_EXPR:
10051 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10052 != INTEGER_CST)
10053 break;
10054 cst0 = TREE_OPERAND (pmop[which], 1);
10055 cst0 &= cst1;
10056 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10058 if (cst0 != cst1)
10059 break;
10061 else if (cst0 != 0)
10062 break;
10063 /* If C or D is of the form (A & N) where
10064 (N & M) == M, or of the form (A | N) or
10065 (A ^ N) where (N & M) == 0, replace it with A. */
10066 pmop[which] = TREE_OPERAND (pmop[which], 0);
10067 break;
10068 case INTEGER_CST:
10069 /* If C or D is a constant N where (N & M) == 0, it can be
10070 omitted (assumed 0). */
10071 if ((TREE_CODE (arg0) == PLUS_EXPR
10072 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10073 && (cst1 & pmop[which]) == 0)
10074 pmop[which] = NULL;
10075 break;
10076 default:
10077 break;
10080 /* Only build anything new if we optimized one or both arguments
10081 above. */
10082 if (pmop[0] != TREE_OPERAND (arg0, 0)
10083 || (TREE_CODE (arg0) != NEGATE_EXPR
10084 && pmop[1] != TREE_OPERAND (arg0, 1)))
10086 tree utype = TREE_TYPE (arg0);
10087 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10089 /* Perform the operations in a type that has defined
10090 overflow behavior. */
10091 utype = unsigned_type_for (TREE_TYPE (arg0));
10092 if (pmop[0] != NULL)
10093 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10094 if (pmop[1] != NULL)
10095 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10098 if (TREE_CODE (arg0) == NEGATE_EXPR)
10099 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10100 else if (TREE_CODE (arg0) == PLUS_EXPR)
10102 if (pmop[0] != NULL && pmop[1] != NULL)
10103 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10104 pmop[0], pmop[1]);
10105 else if (pmop[0] != NULL)
10106 tem = pmop[0];
10107 else if (pmop[1] != NULL)
10108 tem = pmop[1];
10109 else
10110 return build_int_cst (type, 0);
10112 else if (pmop[0] == NULL)
10113 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10114 else
10115 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10116 pmop[0], pmop[1]);
10117 /* TEM is now the new binary +, - or unary - replacement. */
10118 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10119 fold_convert_loc (loc, utype, arg1));
10120 return fold_convert_loc (loc, type, tem);
10125 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10126 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10127 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10129 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10131 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10132 if (mask == -1)
10133 return
10134 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10137 goto associate;
10139 case RDIV_EXPR:
10140 /* Don't touch a floating-point divide by zero unless the mode
10141 of the constant can represent infinity. */
10142 if (TREE_CODE (arg1) == REAL_CST
10143 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10144 && real_zerop (arg1))
10145 return NULL_TREE;
10147 /* (-A) / (-B) -> A / B */
10148 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10149 return fold_build2_loc (loc, RDIV_EXPR, type,
10150 TREE_OPERAND (arg0, 0),
10151 negate_expr (arg1));
10152 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10153 return fold_build2_loc (loc, RDIV_EXPR, type,
10154 negate_expr (arg0),
10155 TREE_OPERAND (arg1, 0));
10156 return NULL_TREE;
10158 case TRUNC_DIV_EXPR:
10159 /* Fall through */
10161 case FLOOR_DIV_EXPR:
10162 /* Simplify A / (B << N) where A and B are positive and B is
10163 a power of 2, to A >> (N + log2(B)). */
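/* For instance (illustrative): for unsigned a, a / (4 << n) becomes
   a >> (n + 2), folding log2 of the constant factor into the shift
   count.  */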
10164 strict_overflow_p = false;
10165 if (TREE_CODE (arg1) == LSHIFT_EXPR
10166 && (TYPE_UNSIGNED (type)
10167 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10169 tree sval = TREE_OPERAND (arg1, 0);
10170 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10172 tree sh_cnt = TREE_OPERAND (arg1, 1);
10173 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10174 wi::exact_log2 (sval));
10176 if (strict_overflow_p)
10177 fold_overflow_warning (("assuming signed overflow does not "
10178 "occur when simplifying A / (B << N)"),
10179 WARN_STRICT_OVERFLOW_MISC);
10181 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10182 sh_cnt, pow2);
10183 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10184 fold_convert_loc (loc, type, arg0), sh_cnt);
10188 /* Fall through */
10190 case ROUND_DIV_EXPR:
10191 case CEIL_DIV_EXPR:
10192 case EXACT_DIV_EXPR:
10193 if (integer_zerop (arg1))
10194 return NULL_TREE;
10196 /* Convert -A / -B to A / B when the type is signed and overflow is
10197 undefined. */
10198 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10199 && TREE_CODE (arg0) == NEGATE_EXPR
10200 && negate_expr_p (op1))
10202 if (INTEGRAL_TYPE_P (type))
10203 fold_overflow_warning (("assuming signed overflow does not occur "
10204 "when distributing negation across "
10205 "division"),
10206 WARN_STRICT_OVERFLOW_MISC);
10207 return fold_build2_loc (loc, code, type,
10208 fold_convert_loc (loc, type,
10209 TREE_OPERAND (arg0, 0)),
10210 negate_expr (op1));
10212 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10213 && TREE_CODE (arg1) == NEGATE_EXPR
10214 && negate_expr_p (op0))
10216 if (INTEGRAL_TYPE_P (type))
10217 fold_overflow_warning (("assuming signed overflow does not occur "
10218 "when distributing negation across "
10219 "division"),
10220 WARN_STRICT_OVERFLOW_MISC);
10221 return fold_build2_loc (loc, code, type,
10222 negate_expr (op0),
10223 fold_convert_loc (loc, type,
10224 TREE_OPERAND (arg1, 0)));
10227 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10228 operation, EXACT_DIV_EXPR.
10230 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10231 At one time others generated faster code, it's not clear if they do
10232 after the last round of changes to the DIV code in expmed.c. */
10233 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10234 && multiple_of_p (type, arg0, arg1))
10235 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10236 fold_convert (type, arg0),
10237 fold_convert (type, arg1));
10239 strict_overflow_p = false;
10240 if (TREE_CODE (arg1) == INTEGER_CST
10241 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10242 &strict_overflow_p)))
10244 if (strict_overflow_p)
10245 fold_overflow_warning (("assuming signed overflow does not occur "
10246 "when simplifying division"),
10247 WARN_STRICT_OVERFLOW_MISC);
10248 return fold_convert_loc (loc, type, tem);
10251 return NULL_TREE;
10253 case CEIL_MOD_EXPR:
10254 case FLOOR_MOD_EXPR:
10255 case ROUND_MOD_EXPR:
10256 case TRUNC_MOD_EXPR:
10257 strict_overflow_p = false;
10258 if (TREE_CODE (arg1) == INTEGER_CST
10259 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10260 &strict_overflow_p)))
10262 if (strict_overflow_p)
10263 fold_overflow_warning (("assuming signed overflow does not occur "
10264 "when simplifying modulus"),
10265 WARN_STRICT_OVERFLOW_MISC);
10266 return fold_convert_loc (loc, type, tem);
10269 return NULL_TREE;
10271 case LROTATE_EXPR:
10272 case RROTATE_EXPR:
10273 case RSHIFT_EXPR:
10274 case LSHIFT_EXPR:
10275 /* Since a negative shift count is not well-defined,
10276 don't try to compute it in the compiler. */
10277 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10278 return NULL_TREE;
10280 prec = element_precision (type);
10282 /* If we have a rotate of a bit operation with the rotate count and
10283 the second operand of the bit operation both constant,
10284 permute the two operations. */
10285 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10286 && (TREE_CODE (arg0) == BIT_AND_EXPR
10287 || TREE_CODE (arg0) == BIT_IOR_EXPR
10288 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10289 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10291 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10292 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10293 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10294 fold_build2_loc (loc, code, type,
10295 arg00, arg1),
10296 fold_build2_loc (loc, code, type,
10297 arg01, arg1));
10300 /* Two consecutive rotates adding up to some integer
10301 multiple of the precision of the type can be ignored. */
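/* For example (illustrative): on a 32-bit type,
   (x rrotate 8) rrotate 24 rotates by the full precision in total
   and is therefore just x.  */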
10302 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10303 && TREE_CODE (arg0) == RROTATE_EXPR
10304 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10305 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10306 prec) == 0)
10307 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10309 return NULL_TREE;
10311 case MIN_EXPR:
10312 case MAX_EXPR:
10313 goto associate;
10315 case TRUTH_ANDIF_EXPR:
10316 /* Note that the operands of this must be ints
10317 and their values must be 0 or 1.
10318 ("true" is a fixed value perhaps depending on the language.) */
10319 /* If first arg is constant zero, return it. */
10320 if (integer_zerop (arg0))
10321 return fold_convert_loc (loc, type, arg0);
10322 /* FALLTHRU */
10323 case TRUTH_AND_EXPR:
10324 /* If either arg is constant true, drop it. */
10325 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10326 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10327 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10328 /* Preserve sequence points. */
10329 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10330 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10331 /* If second arg is constant zero, result is zero, but first arg
10332 must be evaluated. */
10333 if (integer_zerop (arg1))
10334 return omit_one_operand_loc (loc, type, arg1, arg0);
10335 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10336 case will be handled here. */
10337 if (integer_zerop (arg0))
10338 return omit_one_operand_loc (loc, type, arg0, arg1);
10340 /* !X && X is always false. */
10341 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10342 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10343 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10344 /* X && !X is always false. */
10345 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10346 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10347 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10349 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10350 means A >= Y && A != MAX, but in this case we know that
10351 A < X <= MAX. */
10353 if (!TREE_SIDE_EFFECTS (arg0)
10354 && !TREE_SIDE_EFFECTS (arg1))
10356 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10357 if (tem && !operand_equal_p (tem, arg0, 0))
10358 return fold_build2_loc (loc, code, type, tem, arg1);
10360 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10361 if (tem && !operand_equal_p (tem, arg1, 0))
10362 return fold_build2_loc (loc, code, type, arg0, tem);
10365 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10366 != NULL_TREE)
10367 return tem;
10369 return NULL_TREE;
10371 case TRUTH_ORIF_EXPR:
10372 /* Note that the operands of this must be ints
10373 and their values must be 0 or 1.
10374 ("true" is a fixed value perhaps depending on the language.) */
10375 /* If first arg is constant true, return it. */
10376 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10377 return fold_convert_loc (loc, type, arg0);
10378 /* FALLTHRU */
10379 case TRUTH_OR_EXPR:
10380 /* If either arg is constant zero, drop it. */
10381 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10382 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10383 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10384 /* Preserve sequence points. */
10385 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10386 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10387 /* If second arg is constant true, result is true, but we must
10388 evaluate first arg. */
10389 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10390 return omit_one_operand_loc (loc, type, arg1, arg0);
10391 /* Likewise for first arg, but note this only occurs here for
10392 TRUTH_OR_EXPR. */
10393 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10394 return omit_one_operand_loc (loc, type, arg0, arg1);
10396 /* !X || X is always true. */
10397 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10398 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10399 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10400 /* X || !X is always true. */
10401 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10402 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10403 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10405 /* (X && !Y) || (!X && Y) is X ^ Y */
10406 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10407 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10409 tree a0, a1, l0, l1, n0, n1;
10411 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10412 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10414 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10415 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10417 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10418 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10420 if ((operand_equal_p (n0, a0, 0)
10421 && operand_equal_p (n1, a1, 0))
10422 || (operand_equal_p (n0, a1, 0)
10423 && operand_equal_p (n1, a0, 0)))
10424 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10427 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10428 != NULL_TREE)
10429 return tem;
10431 return NULL_TREE;
10433 case TRUTH_XOR_EXPR:
10434 /* If the second arg is constant zero, drop it. */
10435 if (integer_zerop (arg1))
10436 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10437 /* If the second arg is constant true, this is a logical inversion. */
10438 if (integer_onep (arg1))
10440 tem = invert_truthvalue_loc (loc, arg0);
10441 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10443 /* Identical arguments cancel to zero. */
10444 if (operand_equal_p (arg0, arg1, 0))
10445 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10447 /* !X ^ X is always true. */
10448 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10449 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10450 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10452 /* X ^ !X is always true. */
10453 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10454 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10455 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10457 return NULL_TREE;
10459 case EQ_EXPR:
10460 case NE_EXPR:
10461 STRIP_NOPS (arg0);
10462 STRIP_NOPS (arg1);
10464 tem = fold_comparison (loc, code, type, op0, op1);
10465 if (tem != NULL_TREE)
10466 return tem;
10468 /* bool_var != 1 becomes !bool_var. */
10469 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10470 && code == NE_EXPR)
10471 return fold_convert_loc (loc, type,
10472 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10473 TREE_TYPE (arg0), arg0));
10475 /* bool_var == 0 becomes !bool_var. */
10476 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10477 && code == EQ_EXPR)
10478 return fold_convert_loc (loc, type,
10479 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10480 TREE_TYPE (arg0), arg0));
10482 /* !exp != 0 becomes !exp */
10483 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10484 && code == NE_EXPR)
10485 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10487 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10488 if ((TREE_CODE (arg0) == PLUS_EXPR
10489 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10490 || TREE_CODE (arg0) == MINUS_EXPR)
10491 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10492 0)),
10493 arg1, 0)
10494 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10495 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10497 tree val = TREE_OPERAND (arg0, 1);
10498 val = fold_build2_loc (loc, code, type, val,
10499 build_int_cst (TREE_TYPE (val), 0));
10500 return omit_two_operands_loc (loc, type, val,
10501 TREE_OPERAND (arg0, 0), arg1);
10504 /* Transform comparisons of the form X CMP X +- Y to Y CMP 0. */
10505 if ((TREE_CODE (arg1) == PLUS_EXPR
10506 || TREE_CODE (arg1) == POINTER_PLUS_EXPR
10507 || TREE_CODE (arg1) == MINUS_EXPR)
10508 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10509 0)),
10510 arg0, 0)
10511 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10512 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10514 tree val = TREE_OPERAND (arg1, 1);
10515 val = fold_build2_loc (loc, code, type, val,
10516 build_int_cst (TREE_TYPE (val), 0));
10517 return omit_two_operands_loc (loc, type, val,
10518 TREE_OPERAND (arg1, 0), arg0);
10521 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
10522 if (TREE_CODE (arg0) == MINUS_EXPR
10523 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10524 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10525 1)),
10526 arg1, 0)
10527 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10528 return omit_two_operands_loc (loc, type,
10529 code == NE_EXPR
10530 ? boolean_true_node : boolean_false_node,
10531 TREE_OPERAND (arg0, 1), arg1);
10533 /* Transform comparisons of the form X CMP C - X if C % 2 == 1. */
10534 if (TREE_CODE (arg1) == MINUS_EXPR
10535 && TREE_CODE (TREE_OPERAND (arg1, 0)) == INTEGER_CST
10536 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10537 1)),
10538 arg0, 0)
10539 && wi::extract_uhwi (TREE_OPERAND (arg1, 0), 0, 1) == 1)
10540 return omit_two_operands_loc (loc, type,
10541 code == NE_EXPR
10542 ? boolean_true_node : boolean_false_node,
10543 TREE_OPERAND (arg1, 1), arg0);
10545 /* If this is an EQ or NE comparison with zero and ARG0 is
10546 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10547 two operations, but the latter can be done in one less insn
10548 on machines that have only two-operand insns or on which a
10549 constant cannot be the first operand. */
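/* For instance (illustrative): ((1 << n) & flags) != 0 becomes
   ((flags >> n) & 1) != 0, avoiding a constant first operand for the
   shift.  */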
10550 if (TREE_CODE (arg0) == BIT_AND_EXPR
10551 && integer_zerop (arg1))
10553 tree arg00 = TREE_OPERAND (arg0, 0);
10554 tree arg01 = TREE_OPERAND (arg0, 1);
10555 if (TREE_CODE (arg00) == LSHIFT_EXPR
10556 && integer_onep (TREE_OPERAND (arg00, 0)))
10558 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10559 arg01, TREE_OPERAND (arg00, 1));
10560 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10561 build_int_cst (TREE_TYPE (arg0), 1));
10562 return fold_build2_loc (loc, code, type,
10563 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10564 arg1);
10566 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10567 && integer_onep (TREE_OPERAND (arg01, 0)))
10569 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10570 arg00, TREE_OPERAND (arg01, 1));
10571 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10572 build_int_cst (TREE_TYPE (arg0), 1));
10573 return fold_build2_loc (loc, code, type,
10574 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10575 arg1);
10579 /* If this is an NE or EQ comparison of zero against the result of a
10580 signed MOD operation whose second operand is a power of 2, make
10581 the MOD operation unsigned since it is simpler and equivalent. */
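/* For instance (illustrative): for int x, (x % 8) == 0 becomes
   ((unsigned) x % 8) == 0; the two tests agree because only the low
   bits decide divisibility by a power of two.  */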
10582 if (integer_zerop (arg1)
10583 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10584 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10585 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10586 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10587 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10588 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10590 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10591 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10592 fold_convert_loc (loc, newtype,
10593 TREE_OPERAND (arg0, 0)),
10594 fold_convert_loc (loc, newtype,
10595 TREE_OPERAND (arg0, 1)));
10597 return fold_build2_loc (loc, code, type, newmod,
10598 fold_convert_loc (loc, newtype, arg1));
10601 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10602 C1 is a valid shift constant, and C2 is a power of two, i.e.
10603 a single bit. */
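/* For instance (illustrative): ((x >> 3) & 4) != 0 becomes
   (x & (4 << 3)) != 0, i.e. (x & 32) != 0, provided the shifted bit
   still fits in the type.  */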
10604 if (TREE_CODE (arg0) == BIT_AND_EXPR
10605 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10606 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10607 == INTEGER_CST
10608 && integer_pow2p (TREE_OPERAND (arg0, 1))
10609 && integer_zerop (arg1))
10611 tree itype = TREE_TYPE (arg0);
10612 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10613 prec = TYPE_PRECISION (itype);
10615 /* Check for a valid shift count. */
10616 if (wi::ltu_p (arg001, prec))
10618 tree arg01 = TREE_OPERAND (arg0, 1);
10619 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10620 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10621 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10622 can be rewritten as (X & (C2 << C1)) != 0. */
10623 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10625 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10626 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10627 return fold_build2_loc (loc, code, type, tem,
10628 fold_convert_loc (loc, itype, arg1));
10630 /* Otherwise, for signed (arithmetic) shifts,
10631 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10632 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10633 else if (!TYPE_UNSIGNED (itype))
10634 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10635 arg000, build_int_cst (itype, 0));
10636 /* Otherwise, for unsigned (logical) shifts,
10637 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10638 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10639 else
10640 return omit_one_operand_loc (loc, type,
10641 code == EQ_EXPR ? integer_one_node
10642 : integer_zero_node,
10643 arg000);
10647 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10648 Similarly for NE_EXPR. */
10649 if (TREE_CODE (arg0) == BIT_AND_EXPR
10650 && TREE_CODE (arg1) == INTEGER_CST
10651 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10653 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10654 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10655 TREE_OPERAND (arg0, 1));
10656 tree dandnotc
10657 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10658 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10659 notc);
10660 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10661 if (integer_nonzerop (dandnotc))
10662 return omit_one_operand_loc (loc, type, rslt, arg0);
10665 /* If this is a comparison of a field, we may be able to simplify it. */
10666 if ((TREE_CODE (arg0) == COMPONENT_REF
10667 || TREE_CODE (arg0) == BIT_FIELD_REF)
10668 /* Handle the constant case even without -O
10669 to make sure the warnings are given. */
10670 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10672 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10673 if (t1)
10674 return t1;
10677 /* Optimize comparisons of strlen vs zero to a compare of the
10678 first character of the string vs zero. To wit,
10679 strlen(ptr) == 0 => *ptr == 0
10680 strlen(ptr) != 0 => *ptr != 0
10681 Other cases should reduce to one of these two (or a constant)
10682 due to the return value of strlen being unsigned. */
10683 if (TREE_CODE (arg0) == CALL_EXPR
10684 && integer_zerop (arg1))
10686 tree fndecl = get_callee_fndecl (arg0);
10688 if (fndecl
10689 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10690 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10691 && call_expr_nargs (arg0) == 1
10692 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10694 tree iref = build_fold_indirect_ref_loc (loc,
10695 CALL_EXPR_ARG (arg0, 0));
10696 return fold_build2_loc (loc, code, type, iref,
10697 build_int_cst (TREE_TYPE (iref), 0));
10701 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10702 of X. Similarly fold (X >> C) == 0 into X >= 0. */
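/* For example (illustrative): for 32-bit int x, (x >> 31) != 0
   becomes x < 0; an unsigned x is first converted to the signed
   type.  */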
10703 if (TREE_CODE (arg0) == RSHIFT_EXPR
10704 && integer_zerop (arg1)
10705 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10707 tree arg00 = TREE_OPERAND (arg0, 0);
10708 tree arg01 = TREE_OPERAND (arg0, 1);
10709 tree itype = TREE_TYPE (arg00);
10710 if (wi::eq_p (arg01, element_precision (itype) - 1))
10712 if (TYPE_UNSIGNED (itype))
10714 itype = signed_type_for (itype);
10715 arg00 = fold_convert_loc (loc, itype, arg00);
10717 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10718 type, arg00, build_zero_cst (itype));
10722 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10723 (X & C) == 0 when C is a single bit. */
10724 if (TREE_CODE (arg0) == BIT_AND_EXPR
10725 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10726 && integer_zerop (arg1)
10727 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10729 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10730 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10731 TREE_OPERAND (arg0, 1));
10732 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10733 type, tem,
10734 fold_convert_loc (loc, TREE_TYPE (arg0),
10735 arg1));
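/* Illustration: with the single bit C == 4, (~X & 4) == 0 holds exactly
   when bit 2 of X is set, so it folds to (X & 4) != 0, trading the
   BIT_NOT_EXPR for an inverted comparison.  */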
10738 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10739 constant C is a power of two, i.e. a single bit. */
10740 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10741 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10742 && integer_zerop (arg1)
10743 && integer_pow2p (TREE_OPERAND (arg0, 1))
10744 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10745 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10747 tree arg00 = TREE_OPERAND (arg0, 0);
10748 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10749 arg00, build_int_cst (TREE_TYPE (arg00), 0));
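/* Illustration: ((X & 8) ^ 8) == 0 holds exactly when X & 8 == 8,
   i.e. when bit 3 of X is set, hence it folds to (X & 8) != 0.  */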
10752 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10753 when C is a power of two, i.e. a single bit. */
10754 if (TREE_CODE (arg0) == BIT_AND_EXPR
10755 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10756 && integer_zerop (arg1)
10757 && integer_pow2p (TREE_OPERAND (arg0, 1))
10758 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10759 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10761 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10762 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10763 arg000, TREE_OPERAND (arg0, 1));
10764 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10765 tem, build_int_cst (TREE_TYPE (tem), 0));
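/* Illustration: ((X ^ 1) & 1) == 0 holds exactly when the low bit of X
   is 1, hence it folds to (X & 1) != 0.  */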
10768 if (integer_zerop (arg1)
10769 && tree_expr_nonzero_p (arg0))
10771 tree res = constant_boolean_node (code == NE_EXPR, type);
10772 return omit_one_operand_loc (loc, type, res, arg0);
10775 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries. */
10776 if (TREE_CODE (arg0) == BIT_AND_EXPR
10777 && TREE_CODE (arg1) == BIT_AND_EXPR)
10779 tree arg00 = TREE_OPERAND (arg0, 0);
10780 tree arg01 = TREE_OPERAND (arg0, 1);
10781 tree arg10 = TREE_OPERAND (arg1, 0);
10782 tree arg11 = TREE_OPERAND (arg1, 1);
10783 tree itype = TREE_TYPE (arg0);
10785 if (operand_equal_p (arg01, arg11, 0))
10786 return fold_build2_loc (loc, code, type,
10787 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10788 fold_build2_loc (loc,
10789 BIT_XOR_EXPR, itype,
10790 arg00, arg10),
10791 arg01),
10792 build_zero_cst (itype));
10794 if (operand_equal_p (arg01, arg10, 0))
10795 return fold_build2_loc (loc, code, type,
10796 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10797 fold_build2_loc (loc,
10798 BIT_XOR_EXPR, itype,
10799 arg00, arg11),
10800 arg01),
10801 build_zero_cst (itype));
10803 if (operand_equal_p (arg00, arg11, 0))
10804 return fold_build2_loc (loc, code, type,
10805 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10806 fold_build2_loc (loc,
10807 BIT_XOR_EXPR, itype,
10808 arg01, arg10),
10809 arg00),
10810 build_zero_cst (itype));
10812 if (operand_equal_p (arg00, arg10, 0))
10813 return fold_build2_loc (loc, code, type,
10814 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10815 fold_build2_loc (loc,
10816 BIT_XOR_EXPR, itype,
10817 arg01, arg11),
10818 arg00),
10819 build_zero_cst (itype));
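/* Illustration: (X & 0xff) == (Y & 0xff) becomes
   ((X ^ Y) & 0xff) == 0, i.e. "X and Y agree in the low byte";
   comparing against zero is usually cheaper and exposes the
   BIT_XOR_EXPR to further simplification.  */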
10822 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10823 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10825 tree arg00 = TREE_OPERAND (arg0, 0);
10826 tree arg01 = TREE_OPERAND (arg0, 1);
10827 tree arg10 = TREE_OPERAND (arg1, 0);
10828 tree arg11 = TREE_OPERAND (arg1, 1);
10829 tree itype = TREE_TYPE (arg0);
10831 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10832 operand_equal_p guarantees no side-effects so we don't need
10833 to use omit_one_operand on Z. */
10834 if (operand_equal_p (arg01, arg11, 0))
10835 return fold_build2_loc (loc, code, type, arg00,
10836 fold_convert_loc (loc, TREE_TYPE (arg00),
10837 arg10));
10838 if (operand_equal_p (arg01, arg10, 0))
10839 return fold_build2_loc (loc, code, type, arg00,
10840 fold_convert_loc (loc, TREE_TYPE (arg00),
10841 arg11));
10842 if (operand_equal_p (arg00, arg11, 0))
10843 return fold_build2_loc (loc, code, type, arg01,
10844 fold_convert_loc (loc, TREE_TYPE (arg01),
10845 arg10));
10846 if (operand_equal_p (arg00, arg10, 0))
10847 return fold_build2_loc (loc, code, type, arg01,
10848 fold_convert_loc (loc, TREE_TYPE (arg01),
10849 arg11));
10851 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10852 if (TREE_CODE (arg01) == INTEGER_CST
10853 && TREE_CODE (arg11) == INTEGER_CST)
10855 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10856 fold_convert_loc (loc, itype, arg11));
10857 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10858 return fold_build2_loc (loc, code, type, tem,
10859 fold_convert_loc (loc, itype, arg10));
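/* Illustration: (X ^ Z) == (Y ^ Z) simplifies to X == Y because XORing
   with the same Z is an involution, and (X ^ 5) == (Y ^ 3) becomes
   (X ^ 6) == Y since 5 ^ 3 == 6 folds into a single constant.  */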
10863 /* Attempt to simplify equality/inequality comparisons of complex
10864 values. Only lower the comparison if the result is known or
10865 can be simplified to a single scalar comparison. */
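/* Illustration: for operands a+bi and c+di, equality means
   a == c && b == d. If a == c folds to a false constant, the whole
   EQ_EXPR is false (only side effects of the imaginary parts need
   preserving); if it folds to true, the test reduces to the single
   scalar comparison b == d. The imaginary parts are tried below in
   the same way.  */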
10866 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10867 || TREE_CODE (arg0) == COMPLEX_CST)
10868 && (TREE_CODE (arg1) == COMPLEX_EXPR
10869 || TREE_CODE (arg1) == COMPLEX_CST))
10871 tree real0, imag0, real1, imag1;
10872 tree rcond, icond;
10874 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10876 real0 = TREE_OPERAND (arg0, 0);
10877 imag0 = TREE_OPERAND (arg0, 1);
10879 else
10881 real0 = TREE_REALPART (arg0);
10882 imag0 = TREE_IMAGPART (arg0);
10885 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10887 real1 = TREE_OPERAND (arg1, 0);
10888 imag1 = TREE_OPERAND (arg1, 1);
10890 else
10892 real1 = TREE_REALPART (arg1);
10893 imag1 = TREE_IMAGPART (arg1);
10896 rcond = fold_binary_loc (loc, code, type, real0, real1);
10897 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10899 if (integer_zerop (rcond))
10901 if (code == EQ_EXPR)
10902 return omit_two_operands_loc (loc, type, boolean_false_node,
10903 imag0, imag1);
10904 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10906 else
10908 if (code == NE_EXPR)
10909 return omit_two_operands_loc (loc, type, boolean_true_node,
10910 imag0, imag1);
10911 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10915 icond = fold_binary_loc (loc, code, type, imag0, imag1);
10916 if (icond && TREE_CODE (icond) == INTEGER_CST)
10918 if (integer_zerop (icond))
10920 if (code == EQ_EXPR)
10921 return omit_two_operands_loc (loc, type, boolean_false_node,
10922 real0, real1);
10923 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10925 else
10927 if (code == NE_EXPR)
10928 return omit_two_operands_loc (loc, type, boolean_true_node,
10929 real0, real1);
10930 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
10935 return NULL_TREE;
10937 case LT_EXPR:
10938 case GT_EXPR:
10939 case LE_EXPR:
10940 case GE_EXPR:
10941 tem = fold_comparison (loc, code, type, op0, op1);
10942 if (tem != NULL_TREE)
10943 return tem;
10945 /* Transform comparisons of the form X +- C CMP X. */
10946 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10947 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10948 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10949 && !HONOR_SNANS (arg0))
10950 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10951 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
10953 tree arg01 = TREE_OPERAND (arg0, 1);
10954 enum tree_code code0 = TREE_CODE (arg0);
10955 int is_positive;
10957 if (TREE_CODE (arg01) == REAL_CST)
10958 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10959 else
10960 is_positive = tree_int_cst_sgn (arg01);
10962 /* (X - c) > X becomes false. */
10963 if (code == GT_EXPR
10964 && ((code0 == MINUS_EXPR && is_positive >= 0)
10965 || (code0 == PLUS_EXPR && is_positive <= 0)))
10967 if (TREE_CODE (arg01) == INTEGER_CST
10968 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10969 fold_overflow_warning (("assuming signed overflow does not "
10970 "occur when assuming that (X - c) > X "
10971 "is always false"),
10972 WARN_STRICT_OVERFLOW_ALL);
10973 return constant_boolean_node (0, type);
10976 /* Likewise (X + c) < X becomes false. */
10977 if (code == LT_EXPR
10978 && ((code0 == PLUS_EXPR && is_positive >= 0)
10979 || (code0 == MINUS_EXPR && is_positive <= 0)))
10981 if (TREE_CODE (arg01) == INTEGER_CST
10982 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10983 fold_overflow_warning (("assuming signed overflow does not "
10984 "occur when assuming that "
10985 "(X + c) < X is always false"),
10986 WARN_STRICT_OVERFLOW_ALL);
10987 return constant_boolean_node (0, type);
10990 /* Convert (X - c) <= X to true. */
10991 if (!HONOR_NANS (arg1)
10992 && code == LE_EXPR
10993 && ((code0 == MINUS_EXPR && is_positive >= 0)
10994 || (code0 == PLUS_EXPR && is_positive <= 0)))
10996 if (TREE_CODE (arg01) == INTEGER_CST
10997 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10998 fold_overflow_warning (("assuming signed overflow does not "
10999 "occur when assuming that "
11000 "(X - c) <= X is always true"),
11001 WARN_STRICT_OVERFLOW_ALL);
11002 return constant_boolean_node (1, type);
11005 /* Convert (X + c) >= X to true. */
11006 if (!HONOR_NANS (arg1)
11007 && code == GE_EXPR
11008 && ((code0 == PLUS_EXPR && is_positive >= 0)
11009 || (code0 == MINUS_EXPR && is_positive <= 0)))
11011 if (TREE_CODE (arg01) == INTEGER_CST
11012 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11013 fold_overflow_warning (("assuming signed overflow does not "
11014 "occur when assuming that "
11015 "(X + c) >= X is always true"),
11016 WARN_STRICT_OVERFLOW_ALL);
11017 return constant_boolean_node (1, type);
11020 if (TREE_CODE (arg01) == INTEGER_CST)
11022 /* Convert X + c > X and X - c < X to true for integers. */
11023 if (code == GT_EXPR
11024 && ((code0 == PLUS_EXPR && is_positive > 0)
11025 || (code0 == MINUS_EXPR && is_positive < 0)))
11027 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11028 fold_overflow_warning (("assuming signed overflow does "
11029 "not occur when assuming that "
11030 "(X + c) > X is always true"),
11031 WARN_STRICT_OVERFLOW_ALL);
11032 return constant_boolean_node (1, type);
11035 if (code == LT_EXPR
11036 && ((code0 == MINUS_EXPR && is_positive > 0)
11037 || (code0 == PLUS_EXPR && is_positive < 0)))
11039 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11040 fold_overflow_warning (("assuming signed overflow does "
11041 "not occur when assuming that "
11042 "(X - c) < X is always true"),
11043 WARN_STRICT_OVERFLOW_ALL);
11044 return constant_boolean_node (1, type);
11047 /* Convert X + c <= X and X - c >= X to false for integers. */
11048 if (code == LE_EXPR
11049 && ((code0 == PLUS_EXPR && is_positive > 0)
11050 || (code0 == MINUS_EXPR && is_positive < 0)))
11052 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11053 fold_overflow_warning (("assuming signed overflow does "
11054 "not occur when assuming that "
11055 "(X + c) <= X is always false"),
11056 WARN_STRICT_OVERFLOW_ALL);
11057 return constant_boolean_node (0, type);
11060 if (code == GE_EXPR
11061 && ((code0 == MINUS_EXPR && is_positive > 0)
11062 || (code0 == PLUS_EXPR && is_positive < 0)))
11064 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11065 fold_overflow_warning (("assuming signed overflow does "
11066 "not occur when assuming that "
11067 "(X - c) >= X is always false"),
11068 WARN_STRICT_OVERFLOW_ALL);
11069 return constant_boolean_node (0, type);
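/* Illustration: for signed int X, X + 1 > X folds to 1 here only
   because signed overflow is undefined (TYPE_OVERFLOW_UNDEFINED);
   with -fwrapv, or for unsigned X, X + 1 > X is false when X is the
   maximum value, so the transform is correctly not applied.  */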
11074 /* If we are comparing an ABS_EXPR with a constant, we can
11075 convert all the cases into explicit comparisons, but they may
11076 well not be faster than doing the ABS and one comparison.
11077 But ABS (X) <= C is a range comparison, which becomes a subtraction
11078 and a comparison, and is probably faster. */
11079 if (code == LE_EXPR
11080 && TREE_CODE (arg1) == INTEGER_CST
11081 && TREE_CODE (arg0) == ABS_EXPR
11082 && ! TREE_SIDE_EFFECTS (arg0)
11083 && (0 != (tem = negate_expr (arg1)))
11084 && TREE_CODE (tem) == INTEGER_CST
11085 && !TREE_OVERFLOW (tem))
11086 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11087 build2 (GE_EXPR, type,
11088 TREE_OPERAND (arg0, 0), tem),
11089 build2 (LE_EXPR, type,
11090 TREE_OPERAND (arg0, 0), arg1));
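/* Illustration: ABS (X) <= 5 becomes X >= -5 && X <= 5, which the
   range-check machinery can later turn into a single unsigned test of
   the form (unsigned) (X + 5) <= 10, avoiding the ABS entirely.  */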
11092 /* Convert ABS_EXPR<x> >= 0 to true. */
11093 strict_overflow_p = false;
11094 if (code == GE_EXPR
11095 && (integer_zerop (arg1)
11096 || (! HONOR_NANS (arg0)
11097 && real_zerop (arg1)))
11098 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11100 if (strict_overflow_p)
11101 fold_overflow_warning (("assuming signed overflow does not occur "
11102 "when simplifying comparison of "
11103 "absolute value and zero"),
11104 WARN_STRICT_OVERFLOW_CONDITIONAL);
11105 return omit_one_operand_loc (loc, type,
11106 constant_boolean_node (true, type),
11107 arg0);
11110 /* Convert ABS_EXPR<x> < 0 to false. */
11111 strict_overflow_p = false;
11112 if (code == LT_EXPR
11113 && (integer_zerop (arg1) || real_zerop (arg1))
11114 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11116 if (strict_overflow_p)
11117 fold_overflow_warning (("assuming signed overflow does not occur "
11118 "when simplifying comparison of "
11119 "absolute value and zero"),
11120 WARN_STRICT_OVERFLOW_CONDITIONAL);
11121 return omit_one_operand_loc (loc, type,
11122 constant_boolean_node (false, type),
11123 arg0);
11126 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11127 and similarly for >= into !=. */
11128 if ((code == LT_EXPR || code == GE_EXPR)
11129 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11130 && TREE_CODE (arg1) == LSHIFT_EXPR
11131 && integer_onep (TREE_OPERAND (arg1, 0)))
11132 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11133 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11134 TREE_OPERAND (arg1, 1)),
11135 build_zero_cst (TREE_TYPE (arg0)));
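/* Illustration: for unsigned X, X < (1 << Y) holds exactly when all
   bits of X at position Y and above are clear, hence (X >> Y) == 0;
   X >= (1 << Y) likewise becomes (X >> Y) != 0.  */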
11137 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11138 otherwise Y might be >= # of bits in X's type and thus e.g.
11139 (unsigned char) (1 << Y) for Y == 15 might be 0.
11140 If the cast is widening, then 1 << Y should have unsigned type,
11141 otherwise if Y is number of bits in the signed shift type minus 1,
11142 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
11143 Y == 31 might be 0xffffffff80000000. */
11144 if ((code == LT_EXPR || code == GE_EXPR)
11145 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11146 && CONVERT_EXPR_P (arg1)
11147 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11148 && (element_precision (TREE_TYPE (arg1))
11149 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11150 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11151 || (element_precision (TREE_TYPE (arg1))
11152 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11153 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11155 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11156 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11157 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11158 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11159 build_zero_cst (TREE_TYPE (arg0)));
11162 return NULL_TREE;
11164 case UNORDERED_EXPR:
11165 case ORDERED_EXPR:
11166 case UNLT_EXPR:
11167 case UNLE_EXPR:
11168 case UNGT_EXPR:
11169 case UNGE_EXPR:
11170 case UNEQ_EXPR:
11171 case LTGT_EXPR:
11172 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11174 tree targ0 = strip_float_extensions (arg0);
11175 tree targ1 = strip_float_extensions (arg1);
11176 tree newtype = TREE_TYPE (targ0);
11178 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11179 newtype = TREE_TYPE (targ1);
11181 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11182 return fold_build2_loc (loc, code, type,
11183 fold_convert_loc (loc, newtype, targ0),
11184 fold_convert_loc (loc, newtype, targ1));
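/* Illustration: for floats F1 and F2, comparing (double) F1 with
   (double) F2 is the same as comparing F1 with F2 directly, because
   the widening conversion is exact for every value, including
   infinities and NaNs.  */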
11187 return NULL_TREE;
11189 case COMPOUND_EXPR:
11190 /* When pedantic, a compound expression can be neither an lvalue
11191 nor an integer constant expression. */
11192 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11193 return NULL_TREE;
11194 /* Don't let (0, 0) be a null pointer constant. */
11195 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11196 : fold_convert_loc (loc, type, arg1);
11197 return pedantic_non_lvalue_loc (loc, tem);
11199 case ASSERT_EXPR:
11200 /* An ASSERT_EXPR should never be passed to fold_binary. */
11201 gcc_unreachable ();
11203 default:
11204 return NULL_TREE;
11205 } /* switch (code) */
11208 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11209 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11210 of GOTO_EXPR. */
11212 static tree
11213 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11215 switch (TREE_CODE (*tp))
11217 case LABEL_EXPR:
11218 return *tp;
11220 case GOTO_EXPR:
11221 *walk_subtrees = 0;
11223 /* fall through */
11225 default:
11226 return NULL_TREE;
11230 /* Return whether the sub-tree ST contains a label which is accessible from
11231 outside the sub-tree. */
11233 static bool
11234 contains_label_p (tree st)
11236 return
11237 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
11240 /* Fold a ternary expression of code CODE and type TYPE with operands
11241 OP0, OP1, and OP2. Return the folded expression if folding is
11242 successful. Otherwise, return NULL_TREE. */
11244 tree
11245 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11246 tree op0, tree op1, tree op2)
11248 tree tem;
11249 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11250 enum tree_code_class kind = TREE_CODE_CLASS (code);
11252 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11253 && TREE_CODE_LENGTH (code) == 3);
11255 /* If this is a commutative operation, and OP0 is a constant, move it
11256 to OP1 to reduce the number of tests below. */
11257 if (commutative_ternary_tree_code (code)
11258 && tree_swap_operands_p (op0, op1))
11259 return fold_build3_loc (loc, code, type, op1, op0, op2);
11261 tem = generic_simplify (loc, code, type, op0, op1, op2);
11262 if (tem)
11263 return tem;
11265 /* Strip any conversions that don't change the mode. This is safe
11266 for every expression, except for a comparison expression because
11267 its signedness is derived from its operands. So, in the latter
11268 case, only strip conversions that don't change the signedness.
11270 Note that this is done as an internal manipulation within the
11271 constant folder, in order to find the simplest representation of
11272 the arguments so that their form can be studied. In any case,
11273 the appropriate type conversions should be put back in the tree
11274 that will get out of the constant folder. */
11275 if (op0)
11277 arg0 = op0;
11278 STRIP_NOPS (arg0);
11281 if (op1)
11283 arg1 = op1;
11284 STRIP_NOPS (arg1);
11287 if (op2)
11289 arg2 = op2;
11290 STRIP_NOPS (arg2);
11293 switch (code)
11295 case COMPONENT_REF:
11296 if (TREE_CODE (arg0) == CONSTRUCTOR
11297 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11299 unsigned HOST_WIDE_INT idx;
11300 tree field, value;
11301 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11302 if (field == arg1)
11303 return value;
11305 return NULL_TREE;
11307 case COND_EXPR:
11308 case VEC_COND_EXPR:
11309 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11310 so all simple results must be passed through pedantic_non_lvalue. */
11311 if (TREE_CODE (arg0) == INTEGER_CST)
11313 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11314 tem = integer_zerop (arg0) ? op2 : op1;
11315 /* Only optimize constant conditions when the selected branch
11316 has the same type as the COND_EXPR. This avoids optimizing
11317 away "c ? x : throw", where the throw has a void type.
11318 Avoid throwing away an operand that contains a label. */
11319 if ((!TREE_SIDE_EFFECTS (unused_op)
11320 || !contains_label_p (unused_op))
11321 && (! VOID_TYPE_P (TREE_TYPE (tem))
11322 || VOID_TYPE_P (type)))
11323 return pedantic_non_lvalue_loc (loc, tem);
11324 return NULL_TREE;
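/* Illustration: in 0 ? ({ lab: f (); 1; }) : 2 the dead arm has side
   effects and contains a label that a goto outside the expression
   could still reach, so contains_label_p above prevents folding the
   COND_EXPR down to 2.  */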
11326 else if (TREE_CODE (arg0) == VECTOR_CST)
11328 if ((TREE_CODE (arg1) == VECTOR_CST
11329 || TREE_CODE (arg1) == CONSTRUCTOR)
11330 && (TREE_CODE (arg2) == VECTOR_CST
11331 || TREE_CODE (arg2) == CONSTRUCTOR))
11333 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11334 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11335 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11336 for (i = 0; i < nelts; i++)
11338 tree val = VECTOR_CST_ELT (arg0, i);
11339 if (integer_all_onesp (val))
11340 sel[i] = i;
11341 else if (integer_zerop (val))
11342 sel[i] = nelts + i;
11343 else /* Currently unreachable. */
11344 return NULL_TREE;
11346 tree t = fold_vec_perm (type, arg1, arg2, sel);
11347 if (t != NULL_TREE)
11348 return t;
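/* Illustration: for 4-element vectors, a constant mask of
   {-1, 0, -1, 0} selects lanes {a0, b1, a2, b3}, i.e. the selection
   becomes the permutation {0, 5, 2, 7} over the concatenation of the
   two input vectors.  */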
11352 /* If we have A op B ? A : C, we may be able to convert this to a
11353 simpler expression, depending on the operation and the values
11354 of B and C. Signed zeros prevent all of these transformations,
11355 for reasons given above each one.
11357 Also try swapping the arguments and inverting the conditional. */
11358 if (COMPARISON_CLASS_P (arg0)
11359 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11360 arg1, TREE_OPERAND (arg0, 1))
11361 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11363 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11364 if (tem)
11365 return tem;
11368 if (COMPARISON_CLASS_P (arg0)
11369 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11370 op2,
11371 TREE_OPERAND (arg0, 1))
11372 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11374 location_t loc0 = expr_location_or (arg0, loc);
11375 tem = fold_invert_truthvalue (loc0, arg0);
11376 if (tem && COMPARISON_CLASS_P (tem))
11378 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11379 if (tem)
11380 return tem;
11384 /* If the second operand is simpler than the third, swap them
11385 since that produces better jump optimization results. */
11386 if (truth_value_p (TREE_CODE (arg0))
11387 && tree_swap_operands_p (op1, op2))
11389 location_t loc0 = expr_location_or (arg0, loc);
11390 /* See if this can be inverted. If it can't, possibly because
11391 it was a floating-point inequality comparison, don't do
11392 anything. */
11393 tem = fold_invert_truthvalue (loc0, arg0);
11394 if (tem)
11395 return fold_build3_loc (loc, code, type, tem, op2, op1);
11398 /* Convert A ? 1 : 0 to simply A. */
11399 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11400 : (integer_onep (op1)
11401 && !VECTOR_TYPE_P (type)))
11402 && integer_zerop (op2)
11403 /* If we try to convert OP0 to our type, the
11404 call to fold will try to move the conversion inside
11405 a COND, which will recurse. In that case, the COND_EXPR
11406 is probably the best choice, so leave it alone. */
11407 && type == TREE_TYPE (arg0))
11408 return pedantic_non_lvalue_loc (loc, arg0);
11410 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11411 over COND_EXPR in cases such as floating point comparisons. */
11412 if (integer_zerop (op1)
11413 && code == COND_EXPR
11414 && integer_onep (op2)
11415 && !VECTOR_TYPE_P (type)
11416 && truth_value_p (TREE_CODE (arg0)))
11417 return pedantic_non_lvalue_loc (loc,
11418 fold_convert_loc (loc, type,
11419 invert_truthvalue_loc (loc,
11420 arg0)));
11422 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11423 if (TREE_CODE (arg0) == LT_EXPR
11424 && integer_zerop (TREE_OPERAND (arg0, 1))
11425 && integer_zerop (op2)
11426 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11428 /* sign_bit_p looks through both zero and sign extensions,
11429 but for this optimization only sign extensions are
11430 usable. */
11431 tree tem2 = TREE_OPERAND (arg0, 0);
11432 while (tem != tem2)
11434 if (TREE_CODE (tem2) != NOP_EXPR
11435 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11437 tem = NULL_TREE;
11438 break;
11440 tem2 = TREE_OPERAND (tem2, 0);
11442 /* sign_bit_p only checks ARG1 bits within A's precision.
11443 If <sign bit of A> has wider type than A, bits outside
11444 of A's precision in <sign bit of A> need to be checked.
11445 If they are all 0, this optimization needs to be done
11446 in unsigned A's type; if they are all 1, in signed A's type;
11447 otherwise this can't be done. */
11448 if (tem
11449 && TYPE_PRECISION (TREE_TYPE (tem))
11450 < TYPE_PRECISION (TREE_TYPE (arg1))
11451 && TYPE_PRECISION (TREE_TYPE (tem))
11452 < TYPE_PRECISION (type))
11454 int inner_width, outer_width;
11455 tree tem_type;
11457 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11458 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11459 if (outer_width > TYPE_PRECISION (type))
11460 outer_width = TYPE_PRECISION (type);
11462 wide_int mask = wi::shifted_mask
11463 (inner_width, outer_width - inner_width, false,
11464 TYPE_PRECISION (TREE_TYPE (arg1)));
11466 wide_int common = mask & arg1;
11467 if (common == mask)
11469 tem_type = signed_type_for (TREE_TYPE (tem));
11470 tem = fold_convert_loc (loc, tem_type, tem);
11472 else if (common == 0)
11474 tem_type = unsigned_type_for (TREE_TYPE (tem));
11475 tem = fold_convert_loc (loc, tem_type, tem);
11477 else
11478 tem = NULL;
11481 if (tem)
11482 return
11483 fold_convert_loc (loc, type,
11484 fold_build2_loc (loc, BIT_AND_EXPR,
11485 TREE_TYPE (tem), tem,
11486 fold_convert_loc (loc,
11487 TREE_TYPE (tem),
11488 arg1)));
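/* Illustration (assuming 32-bit int A): A < 0 ? INT_MIN : 0 is simply
   A & INT_MIN, because the selected constant is exactly the sign bit
   of A; the loop above makes sure any intermediate conversions were
   sign extensions.  */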
11491 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11492 already handled above. */
11493 if (TREE_CODE (arg0) == BIT_AND_EXPR
11494 && integer_onep (TREE_OPERAND (arg0, 1))
11495 && integer_zerop (op2)
11496 && integer_pow2p (arg1))
11498 tree tem = TREE_OPERAND (arg0, 0);
11499 STRIP_NOPS (tem);
11500 if (TREE_CODE (tem) == RSHIFT_EXPR
11501 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11502 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11503 tree_to_uhwi (TREE_OPERAND (tem, 1)))
11504 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11505 TREE_OPERAND (tem, 0), arg1);
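/* Illustration: with N == 3, (A >> 3) & 1 ? 8 : 0 yields 8 exactly
   when bit 3 of A is set, which is just A & 8.  */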
11508 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11509 is probably obsolete because the first operand should be a
11510 truth value (that's why we have the two cases above), but let's
11511 leave it in until we can confirm this for all front-ends. */
11512 if (integer_zerop (op2)
11513 && TREE_CODE (arg0) == NE_EXPR
11514 && integer_zerop (TREE_OPERAND (arg0, 1))
11515 && integer_pow2p (arg1)
11516 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11517 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11518 arg1, OEP_ONLY_CONST))
11519 return pedantic_non_lvalue_loc (loc,
11520 fold_convert_loc (loc, type,
11521 TREE_OPERAND (arg0, 0)));
11523 /* Disable the transformations below for vectors, since
11524 fold_binary_op_with_conditional_arg may undo them immediately,
11525 yielding an infinite loop. */
11526 if (code == VEC_COND_EXPR)
11527 return NULL_TREE;
11529 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11530 if (integer_zerop (op2)
11531 && truth_value_p (TREE_CODE (arg0))
11532 && truth_value_p (TREE_CODE (arg1))
11533 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11534 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11535 : TRUTH_ANDIF_EXPR,
11536 type, fold_convert_loc (loc, type, arg0), arg1);
11538 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11539 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
11540 && truth_value_p (TREE_CODE (arg0))
11541 && truth_value_p (TREE_CODE (arg1))
11542 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11544 location_t loc0 = expr_location_or (arg0, loc);
11545 /* Only perform transformation if ARG0 is easily inverted. */
11546 tem = fold_invert_truthvalue (loc0, arg0);
11547 if (tem)
11548 return fold_build2_loc (loc, code == VEC_COND_EXPR
11549 ? BIT_IOR_EXPR
11550 : TRUTH_ORIF_EXPR,
11551 type, fold_convert_loc (loc, type, tem),
11552 arg1);
11555 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11556 if (integer_zerop (arg1)
11557 && truth_value_p (TREE_CODE (arg0))
11558 && truth_value_p (TREE_CODE (op2))
11559 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11561 location_t loc0 = expr_location_or (arg0, loc);
11562 /* Only perform transformation if ARG0 is easily inverted. */
11563 tem = fold_invert_truthvalue (loc0, arg0);
11564 if (tem)
11565 return fold_build2_loc (loc, code == VEC_COND_EXPR
11566 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11567 type, fold_convert_loc (loc, type, tem),
11568 op2);
11571 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11572 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
11573 && truth_value_p (TREE_CODE (arg0))
11574 && truth_value_p (TREE_CODE (op2))
11575 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11576 return fold_build2_loc (loc, code == VEC_COND_EXPR
11577 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11578 type, fold_convert_loc (loc, type, arg0), op2);
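/* Summary of the four transforms above for truth values A and B:
   A ? B : 0 is A && B, A ? B : 1 is !A || B, A ? 0 : B is !A && B,
   and A ? 1 : B is A || B. The VEC_COND_EXPR variants use the
   bitwise BIT_AND_EXPR / BIT_IOR_EXPR codes because vector truth
   values are all-ones/all-zeros masks.  */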
11580 return NULL_TREE;
11582 case CALL_EXPR:
11583 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11584 of fold_ternary on them. */
11585 gcc_unreachable ();
11587 case BIT_FIELD_REF:
11588 if (TREE_CODE (arg0) == VECTOR_CST
11589 && (type == TREE_TYPE (TREE_TYPE (arg0))
11590 || (TREE_CODE (type) == VECTOR_TYPE
11591 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11593 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11594 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11595 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11596 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11598 if (n != 0
11599 && (idx % width) == 0
11600 && (n % width) == 0
11601 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11603 idx = idx / width;
11604 n = n / width;
11606 if (TREE_CODE (arg0) == VECTOR_CST)
11608 if (n == 1)
11609 return VECTOR_CST_ELT (arg0, idx);
11611 tree *vals = XALLOCAVEC (tree, n);
11612 for (unsigned i = 0; i < n; ++i)
11613 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11614 return build_vector (type, vals);
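/* Illustration: for the constant vector {1, 2, 3, 4} with 32-bit
   elements, a 32-bit BIT_FIELD_REF at bit offset 64 is lane 2 and
   folds to 3, and a 64-bit reference at offset 0 folds to the
   subvector {1, 2}.  */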
11619 /* On constants we can use native encode/interpret to constant
11620 fold (nearly) all BIT_FIELD_REFs. */
11621 if (CONSTANT_CLASS_P (arg0)
11622 && can_native_interpret_type_p (type)
11623 && BITS_PER_UNIT == 8)
11625 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11626 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11627 /* Limit us to a reasonable amount of work. To relax the
11628 other limitations we need bit-shifting of the buffer
11629 and rounding up the size. */
11630 if (bitpos % BITS_PER_UNIT == 0
11631 && bitsize % BITS_PER_UNIT == 0
11632 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11634 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11635 unsigned HOST_WIDE_INT len
11636 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11637 bitpos / BITS_PER_UNIT);
11638 if (len > 0
11639 && len * BITS_PER_UNIT >= bitsize)
11641 tree v = native_interpret_expr (type, b,
11642 bitsize / BITS_PER_UNIT);
11643 if (v)
11644 return v;
11649 return NULL_TREE;
11651 case FMA_EXPR:
11652 /* For integers we can decompose the FMA if possible. */
11653 if (TREE_CODE (arg0) == INTEGER_CST
11654 && TREE_CODE (arg1) == INTEGER_CST)
11655 return fold_build2_loc (loc, PLUS_EXPR, type,
11656 const_binop (MULT_EXPR, arg0, arg1), arg2);
11657 if (integer_zerop (arg2))
11658 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
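/* Illustration: FMA <2, 3, 4> folds to 2 * 3 + 4 == 10; unlike the
   floating-point case there is no intermediate rounding to worry
   about for integers, so the decomposition is always exact.  */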
11660 return fold_fma (loc, type, arg0, arg1, arg2);
11662 case VEC_PERM_EXPR:
11663 if (TREE_CODE (arg2) == VECTOR_CST)
11665 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11666 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11667 unsigned char *sel2 = sel + nelts;
11668 bool need_mask_canon = false;
11669 bool need_mask_canon2 = false;
11670 bool all_in_vec0 = true;
11671 bool all_in_vec1 = true;
11672 bool maybe_identity = true;
11673 bool single_arg = (op0 == op1);
11674 bool changed = false;
11676 mask2 = 2 * nelts - 1;
11677 mask = single_arg ? (nelts - 1) : mask2;
11678 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11679 for (i = 0; i < nelts; i++)
11681 tree val = VECTOR_CST_ELT (arg2, i);
11682 if (TREE_CODE (val) != INTEGER_CST)
11683 return NULL_TREE;
11685 /* Make sure that the perm value is in an acceptable
11686 range. */
11687 wide_int t = val;
11688 need_mask_canon |= wi::gtu_p (t, mask);
11689 need_mask_canon2 |= wi::gtu_p (t, mask2);
11690 sel[i] = t.to_uhwi () & mask;
11691 sel2[i] = t.to_uhwi () & mask2;
11693 if (sel[i] < nelts)
11694 all_in_vec1 = false;
11695 else
11696 all_in_vec0 = false;
11698 if ((sel[i] & (nelts-1)) != i)
11699 maybe_identity = false;
11702 if (maybe_identity)
11704 if (all_in_vec0)
11705 return op0;
11706 if (all_in_vec1)
11707 return op1;
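/* Illustration: for 4-element vectors, the masks {0, 1, 2, 3} and
   {4, 5, 6, 7} are identities selecting the first or second operand
   whole; a single-operand mask such as {4, 6, 1, 3} is canonicalized
   modulo the element count to {0, 2, 1, 3} below, unless the target
   can only expand the equivalent two-operand form.  */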
11710 if (all_in_vec0)
11711 op1 = op0;
11712 else if (all_in_vec1)
11714 op0 = op1;
11715 for (i = 0; i < nelts; i++)
11716 sel[i] -= nelts;
11717 need_mask_canon = true;
11720 if ((TREE_CODE (op0) == VECTOR_CST
11721 || TREE_CODE (op0) == CONSTRUCTOR)
11722 && (TREE_CODE (op1) == VECTOR_CST
11723 || TREE_CODE (op1) == CONSTRUCTOR))
11725 tree t = fold_vec_perm (type, op0, op1, sel);
11726 if (t != NULL_TREE)
11727 return t;
11730 if (op0 == op1 && !single_arg)
11731 changed = true;
11733 /* Some targets are deficient and fail to expand a single
11734 argument permutation while still allowing an equivalent
11735 2-argument version. */
11736 if (need_mask_canon && arg2 == op2
11737 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11738 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11740 need_mask_canon = need_mask_canon2;
11741 sel = sel2;
11744 if (need_mask_canon && arg2 == op2)
11746 tree *tsel = XALLOCAVEC (tree, nelts);
11747 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11748 for (i = 0; i < nelts; i++)
11749 tsel[i] = build_int_cst (eltype, sel[i]);
11750 op2 = build_vector (TREE_TYPE (arg2), tsel);
11751 changed = true;
11754 if (changed)
11755 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11757 return NULL_TREE;
11759 case BIT_INSERT_EXPR:
11760 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
11761 if (TREE_CODE (arg0) == INTEGER_CST
11762 && TREE_CODE (arg1) == INTEGER_CST)
11764 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11765 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11766 wide_int tem = wi::bit_and (arg0,
11767 wi::shifted_mask (bitpos, bitsize, true,
11768 TYPE_PRECISION (type)));
11769 wide_int tem2
11770 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11771 bitsize), bitpos);
11772 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
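/* Worked example: inserting the 8-bit value 0xab into the 32-bit
   constant 0xffffffff at bit position 8 first clears the field,
   0xffffffff & ~0x0000ff00 == 0xffff00ff, then ORs in the shifted
   value 0xab << 8 == 0x0000ab00, giving 0xffffabff.  */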
11774 else if (TREE_CODE (arg0) == VECTOR_CST
11775 && CONSTANT_CLASS_P (arg1)
11776 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11777 TREE_TYPE (arg1)))
11779 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11780 unsigned HOST_WIDE_INT elsize
11781 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11782 if (bitpos % elsize == 0)
11784 unsigned k = bitpos / elsize;
11785 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11786 return arg0;
11787 else
11789 tree *elts = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
11790 memcpy (elts, VECTOR_CST_ELTS (arg0),
11791 sizeof (tree) * TYPE_VECTOR_SUBPARTS (type));
11792 elts[k] = arg1;
11793 return build_vector (type, elts);
11797 return NULL_TREE;
11799 default:
11800 return NULL_TREE;
11801 } /* switch (code) */
11804 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11805 of an array (or vector). */
11807 tree
11808 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11810 tree index_type = NULL_TREE;
11811 offset_int low_bound = 0;
11813 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11815 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11816 if (domain_type && TYPE_MIN_VALUE (domain_type))
11818 /* Static constructors for variably sized objects make no sense. */
11819 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11820 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11821 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11825 if (index_type)
11826 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11827 TYPE_SIGN (index_type));
11829 offset_int index = low_bound - 1;
11830 if (index_type)
11831 index = wi::ext (index, TYPE_PRECISION (index_type),
11832 TYPE_SIGN (index_type));
11834 offset_int max_index;
11835 unsigned HOST_WIDE_INT cnt;
11836 tree cfield, cval;
11838 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11840 /* Array constructor might explicitly set index, or specify a range,
11841 or leave the index NULL, meaning that it is the next index after
11842 the previous one. */
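/* For instance, in { [2] = 10, 11 } the first element has the explicit
   index 2 and the second, whose index is NULL, implicitly gets index 3;
   a RANGE_EXPR index such as [4 ... 7] matches any access_index from
   4 through 7 inclusive.  */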
11843 if (cfield)
11845 if (TREE_CODE (cfield) == INTEGER_CST)
11846 max_index = index = wi::to_offset (cfield);
11847 else
11849 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11850 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11851 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11854 else
11856 index += 1;
11857 if (index_type)
11858 index = wi::ext (index, TYPE_PRECISION (index_type),
11859 TYPE_SIGN (index_type));
11860 max_index = index;
11863 /* Do we have a match? */
11864 if (wi::cmpu (access_index, index) >= 0
11865 && wi::cmpu (access_index, max_index) <= 0)
11866 return cval;
11868 return NULL_TREE;
11871 /* Perform constant folding and related simplification of EXPR.
11872 The related simplifications include x*1 => x, x*0 => 0, etc.,
11873 and application of the associative law.
11874 NOP_EXPR conversions may be removed freely (as long as we
11875 are careful not to change the type of the overall expression).
11876 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11877 but we can constant-fold them if they have constant operands. */
11879 #ifdef ENABLE_FOLD_CHECKING
11880 # define fold(x) fold_1 (x)
11881 static tree fold_1 (tree);
11882 static
11883 #endif
11884 tree
11885 fold (tree expr)
11887 const tree t = expr;
11888 enum tree_code code = TREE_CODE (t);
11889 enum tree_code_class kind = TREE_CODE_CLASS (code);
11890 tree tem;
11891 location_t loc = EXPR_LOCATION (expr);
11893 /* Return right away if a constant. */
11894 if (kind == tcc_constant)
11895 return t;
11897 /* CALL_EXPR-like objects with variable numbers of operands are
11898 treated specially. */
11899 if (kind == tcc_vl_exp)
11901 if (code == CALL_EXPR)
11903 tem = fold_call_expr (loc, expr, false);
11904 return tem ? tem : expr;
11906 return expr;
11909 if (IS_EXPR_CODE_CLASS (kind))
11911 tree type = TREE_TYPE (t);
11912 tree op0, op1, op2;
11914 switch (TREE_CODE_LENGTH (code))
11916 case 1:
11917 op0 = TREE_OPERAND (t, 0);
11918 tem = fold_unary_loc (loc, code, type, op0);
11919 return tem ? tem : expr;
11920 case 2:
11921 op0 = TREE_OPERAND (t, 0);
11922 op1 = TREE_OPERAND (t, 1);
11923 tem = fold_binary_loc (loc, code, type, op0, op1);
11924 return tem ? tem : expr;
11925 case 3:
11926 op0 = TREE_OPERAND (t, 0);
11927 op1 = TREE_OPERAND (t, 1);
11928 op2 = TREE_OPERAND (t, 2);
11929 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11930 return tem ? tem : expr;
11931 default:
11932 break;
11936 switch (code)
11938 case ARRAY_REF:
11940 tree op0 = TREE_OPERAND (t, 0);
11941 tree op1 = TREE_OPERAND (t, 1);
11943 if (TREE_CODE (op1) == INTEGER_CST
11944 && TREE_CODE (op0) == CONSTRUCTOR
11945 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11947 tree val = get_array_ctor_element_at_index (op0,
11948 wi::to_offset (op1));
11949 if (val)
11950 return val;
11953 return t;
11956 /* Return a VECTOR_CST if possible. */
11957 case CONSTRUCTOR:
11959 tree type = TREE_TYPE (t);
11960 if (TREE_CODE (type) != VECTOR_TYPE)
11961 return t;
11963 unsigned i;
11964 tree val;
11965 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
11966 if (! CONSTANT_CLASS_P (val))
11967 return t;
11969 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
11972 case CONST_DECL:
11973 return fold (DECL_INITIAL (t));
11975 default:
11976 return t;
11977 } /* switch (code) */
11980 #ifdef ENABLE_FOLD_CHECKING
11981 #undef fold
11983 static void fold_checksum_tree (const_tree, struct md5_ctx *,
11984 hash_table<nofree_ptr_hash<const tree_node> > *);
11985 static void fold_check_failed (const_tree, const_tree);
11986 void print_fold_checksum (const_tree);
11988 /* When --enable-checking=fold, compute a digest of expr before
11989 and after the actual fold call to verify that fold did not
11990 accidentally change the original expr. */
11992 tree
11993 fold (tree expr)
11995 tree ret;
11996 struct md5_ctx ctx;
11997 unsigned char checksum_before[16], checksum_after[16];
11998 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12000 md5_init_ctx (&ctx);
12001 fold_checksum_tree (expr, &ctx, &ht);
12002 md5_finish_ctx (&ctx, checksum_before);
12003 ht.empty ();
12005 ret = fold_1 (expr);
12007 md5_init_ctx (&ctx);
12008 fold_checksum_tree (expr, &ctx, &ht);
12009 md5_finish_ctx (&ctx, checksum_after);
12011 if (memcmp (checksum_before, checksum_after, 16))
12012 fold_check_failed (expr, ret);
12014 return ret;
12017 void
12018 print_fold_checksum (const_tree expr)
12020 struct md5_ctx ctx;
12021 unsigned char checksum[16], cnt;
12022 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12024 md5_init_ctx (&ctx);
12025 fold_checksum_tree (expr, &ctx, &ht);
12026 md5_finish_ctx (&ctx, checksum);
12027 for (cnt = 0; cnt < 16; ++cnt)
12028 fprintf (stderr, "%02x", checksum[cnt]);
12029 putc ('\n', stderr);
12032 static void
12033 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12035 internal_error ("fold check: original tree changed by fold");
12038 static void
12039 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12040 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12042 const tree_node **slot;
12043 enum tree_code code;
12044 union tree_node buf;
12045 int i, len;
12047 recursive_label:
12048 if (expr == NULL)
12049 return;
12050 slot = ht->find_slot (expr, INSERT);
12051 if (*slot != NULL)
12052 return;
12053 *slot = expr;
12054 code = TREE_CODE (expr);
12055 if (TREE_CODE_CLASS (code) == tcc_declaration
12056 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12058 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12059 memcpy ((char *) &buf, expr, tree_size (expr));
12060 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12061 buf.decl_with_vis.symtab_node = NULL;
12062 expr = (tree) &buf;
12064 else if (TREE_CODE_CLASS (code) == tcc_type
12065 && (TYPE_POINTER_TO (expr)
12066 || TYPE_REFERENCE_TO (expr)
12067 || TYPE_CACHED_VALUES_P (expr)
12068 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12069 || TYPE_NEXT_VARIANT (expr)
12070 || TYPE_ALIAS_SET_KNOWN_P (expr)))
12072 /* Allow these fields to be modified. */
12073 tree tmp;
12074 memcpy ((char *) &buf, expr, tree_size (expr));
12075 expr = tmp = (tree) &buf;
12076 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12077 TYPE_POINTER_TO (tmp) = NULL;
12078 TYPE_REFERENCE_TO (tmp) = NULL;
12079 TYPE_NEXT_VARIANT (tmp) = NULL;
12080 TYPE_ALIAS_SET (tmp) = -1;
12081 if (TYPE_CACHED_VALUES_P (tmp))
12083 TYPE_CACHED_VALUES_P (tmp) = 0;
12084 TYPE_CACHED_VALUES (tmp) = NULL;
12087 md5_process_bytes (expr, tree_size (expr), ctx);
12088 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12089 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12090 if (TREE_CODE_CLASS (code) != tcc_type
12091 && TREE_CODE_CLASS (code) != tcc_declaration
12092 && code != TREE_LIST
12093 && code != SSA_NAME
12094 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12095 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12096 switch (TREE_CODE_CLASS (code))
12098 case tcc_constant:
12099 switch (code)
12101 case STRING_CST:
12102 md5_process_bytes (TREE_STRING_POINTER (expr),
12103 TREE_STRING_LENGTH (expr), ctx);
12104 break;
12105 case COMPLEX_CST:
12106 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12107 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12108 break;
12109 case VECTOR_CST:
12110 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12111 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12112 break;
12113 default:
12114 break;
12116 break;
12117 case tcc_exceptional:
12118 switch (code)
12120 case TREE_LIST:
12121 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12122 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12123 expr = TREE_CHAIN (expr);
12124 goto recursive_label;
12125 break;
12126 case TREE_VEC:
12127 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12128 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12129 break;
12130 default:
12131 break;
12133 break;
12134 case tcc_expression:
12135 case tcc_reference:
12136 case tcc_comparison:
12137 case tcc_unary:
12138 case tcc_binary:
12139 case tcc_statement:
12140 case tcc_vl_exp:
12141 len = TREE_OPERAND_LENGTH (expr);
12142 for (i = 0; i < len; ++i)
12143 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12144 break;
12145 case tcc_declaration:
12146 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12147 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12148 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12150 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12151 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12152 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12153 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12154 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12157 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12159 if (TREE_CODE (expr) == FUNCTION_DECL)
12161 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12162 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12164 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12166 break;
12167 case tcc_type:
12168 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12169 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12170 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12171 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12172 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12173 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12174 if (INTEGRAL_TYPE_P (expr)
12175 || SCALAR_FLOAT_TYPE_P (expr))
12177 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12178 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12180 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12181 if (TREE_CODE (expr) == RECORD_TYPE
12182 || TREE_CODE (expr) == UNION_TYPE
12183 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12184 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12185 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12186 break;
12187 default:
12188 break;
12192 /* Helper function for outputting the checksum of a tree T. When
12193 debugging with gdb, you can "define mynext" to be "next" followed
12194 by "call debug_fold_checksum (op0)", then just trace down till the
12195 outputs differ. */
12197 DEBUG_FUNCTION void
12198 debug_fold_checksum (const_tree t)
12200 int i;
12201 unsigned char checksum[16];
12202 struct md5_ctx ctx;
12203 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12205 md5_init_ctx (&ctx);
12206 fold_checksum_tree (t, &ctx, &ht);
12207 md5_finish_ctx (&ctx, checksum);
12208 ht.empty ();
12210 for (i = 0; i < 16; i++)
12211 fprintf (stderr, "%d ", checksum[i]);
12213 fprintf (stderr, "\n");
12216 #endif
12218 /* Fold a unary tree expression with code CODE of type TYPE with an
12219 operand OP0. LOC is the location of the resulting expression.
12220 Return a folded expression if successful. Otherwise, return a tree
12221 expression with code CODE of type TYPE with an operand OP0. */
12223 tree
12224 fold_build1_stat_loc (location_t loc,
12225 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12227 tree tem;
12228 #ifdef ENABLE_FOLD_CHECKING
12229 unsigned char checksum_before[16], checksum_after[16];
12230 struct md5_ctx ctx;
12231 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12233 md5_init_ctx (&ctx);
12234 fold_checksum_tree (op0, &ctx, &ht);
12235 md5_finish_ctx (&ctx, checksum_before);
12236 ht.empty ();
12237 #endif
12239 tem = fold_unary_loc (loc, code, type, op0);
12240 if (!tem)
12241 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12243 #ifdef ENABLE_FOLD_CHECKING
12244 md5_init_ctx (&ctx);
12245 fold_checksum_tree (op0, &ctx, &ht);
12246 md5_finish_ctx (&ctx, checksum_after);
12248 if (memcmp (checksum_before, checksum_after, 16))
12249 fold_check_failed (op0, tem);
12250 #endif
12251 return tem;
12254 /* Fold a binary tree expression with code CODE of type TYPE with
12255 operands OP0 and OP1. LOC is the location of the resulting
12256 expression. Return a folded expression if successful. Otherwise,
12257 return a tree expression with code CODE of type TYPE with operands
12258 OP0 and OP1. */
12260 tree
12261 fold_build2_stat_loc (location_t loc,
12262 enum tree_code code, tree type, tree op0, tree op1
12263 MEM_STAT_DECL)
12265 tree tem;
12266 #ifdef ENABLE_FOLD_CHECKING
12267 unsigned char checksum_before_op0[16],
12268 checksum_before_op1[16],
12269 checksum_after_op0[16],
12270 checksum_after_op1[16];
12271 struct md5_ctx ctx;
12272 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12274 md5_init_ctx (&ctx);
12275 fold_checksum_tree (op0, &ctx, &ht);
12276 md5_finish_ctx (&ctx, checksum_before_op0);
12277 ht.empty ();
12279 md5_init_ctx (&ctx);
12280 fold_checksum_tree (op1, &ctx, &ht);
12281 md5_finish_ctx (&ctx, checksum_before_op1);
12282 ht.empty ();
12283 #endif
12285 tem = fold_binary_loc (loc, code, type, op0, op1);
12286 if (!tem)
12287 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12289 #ifdef ENABLE_FOLD_CHECKING
12290 md5_init_ctx (&ctx);
12291 fold_checksum_tree (op0, &ctx, &ht);
12292 md5_finish_ctx (&ctx, checksum_after_op0);
12293 ht.empty ();
12295 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12296 fold_check_failed (op0, tem);
12298 md5_init_ctx (&ctx);
12299 fold_checksum_tree (op1, &ctx, &ht);
12300 md5_finish_ctx (&ctx, checksum_after_op1);
12302 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12303 fold_check_failed (op1, tem);
12304 #endif
12305 return tem;
12308 /* Fold a ternary tree expression with code CODE of type TYPE with
12309 operands OP0, OP1, and OP2. Return a folded expression if
12310 successful. Otherwise, return a tree expression with code CODE of
12311 type TYPE with operands OP0, OP1, and OP2. */
12313 tree
12314 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12315 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12317 tree tem;
12318 #ifdef ENABLE_FOLD_CHECKING
12319 unsigned char checksum_before_op0[16],
12320 checksum_before_op1[16],
12321 checksum_before_op2[16],
12322 checksum_after_op0[16],
12323 checksum_after_op1[16],
12324 checksum_after_op2[16];
12325 struct md5_ctx ctx;
12326 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12328 md5_init_ctx (&ctx);
12329 fold_checksum_tree (op0, &ctx, &ht);
12330 md5_finish_ctx (&ctx, checksum_before_op0);
12331 ht.empty ();
12333 md5_init_ctx (&ctx);
12334 fold_checksum_tree (op1, &ctx, &ht);
12335 md5_finish_ctx (&ctx, checksum_before_op1);
12336 ht.empty ();
12338 md5_init_ctx (&ctx);
12339 fold_checksum_tree (op2, &ctx, &ht);
12340 md5_finish_ctx (&ctx, checksum_before_op2);
12341 ht.empty ();
12342 #endif
12344 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12345 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12346 if (!tem)
12347 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12349 #ifdef ENABLE_FOLD_CHECKING
12350 md5_init_ctx (&ctx);
12351 fold_checksum_tree (op0, &ctx, &ht);
12352 md5_finish_ctx (&ctx, checksum_after_op0);
12353 ht.empty ();
12355 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12356 fold_check_failed (op0, tem);
12358 md5_init_ctx (&ctx);
12359 fold_checksum_tree (op1, &ctx, &ht);
12360 md5_finish_ctx (&ctx, checksum_after_op1);
12361 ht.empty ();
12363 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12364 fold_check_failed (op1, tem);
12366 md5_init_ctx (&ctx);
12367 fold_checksum_tree (op2, &ctx, &ht);
12368 md5_finish_ctx (&ctx, checksum_after_op2);
12370 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12371 fold_check_failed (op2, tem);
12372 #endif
12373 return tem;
12376 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12377 arguments in ARGARRAY, and a null static chain.
12378 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12379 of type TYPE from the given operands as constructed by build_call_array. */
12381 tree
12382 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12383 int nargs, tree *argarray)
12385 tree tem;
12386 #ifdef ENABLE_FOLD_CHECKING
12387 unsigned char checksum_before_fn[16],
12388 checksum_before_arglist[16],
12389 checksum_after_fn[16],
12390 checksum_after_arglist[16];
12391 struct md5_ctx ctx;
12392 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12393 int i;
12395 md5_init_ctx (&ctx);
12396 fold_checksum_tree (fn, &ctx, &ht);
12397 md5_finish_ctx (&ctx, checksum_before_fn);
12398 ht.empty ();
12400 md5_init_ctx (&ctx);
12401 for (i = 0; i < nargs; i++)
12402 fold_checksum_tree (argarray[i], &ctx, &ht);
12403 md5_finish_ctx (&ctx, checksum_before_arglist);
12404 ht.empty ();
12405 #endif
12407 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12408 if (!tem)
12409 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12411 #ifdef ENABLE_FOLD_CHECKING
12412 md5_init_ctx (&ctx);
12413 fold_checksum_tree (fn, &ctx, &ht);
12414 md5_finish_ctx (&ctx, checksum_after_fn);
12415 ht.empty ();
12417 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12418 fold_check_failed (fn, tem);
12420 md5_init_ctx (&ctx);
12421 for (i = 0; i < nargs; i++)
12422 fold_checksum_tree (argarray[i], &ctx, &ht);
12423 md5_finish_ctx (&ctx, checksum_after_arglist);
12425 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12426 fold_check_failed (NULL_TREE, tem);
12427 #endif
12428 return tem;
12431 /* Perform constant folding and related simplification of initializer
12432 expression EXPR. These behave identically to "fold_buildN" but ignore
12433 potential run-time traps and exceptions that fold must preserve. */
12435 #define START_FOLD_INIT \
12436 int saved_signaling_nans = flag_signaling_nans;\
12437 int saved_trapping_math = flag_trapping_math;\
12438 int saved_rounding_math = flag_rounding_math;\
12439 int saved_trapv = flag_trapv;\
12440 int saved_folding_initializer = folding_initializer;\
12441 flag_signaling_nans = 0;\
12442 flag_trapping_math = 0;\
12443 flag_rounding_math = 0;\
12444 flag_trapv = 0;\
12445 folding_initializer = 1;
12447 #define END_FOLD_INIT \
12448 flag_signaling_nans = saved_signaling_nans;\
12449 flag_trapping_math = saved_trapping_math;\
12450 flag_rounding_math = saved_rounding_math;\
12451 flag_trapv = saved_trapv;\
12452 folding_initializer = saved_folding_initializer;
12454 tree
12455 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12456 tree type, tree op)
12458 tree result;
12459 START_FOLD_INIT;
12461 result = fold_build1_loc (loc, code, type, op);
12463 END_FOLD_INIT;
12464 return result;
12467 tree
12468 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12469 tree type, tree op0, tree op1)
12471 tree result;
12472 START_FOLD_INIT;
12474 result = fold_build2_loc (loc, code, type, op0, op1);
12476 END_FOLD_INIT;
12477 return result;
12480 tree
12481 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12482 int nargs, tree *argarray)
12484 tree result;
12485 START_FOLD_INIT;
12487 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12489 END_FOLD_INIT;
12490 return result;
12493 #undef START_FOLD_INIT
12494 #undef END_FOLD_INIT
12496 /* Determine if first argument is a multiple of second argument. Return 0 if
12497 it is not, or we cannot easily determine it to be.
12499 An example of the sort of thing we care about (at this point; this routine
12500 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12501 fold cases do now) is discovering that
12503 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12505 is a multiple of
12507 SAVE_EXPR (J * 8)
12509 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12511 This code also handles discovering that
12513 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12515 is a multiple of 8 so we don't have to worry about dealing with a
12516 possible remainder.
12518 Note that we *look* inside a SAVE_EXPR only to determine how it was
12519 calculated; it is not safe for fold to do much of anything else with the
12520 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12521 at run time. For example, the latter example above *cannot* be implemented
12522 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12523 evaluation time of the original SAVE_EXPR is not necessarily the same at
12524 the time the new expression is evaluated. The only optimization of this
12525 sort that would be valid is changing
12527 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12529 divided by 8 to
12531 SAVE_EXPR (I) * SAVE_EXPR (J)
12533 (where the same SAVE_EXPR (J) is used in the original and the
12534 transformed version). */
12536 int
12537 multiple_of_p (tree type, const_tree top, const_tree bottom)
12539 gimple *stmt;
12540 tree t1, op1, op2;
12542 if (operand_equal_p (top, bottom, 0))
12543 return 1;
12545 if (TREE_CODE (type) != INTEGER_TYPE)
12546 return 0;
12548 switch (TREE_CODE (top))
12550 case BIT_AND_EXPR:
12551 /* Bitwise and provides a power of two multiple. If the mask is
12552 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12553 if (!integer_pow2p (bottom))
12554 return 0;
12555 /* FALLTHRU */
12557 case MULT_EXPR:
12558 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12559 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12561 case MINUS_EXPR:
12562 /* It is impossible to prove in general whether op0 - op1 is a multiple
12563 of bottom, so be conservative here and check whether both op0 and op1
12564 are multiples of bottom. Note we check the second operand first
12565 since it's usually simpler. */
12566 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12567 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12569 case PLUS_EXPR:
12570 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12571 as op0 - 3 if the expression has unsigned type. For example,
12572 (X / 3) * 3 + 0xfffffffd is a multiple of 3, but 0xfffffffd is not. */
12573 op1 = TREE_OPERAND (top, 1);
12574 if (TYPE_UNSIGNED (type)
12575 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
12576 op1 = fold_build1 (NEGATE_EXPR, type, op1);
12577 return (multiple_of_p (type, op1, bottom)
12578 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12580 case LSHIFT_EXPR:
12581 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12583 op1 = TREE_OPERAND (top, 1);
12584 /* const_binop may not detect overflow correctly,
12585 so check for it explicitly here. */
12586 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12587 && 0 != (t1 = fold_convert (type,
12588 const_binop (LSHIFT_EXPR,
12589 size_one_node,
12590 op1)))
12591 && !TREE_OVERFLOW (t1))
12592 return multiple_of_p (type, t1, bottom);
12594 return 0;
12596 case NOP_EXPR:
12597 /* Can't handle conversions from non-integral or wider integral type. */
12598 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12599 || (TYPE_PRECISION (type)
12600 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12601 return 0;
12603 /* fall through */
12605 case SAVE_EXPR:
12606 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12608 case COND_EXPR:
12609 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12610 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12612 case INTEGER_CST:
12613 if (TREE_CODE (bottom) != INTEGER_CST
12614 || integer_zerop (bottom)
12615 || (TYPE_UNSIGNED (type)
12616 && (tree_int_cst_sgn (top) < 0
12617 || tree_int_cst_sgn (bottom) < 0)))
12618 return 0;
12619 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12620 SIGNED);
12622 case SSA_NAME:
12623 if (TREE_CODE (bottom) == INTEGER_CST
12624 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
12625 && gimple_code (stmt) == GIMPLE_ASSIGN)
12627 enum tree_code code = gimple_assign_rhs_code (stmt);
12629 /* Check for special cases to see if top is defined as multiple
12630 of bottom:
12632 top = (X & ~(bottom - 1)) ; bottom is power of 2
12634 or
12636 Y = X % bottom
12637 top = X - Y. */
12638 if (code == BIT_AND_EXPR
12639 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12640 && TREE_CODE (op2) == INTEGER_CST
12641 && integer_pow2p (bottom)
12642 && wi::multiple_of_p (wi::to_widest (op2),
12643 wi::to_widest (bottom), UNSIGNED))
12644 return 1;
12646 op1 = gimple_assign_rhs1 (stmt);
12647 if (code == MINUS_EXPR
12648 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12649 && TREE_CODE (op2) == SSA_NAME
12650 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
12651 && gimple_code (stmt) == GIMPLE_ASSIGN
12652 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
12653 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
12654 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
12655 return 1;
12658 /* fall through */
12660 default:
12661 return 0;
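/* Example (editorial): round_up_loc below relies on this routine to skip
   redundant rounding.  For a sizetype expression SIZE it asks

     tree div = build_int_cst (sizetype, 8);
     if (multiple_of_p (sizetype, size, div))
       return size;

   and something like (x / 8) * 8 is recognized through the MULT_EXPR case,
   whose INTEGER_CST operand 8 is trivially a multiple of 8.  */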
12665 #define tree_expr_nonnegative_warnv_p(X, Y) \
12666 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12668 #define RECURSE(X) \
12669 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
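/* Editorial note: the #define above turns any direct call to
   tree_expr_nonnegative_warnv_p in the code below into a compile-time
   error via _Pragma, forcing recursion to go through RECURSE.  RECURSE
   still reaches the real function: a function-like macro only expands when
   its name is immediately followed by '(', and here the parenthesized name
   is followed by ')'.  It also threads strict_overflow_p and an
   incremented depth through every recursive query.  */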
12671 /* Return true if an expression of code CODE and type TYPE is known to be non-negative. */
12673 static bool
12674 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12676 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12677 && truth_value_p (code))
12678 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12679 have a signed:1 type (where the values are -1 and 0). */
12680 return true;
12681 return false;
12684 /* Return true if (CODE OP0) is known to be non-negative. If the return
12685 value is based on the assumption that signed overflow is undefined,
12686 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12687 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12689 bool
12690 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12691 bool *strict_overflow_p, int depth)
12693 if (TYPE_UNSIGNED (type))
12694 return true;
12696 switch (code)
12698 case ABS_EXPR:
12699 /* We can't return 1 if flag_wrapv is set because
12700 ABS_EXPR<INT_MIN> = INT_MIN. */
12701 if (!ANY_INTEGRAL_TYPE_P (type))
12702 return true;
12703 if (TYPE_OVERFLOW_UNDEFINED (type))
12705 *strict_overflow_p = true;
12706 return true;
12708 break;
12710 case NON_LVALUE_EXPR:
12711 case FLOAT_EXPR:
12712 case FIX_TRUNC_EXPR:
12713 return RECURSE (op0);
12715 CASE_CONVERT:
12717 tree inner_type = TREE_TYPE (op0);
12718 tree outer_type = type;
12720 if (TREE_CODE (outer_type) == REAL_TYPE)
12722 if (TREE_CODE (inner_type) == REAL_TYPE)
12723 return RECURSE (op0);
12724 if (INTEGRAL_TYPE_P (inner_type))
12726 if (TYPE_UNSIGNED (inner_type))
12727 return true;
12728 return RECURSE (op0);
12731 else if (INTEGRAL_TYPE_P (outer_type))
12733 if (TREE_CODE (inner_type) == REAL_TYPE)
12734 return RECURSE (op0);
12735 if (INTEGRAL_TYPE_P (inner_type))
12736 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12737 && TYPE_UNSIGNED (inner_type);
12740 break;
12742 default:
12743 return tree_simple_nonnegative_warnv_p (code, type);
12747 /* We don't know the sign of `t', so be conservative and return false. */
12747 return false;
12750 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12751 value is based on the assumption that signed overflow is undefined,
12752 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12753 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12755 bool
12756 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12757 tree op1, bool *strict_overflow_p,
12758 int depth)
12760 if (TYPE_UNSIGNED (type))
12761 return true;
12763 switch (code)
12765 case POINTER_PLUS_EXPR:
12766 case PLUS_EXPR:
12767 if (FLOAT_TYPE_P (type))
12768 return RECURSE (op0) && RECURSE (op1);
12770 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12771 both unsigned and at least 2 bits shorter than the result. */
12772 if (TREE_CODE (type) == INTEGER_TYPE
12773 && TREE_CODE (op0) == NOP_EXPR
12774 && TREE_CODE (op1) == NOP_EXPR)
12776 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12777 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12778 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12779 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12781 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12782 TYPE_PRECISION (inner2)) + 1;
12783 return prec < TYPE_PRECISION (type);
12786 break;
12788 case MULT_EXPR:
12789 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12791 /* x * x is always non-negative for floating point x,
12792 or for any x when signed overflow is undefined. */
12793 if (operand_equal_p (op0, op1, 0)
12794 || (RECURSE (op0) && RECURSE (op1)))
12796 if (ANY_INTEGRAL_TYPE_P (type)
12797 && TYPE_OVERFLOW_UNDEFINED (type))
12798 *strict_overflow_p = true;
12799 return true;
12803 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12804 both unsigned and their combined precision is less than the result's. */
12805 if (TREE_CODE (type) == INTEGER_TYPE
12806 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12807 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12809 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12810 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12811 : TREE_TYPE (op0);
12812 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12813 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12814 : TREE_TYPE (op1);
12816 bool unsigned0 = TYPE_UNSIGNED (inner0);
12817 bool unsigned1 = TYPE_UNSIGNED (inner1);
12819 if (TREE_CODE (op0) == INTEGER_CST)
12820 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12822 if (TREE_CODE (op1) == INTEGER_CST)
12823 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12825 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12826 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12828 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12829 ? tree_int_cst_min_precision (op0, UNSIGNED)
12830 : TYPE_PRECISION (inner0);
12832 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12833 ? tree_int_cst_min_precision (op1, UNSIGNED)
12834 : TYPE_PRECISION (inner1);
12836 return precision0 + precision1 < TYPE_PRECISION (type);
12839 return false;
12841 case BIT_AND_EXPR:
12842 case MAX_EXPR:
12843 return RECURSE (op0) || RECURSE (op1);
12845 case BIT_IOR_EXPR:
12846 case BIT_XOR_EXPR:
12847 case MIN_EXPR:
12848 case RDIV_EXPR:
12849 case TRUNC_DIV_EXPR:
12850 case CEIL_DIV_EXPR:
12851 case FLOOR_DIV_EXPR:
12852 case ROUND_DIV_EXPR:
12853 return RECURSE (op0) && RECURSE (op1);
12855 case TRUNC_MOD_EXPR:
12856 return RECURSE (op0);
12858 case FLOOR_MOD_EXPR:
12859 return RECURSE (op1);
12861 case CEIL_MOD_EXPR:
12862 case ROUND_MOD_EXPR:
12863 default:
12864 return tree_simple_nonnegative_warnv_p (code, type);
12867 /* We don't know the sign of `t', so be conservative and return false. */
12868 return false;
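/* Editorial example of the MULT_EXPR precision test above: in
   (int) (unsigned char) a * (int) (unsigned char) b each zero-extended
   operand carries at most 8 value bits, and 8 + 8 = 16 < 32, so the
   product is at most 255 * 255 = 65025 and provably non-negative with no
   overflow assumption needed.  */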
12871 /* Return true if T is known to be non-negative. If the return
12872 value is based on the assumption that signed overflow is undefined,
12873 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12874 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12876 bool
12877 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12879 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12880 return true;
12882 switch (TREE_CODE (t))
12884 case INTEGER_CST:
12885 return tree_int_cst_sgn (t) >= 0;
12887 case REAL_CST:
12888 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12890 case FIXED_CST:
12891 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12893 case COND_EXPR:
12894 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12896 case SSA_NAME:
12897 /* Limit the depth of recursion to avoid quadratic behavior.
12898 This is expected to catch almost all occurrences in practice.
12899 If this code misses important cases that unbounded recursion
12900 would not, passes that need this information could be revised
12901 to provide it through dataflow propagation. */
12902 return (!name_registered_for_update_p (t)
12903 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12904 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12905 strict_overflow_p, depth));
12907 default:
12908 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12912 /* Return true if T is known to be non-negative. If the return
12913 value is based on the assumption that signed overflow is undefined,
12914 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12915 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12917 bool
12918 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
12919 bool *strict_overflow_p, int depth)
12921 switch (fn)
12923 CASE_CFN_ACOS:
12924 CASE_CFN_ACOSH:
12925 CASE_CFN_CABS:
12926 CASE_CFN_COSH:
12927 CASE_CFN_ERFC:
12928 CASE_CFN_EXP:
12929 CASE_CFN_EXP10:
12930 CASE_CFN_EXP2:
12931 CASE_CFN_FABS:
12932 CASE_CFN_FDIM:
12933 CASE_CFN_HYPOT:
12934 CASE_CFN_POW10:
12935 CASE_CFN_FFS:
12936 CASE_CFN_PARITY:
12937 CASE_CFN_POPCOUNT:
12938 CASE_CFN_CLZ:
12939 CASE_CFN_CLRSB:
12940 case CFN_BUILT_IN_BSWAP32:
12941 case CFN_BUILT_IN_BSWAP64:
12942 /* Always true. */
12943 return true;
12945 CASE_CFN_SQRT:
12946 /* sqrt(-0.0) is -0.0. */
12947 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12948 return true;
12949 return RECURSE (arg0);
12951 CASE_CFN_ASINH:
12952 CASE_CFN_ATAN:
12953 CASE_CFN_ATANH:
12954 CASE_CFN_CBRT:
12955 CASE_CFN_CEIL:
12956 CASE_CFN_ERF:
12957 CASE_CFN_EXPM1:
12958 CASE_CFN_FLOOR:
12959 CASE_CFN_FMOD:
12960 CASE_CFN_FREXP:
12961 CASE_CFN_ICEIL:
12962 CASE_CFN_IFLOOR:
12963 CASE_CFN_IRINT:
12964 CASE_CFN_IROUND:
12965 CASE_CFN_LCEIL:
12966 CASE_CFN_LDEXP:
12967 CASE_CFN_LFLOOR:
12968 CASE_CFN_LLCEIL:
12969 CASE_CFN_LLFLOOR:
12970 CASE_CFN_LLRINT:
12971 CASE_CFN_LLROUND:
12972 CASE_CFN_LRINT:
12973 CASE_CFN_LROUND:
12974 CASE_CFN_MODF:
12975 CASE_CFN_NEARBYINT:
12976 CASE_CFN_RINT:
12977 CASE_CFN_ROUND:
12978 CASE_CFN_SCALB:
12979 CASE_CFN_SCALBLN:
12980 CASE_CFN_SCALBN:
12981 CASE_CFN_SIGNBIT:
12982 CASE_CFN_SIGNIFICAND:
12983 CASE_CFN_SINH:
12984 CASE_CFN_TANH:
12985 CASE_CFN_TRUNC:
12986 /* True if the 1st argument is nonnegative. */
12987 return RECURSE (arg0);
12989 CASE_CFN_FMAX:
12990 /* True if the 1st OR 2nd arguments are nonnegative. */
12991 return RECURSE (arg0) || RECURSE (arg1);
12993 CASE_CFN_FMIN:
12994 /* True if the 1st AND 2nd arguments are nonnegative. */
12995 return RECURSE (arg0) && RECURSE (arg1);
12997 CASE_CFN_COPYSIGN:
12998 /* True if the 2nd argument is nonnegative. */
12999 return RECURSE (arg1);
13001 CASE_CFN_POWI:
13002 /* True if the 1st argument is nonnegative or the second
13003 argument is an even integer. */
13004 if (TREE_CODE (arg1) == INTEGER_CST
13005 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13006 return true;
13007 return RECURSE (arg0);
13009 CASE_CFN_POW:
13010 /* True if the 1st argument is nonnegative or the second
13011 argument is an even integer valued real. */
13012 if (TREE_CODE (arg1) == REAL_CST)
13014 REAL_VALUE_TYPE c;
13015 HOST_WIDE_INT n;
13017 c = TREE_REAL_CST (arg1);
13018 n = real_to_integer (&c);
13019 if ((n & 1) == 0)
13021 REAL_VALUE_TYPE cint;
13022 real_from_integer (&cint, VOIDmode, n, SIGNED);
13023 if (real_identical (&c, &cint))
13024 return true;
13027 return RECURSE (arg0);
13029 default:
13030 break;
13032 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
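/* Editorial example: pow (x, 2.0) is non-negative for any x.  ARG1 is the
   REAL_CST 2.0, real_to_integer yields the even integer 2, and
   real_identical confirms 2.0 is exactly integral, so the CASE_CFN_POW
   arm returns true without recursing into x.  */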
13035 /* Return true if T is known to be non-negative. If the return
13036 value is based on the assumption that signed overflow is undefined,
13037 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13038 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13040 static bool
13041 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13043 enum tree_code code = TREE_CODE (t);
13044 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13045 return true;
13047 switch (code)
13049 case TARGET_EXPR:
13051 tree temp = TARGET_EXPR_SLOT (t);
13052 t = TARGET_EXPR_INITIAL (t);
13054 /* If the initializer is non-void, then it's a normal expression
13055 that will be assigned to the slot. */
13056 if (!VOID_TYPE_P (t))
13057 return RECURSE (t);
13059 /* Otherwise, the initializer sets the slot in some way. One common
13060 way is an assignment statement at the end of the initializer. */
13061 while (1)
13063 if (TREE_CODE (t) == BIND_EXPR)
13064 t = expr_last (BIND_EXPR_BODY (t));
13065 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13066 || TREE_CODE (t) == TRY_CATCH_EXPR)
13067 t = expr_last (TREE_OPERAND (t, 0));
13068 else if (TREE_CODE (t) == STATEMENT_LIST)
13069 t = expr_last (t);
13070 else
13071 break;
13073 if (TREE_CODE (t) == MODIFY_EXPR
13074 && TREE_OPERAND (t, 0) == temp)
13075 return RECURSE (TREE_OPERAND (t, 1));
13077 return false;
13080 case CALL_EXPR:
13082 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13083 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13085 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13086 get_call_combined_fn (t),
13087 arg0,
13088 arg1,
13089 strict_overflow_p, depth);
13091 case COMPOUND_EXPR:
13092 case MODIFY_EXPR:
13093 return RECURSE (TREE_OPERAND (t, 1));
13095 case BIND_EXPR:
13096 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13098 case SAVE_EXPR:
13099 return RECURSE (TREE_OPERAND (t, 0));
13101 default:
13102 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13106 #undef RECURSE
13107 #undef tree_expr_nonnegative_warnv_p
13109 /* Return true if T is known to be non-negative. If the return
13110 value is based on the assumption that signed overflow is undefined,
13111 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13112 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13114 bool
13115 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13117 enum tree_code code;
13118 if (t == error_mark_node)
13119 return false;
13121 code = TREE_CODE (t);
13122 switch (TREE_CODE_CLASS (code))
13124 case tcc_binary:
13125 case tcc_comparison:
13126 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13127 TREE_TYPE (t),
13128 TREE_OPERAND (t, 0),
13129 TREE_OPERAND (t, 1),
13130 strict_overflow_p, depth);
13132 case tcc_unary:
13133 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13134 TREE_TYPE (t),
13135 TREE_OPERAND (t, 0),
13136 strict_overflow_p, depth);
13138 case tcc_constant:
13139 case tcc_declaration:
13140 case tcc_reference:
13141 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13143 default:
13144 break;
13147 switch (code)
13149 case TRUTH_AND_EXPR:
13150 case TRUTH_OR_EXPR:
13151 case TRUTH_XOR_EXPR:
13152 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13153 TREE_TYPE (t),
13154 TREE_OPERAND (t, 0),
13155 TREE_OPERAND (t, 1),
13156 strict_overflow_p, depth);
13157 case TRUTH_NOT_EXPR:
13158 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13159 TREE_TYPE (t),
13160 TREE_OPERAND (t, 0),
13161 strict_overflow_p, depth);
13163 case COND_EXPR:
13164 case CONSTRUCTOR:
13165 case OBJ_TYPE_REF:
13166 case ASSERT_EXPR:
13167 case ADDR_EXPR:
13168 case WITH_SIZE_EXPR:
13169 case SSA_NAME:
13170 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13172 default:
13173 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13177 /* Return true if `t' is known to be non-negative. Handle warnings
13178 about undefined signed overflow. */
13180 bool
13181 tree_expr_nonnegative_p (tree t)
13183 bool ret, strict_overflow_p;
13185 strict_overflow_p = false;
13186 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13187 if (strict_overflow_p)
13188 fold_overflow_warning (("assuming signed overflow does not occur when "
13189 "determining that expression is always "
13190 "non-negative"),
13191 WARN_STRICT_OVERFLOW_MISC);
13192 return ret;
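/* Editorial sketch of a typical client (hypothetical caller): dropping a
   redundant absolute value, much as the match.pd simplifications do:

     if (tree_expr_nonnegative_p (arg))
       return arg;

   The fold_overflow_warning call above fires only when the answer relied
   on signed overflow being undefined.  */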
13196 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13197 For floating point we further ensure that T is not denormal.
13198 Similar logic is present in nonzero_address in rtlanal.c.
13200 If the return value is based on the assumption that signed overflow
13201 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13202 change *STRICT_OVERFLOW_P. */
13204 bool
13205 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13206 bool *strict_overflow_p)
13208 switch (code)
13210 case ABS_EXPR:
13211 return tree_expr_nonzero_warnv_p (op0,
13212 strict_overflow_p);
13214 case NOP_EXPR:
13216 tree inner_type = TREE_TYPE (op0);
13217 tree outer_type = type;
13219 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13220 && tree_expr_nonzero_warnv_p (op0,
13221 strict_overflow_p));
13223 break;
13225 case NON_LVALUE_EXPR:
13226 return tree_expr_nonzero_warnv_p (op0,
13227 strict_overflow_p);
13229 default:
13230 break;
13233 return false;
13236 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13237 For floating point we further ensure that T is not denormal.
13238 Similar logic is present in nonzero_address in rtlanal.c.
13240 If the return value is based on the assumption that signed overflow
13241 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13242 change *STRICT_OVERFLOW_P. */
13244 bool
13245 tree_binary_nonzero_warnv_p (enum tree_code code,
13246 tree type,
13247 tree op0,
13248 tree op1, bool *strict_overflow_p)
13250 bool sub_strict_overflow_p;
13251 switch (code)
13253 case POINTER_PLUS_EXPR:
13254 case PLUS_EXPR:
13255 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13257 /* In the presence of negative values it is hard
13258 to say anything definite. */
13259 sub_strict_overflow_p = false;
13260 if (!tree_expr_nonnegative_warnv_p (op0,
13261 &sub_strict_overflow_p)
13262 || !tree_expr_nonnegative_warnv_p (op1,
13263 &sub_strict_overflow_p))
13264 return false;
13265 /* One of the operands must be positive and the other non-negative. */
13266 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13267 overflows, on a twos-complement machine the sum of two
13268 nonnegative numbers can never be zero. */
13269 return (tree_expr_nonzero_warnv_p (op0,
13270 strict_overflow_p)
13271 || tree_expr_nonzero_warnv_p (op1,
13272 strict_overflow_p));
13274 break;
13276 case MULT_EXPR:
13277 if (TYPE_OVERFLOW_UNDEFINED (type))
13279 if (tree_expr_nonzero_warnv_p (op0,
13280 strict_overflow_p)
13281 && tree_expr_nonzero_warnv_p (op1,
13282 strict_overflow_p))
13284 *strict_overflow_p = true;
13285 return true;
13288 break;
13290 case MIN_EXPR:
13291 sub_strict_overflow_p = false;
13292 if (tree_expr_nonzero_warnv_p (op0,
13293 &sub_strict_overflow_p)
13294 && tree_expr_nonzero_warnv_p (op1,
13295 &sub_strict_overflow_p))
13297 if (sub_strict_overflow_p)
13298 *strict_overflow_p = true;
13300 break;
13302 case MAX_EXPR:
13303 sub_strict_overflow_p = false;
13304 if (tree_expr_nonzero_warnv_p (op0,
13305 &sub_strict_overflow_p))
13307 if (sub_strict_overflow_p)
13308 *strict_overflow_p = true;
13310 /* When both operands are nonzero, MAX must be too. */
13311 if (tree_expr_nonzero_warnv_p (op1,
13312 strict_overflow_p))
13313 return true;
13315 /* MAX where operand 0 is positive is positive. */
13316 return tree_expr_nonnegative_warnv_p (op0,
13317 strict_overflow_p);
13319 /* MAX where operand 1 is positive is positive. */
13320 else if (tree_expr_nonzero_warnv_p (op1,
13321 &sub_strict_overflow_p)
13322 && tree_expr_nonnegative_warnv_p (op1,
13323 &sub_strict_overflow_p))
13325 if (sub_strict_overflow_p)
13326 *strict_overflow_p = true;
13327 return true;
13329 break;
13331 case BIT_IOR_EXPR:
13332 return (tree_expr_nonzero_warnv_p (op1,
13333 strict_overflow_p)
13334 || tree_expr_nonzero_warnv_p (op0,
13335 strict_overflow_p));
13337 default:
13338 break;
13341 return false;
13344 /* Return true when T is an address and is known to be nonzero.
13345 For floating point we further ensure that T is not denormal.
13346 Similar logic is present in nonzero_address in rtlanal.c.
13348 If the return value is based on the assumption that signed overflow
13349 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13350 change *STRICT_OVERFLOW_P. */
13352 bool
13353 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13355 bool sub_strict_overflow_p;
13356 switch (TREE_CODE (t))
13358 case INTEGER_CST:
13359 return !integer_zerop (t);
13361 case ADDR_EXPR:
13363 tree base = TREE_OPERAND (t, 0);
13365 if (!DECL_P (base))
13366 base = get_base_address (base);
13368 if (base && TREE_CODE (base) == TARGET_EXPR)
13369 base = TARGET_EXPR_SLOT (base);
13371 if (!base)
13372 return false;
13374 /* For objects in symbol table check if we know they are non-zero.
13375 Don't do anything for variables and functions before symtab is built;
13376 it is quite possible that they will be declared weak later. */
13377 int nonzero_addr = maybe_nonzero_address (base);
13378 if (nonzero_addr >= 0)
13379 return nonzero_addr;
13381 /* Constants are never weak. */
13382 if (CONSTANT_CLASS_P (base))
13383 return true;
13385 return false;
13388 case COND_EXPR:
13389 sub_strict_overflow_p = false;
13390 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13391 &sub_strict_overflow_p)
13392 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13393 &sub_strict_overflow_p))
13395 if (sub_strict_overflow_p)
13396 *strict_overflow_p = true;
13397 return true;
13399 break;
13401 default:
13402 break;
13404 return false;
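/* Editorial note: the symbol-table check above is what keeps this
   conservative for weak symbols.  The address of a weak, undefined symbol
   may legitimately compare equal to zero at run time, so
   maybe_nonzero_address must vouch for the decl before we treat &decl as
   nonzero; CONSTANT_CLASS_P bases such as string literals can never be
   weak and are always accepted.  */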
13407 #define integer_valued_real_p(X) \
13408 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13410 #define RECURSE(X) \
13411 ((integer_valued_real_p) (X, depth + 1))
13413 /* Return true if the floating point result of (CODE OP0) has an
13414 integer value. We also allow +Inf, -Inf and NaN to be considered
13415 integer values. Return false for signaling NaN.
13417 DEPTH is the current nesting depth of the query. */
13419 bool
13420 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13422 switch (code)
13424 case FLOAT_EXPR:
13425 return true;
13427 case ABS_EXPR:
13428 return RECURSE (op0);
13430 CASE_CONVERT:
13432 tree type = TREE_TYPE (op0);
13433 if (TREE_CODE (type) == INTEGER_TYPE)
13434 return true;
13435 if (TREE_CODE (type) == REAL_TYPE)
13436 return RECURSE (op0);
13437 break;
13440 default:
13441 break;
13443 return false;
13446 /* Return true if the floating point result of (CODE OP0 OP1) has an
13447 integer value. We also allow +Inf, -Inf and NaN to be considered
13448 integer values. Return false for signaling NaN.
13450 DEPTH is the current nesting depth of the query. */
13452 bool
13453 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13455 switch (code)
13457 case PLUS_EXPR:
13458 case MINUS_EXPR:
13459 case MULT_EXPR:
13460 case MIN_EXPR:
13461 case MAX_EXPR:
13462 return RECURSE (op0) && RECURSE (op1);
13464 default:
13465 break;
13467 return false;
13470 /* Return true if the floating point result of calling FN with arguments
13471 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13472 considered integer values. Return false for signaling NaN. If FN
13473 takes fewer than 2 arguments, the remaining ARGn are null.
13475 DEPTH is the current nesting depth of the query. */
13477 bool
13478 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13480 switch (fn)
13482 CASE_CFN_CEIL:
13483 CASE_CFN_FLOOR:
13484 CASE_CFN_NEARBYINT:
13485 CASE_CFN_RINT:
13486 CASE_CFN_ROUND:
13487 CASE_CFN_TRUNC:
13488 return true;
13490 CASE_CFN_FMIN:
13491 CASE_CFN_FMAX:
13492 return RECURSE (arg0) && RECURSE (arg1);
13494 default:
13495 break;
13497 return false;
13500 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13501 has an integer value. We also allow +Inf, -Inf and NaN to be
13502 considered integer values. Return false for signaling NaN.
13504 DEPTH is the current nesting depth of the query. */
13506 bool
13507 integer_valued_real_single_p (tree t, int depth)
13509 switch (TREE_CODE (t))
13511 case REAL_CST:
13512 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13514 case COND_EXPR:
13515 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13517 case SSA_NAME:
13518 /* Limit the depth of recursion to avoid quadratic behavior.
13519 This is expected to catch almost all occurrences in practice.
13520 If this code misses important cases that unbounded recursion
13521 would not, passes that need this information could be revised
13522 to provide it through dataflow propagation. */
13523 return (!name_registered_for_update_p (t)
13524 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13525 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13526 depth));
13528 default:
13529 break;
13531 return false;
13534 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13535 has an integer value. We also allow +Inf, -Inf and NaN to be
13536 considered integer values. Return false for signaling NaN.
13538 DEPTH is the current nesting depth of the query. */
13540 static bool
13541 integer_valued_real_invalid_p (tree t, int depth)
13543 switch (TREE_CODE (t))
13545 case COMPOUND_EXPR:
13546 case MODIFY_EXPR:
13547 case BIND_EXPR:
13548 return RECURSE (TREE_OPERAND (t, 1));
13550 case SAVE_EXPR:
13551 return RECURSE (TREE_OPERAND (t, 0));
13553 default:
13554 break;
13556 return false;
13559 #undef RECURSE
13560 #undef integer_valued_real_p
13562 /* Return true if the floating point expression T has an integer value.
13563 We also allow +Inf, -Inf and NaN to be considered integer values.
13564 Return false for signaling NaN.
13566 DEPTH is the current nesting depth of the query. */
13568 bool
13569 integer_valued_real_p (tree t, int depth)
13571 if (t == error_mark_node)
13572 return false;
13574 tree_code code = TREE_CODE (t);
13575 switch (TREE_CODE_CLASS (code))
13577 case tcc_binary:
13578 case tcc_comparison:
13579 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13580 TREE_OPERAND (t, 1), depth);
13582 case tcc_unary:
13583 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13585 case tcc_constant:
13586 case tcc_declaration:
13587 case tcc_reference:
13588 return integer_valued_real_single_p (t, depth);
13590 default:
13591 break;
13594 switch (code)
13596 case COND_EXPR:
13597 case SSA_NAME:
13598 return integer_valued_real_single_p (t, depth);
13600 case CALL_EXPR:
13602 tree arg0 = (call_expr_nargs (t) > 0
13603 ? CALL_EXPR_ARG (t, 0)
13604 : NULL_TREE);
13605 tree arg1 = (call_expr_nargs (t) > 1
13606 ? CALL_EXPR_ARG (t, 1)
13607 : NULL_TREE);
13608 return integer_valued_real_call_p (get_call_combined_fn (t),
13609 arg0, arg1, depth);
13612 default:
13613 return integer_valued_real_invalid_p (t, depth);
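/* Editorial example: for (double) i + trunc (x) this returns true.  The
   tcc_binary PLUS_EXPR arm recurses into a FLOAT_EXPR (an integer-to-real
   conversion is always integer valued) and into a CASE_CFN_TRUNC call
   (trunc always produces an integral value), so the sum is integer valued
   as well.  */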
13617 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13618 attempt to fold the expression to a constant without modifying TYPE,
13619 OP0 or OP1.
13621 If the expression could be simplified to a constant, then return
13622 the constant. If the expression would not be simplified to a
13623 constant, then return NULL_TREE. */
13625 tree
13626 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13628 tree tem = fold_binary (code, type, op0, op1);
13629 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13632 /* Given the components of a unary expression CODE, TYPE and OP0,
13633 attempt to fold the expression to a constant without modifying
13634 TYPE or OP0.
13636 If the expression could be simplified to a constant, then return
13637 the constant. If the expression would not be simplified to a
13638 constant, then return NULL_TREE. */
13640 tree
13641 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13643 tree tem = fold_unary (code, type, op0);
13644 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
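/* Editorial sketch of the contract shared by the two helpers above:

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree five = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                          two, three);

   yields the INTEGER_CST 5, whereas the same call with a VAR_DECL operand
   yields NULL_TREE because the result is not TREE_CONSTANT.  */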
13647 /* If EXP represents referencing an element in a constant string
13648 (either via pointer arithmetic or array indexing), return the
13649 tree representing the value accessed, otherwise return NULL. */
13651 tree
13652 fold_read_from_constant_string (tree exp)
13654 if ((TREE_CODE (exp) == INDIRECT_REF
13655 || TREE_CODE (exp) == ARRAY_REF)
13656 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13658 tree exp1 = TREE_OPERAND (exp, 0);
13659 tree index;
13660 tree string;
13661 location_t loc = EXPR_LOCATION (exp);
13663 if (TREE_CODE (exp) == INDIRECT_REF)
13664 string = string_constant (exp1, &index);
13665 else
13667 tree low_bound = array_ref_low_bound (exp);
13668 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13670 /* Optimize the special-case of a zero lower bound.
13672 We convert the low_bound to sizetype to avoid some problems
13673 with constant folding. (E.g. suppose the lower bound is 1,
13674 and its mode is QI. Without the conversion, (ARRAY
13675 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13676 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13677 if (! integer_zerop (low_bound))
13678 index = size_diffop_loc (loc, index,
13679 fold_convert_loc (loc, sizetype, low_bound));
13681 string = exp1;
13684 if (string
13685 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13686 && TREE_CODE (string) == STRING_CST
13687 && TREE_CODE (index) == INTEGER_CST
13688 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13689 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13690 == MODE_INT)
13691 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13692 return build_int_cst_type (TREE_TYPE (exp),
13693 (TREE_STRING_POINTER (string)
13694 [TREE_INT_CST_LOW (index)]));
13696 return NULL;
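/* Editorial example: for EXP representing "abc"[1], the STRING_CST path
   above checks that the index 1 is within TREE_STRING_LENGTH ("abc") == 4
   and returns build_int_cst_type for the character 'b'.  */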
13699 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13700 an integer constant, real, or fixed-point constant.
13702 TYPE is the type of the result. */
13704 static tree
13705 fold_negate_const (tree arg0, tree type)
13707 tree t = NULL_TREE;
13709 switch (TREE_CODE (arg0))
13711 case INTEGER_CST:
13713 bool overflow;
13714 wide_int val = wi::neg (arg0, &overflow);
13715 t = force_fit_type (type, val, 1,
13716 (overflow | TREE_OVERFLOW (arg0))
13717 && !TYPE_UNSIGNED (type));
13718 break;
13721 case REAL_CST:
13722 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13723 break;
13725 case FIXED_CST:
13727 FIXED_VALUE_TYPE f;
13728 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13729 &(TREE_FIXED_CST (arg0)), NULL,
13730 TYPE_SATURATING (type));
13731 t = build_fixed (type, f);
13732 /* Propagate overflow flags. */
13733 if (overflow_p | TREE_OVERFLOW (arg0))
13734 TREE_OVERFLOW (t) = 1;
13735 break;
13738 default:
13739 gcc_unreachable ();
13742 return t;
13745 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13746 an integer constant or real constant.
13748 TYPE is the type of the result. */
13750 tree
13751 fold_abs_const (tree arg0, tree type)
13753 tree t = NULL_TREE;
13755 switch (TREE_CODE (arg0))
13757 case INTEGER_CST:
13759 /* If the value is unsigned or non-negative, then the absolute value
13760 is the same as the ordinary value. */
13761 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13762 t = arg0;
13764 /* If the value is negative, then the absolute value is
13765 its negation. */
13766 else
13768 bool overflow;
13769 wide_int val = wi::neg (arg0, &overflow);
13770 t = force_fit_type (type, val, -1,
13771 overflow | TREE_OVERFLOW (arg0));
13774 break;
13776 case REAL_CST:
13777 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13778 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13779 else
13780 t = arg0;
13781 break;
13783 default:
13784 gcc_unreachable ();
13787 return t;
13790 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13791 constant. TYPE is the type of the result. */
13793 static tree
13794 fold_not_const (const_tree arg0, tree type)
13796 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13798 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13801 /* Given CODE, a relational operator, the target type, TYPE and two
13802 constant operands OP0 and OP1, return the result of the
13803 relational operation. If the result is not a compile time
13804 constant, then return NULL_TREE. */
13806 static tree
13807 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13809 int result, invert;
13811 /* From here on, the only cases we handle are when the result is
13812 known to be a constant. */
13814 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13816 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13817 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13819 /* Handle the cases where either operand is a NaN. */
13820 if (real_isnan (c0) || real_isnan (c1))
13822 switch (code)
13824 case EQ_EXPR:
13825 case ORDERED_EXPR:
13826 result = 0;
13827 break;
13829 case NE_EXPR:
13830 case UNORDERED_EXPR:
13831 case UNLT_EXPR:
13832 case UNLE_EXPR:
13833 case UNGT_EXPR:
13834 case UNGE_EXPR:
13835 case UNEQ_EXPR:
13836 result = 1;
13837 break;
13839 case LT_EXPR:
13840 case LE_EXPR:
13841 case GT_EXPR:
13842 case GE_EXPR:
13843 case LTGT_EXPR:
13844 if (flag_trapping_math)
13845 return NULL_TREE;
13846 result = 0;
13847 break;
13849 default:
13850 gcc_unreachable ();
13853 return constant_boolean_node (result, type);
13856 return constant_boolean_node (real_compare (code, c0, c1), type);
13859 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13861 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13862 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13863 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13866 /* Handle equality/inequality of complex constants. */
13867 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13869 tree rcond = fold_relational_const (code, type,
13870 TREE_REALPART (op0),
13871 TREE_REALPART (op1));
13872 tree icond = fold_relational_const (code, type,
13873 TREE_IMAGPART (op0),
13874 TREE_IMAGPART (op1));
13875 if (code == EQ_EXPR)
13876 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13877 else if (code == NE_EXPR)
13878 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13879 else
13880 return NULL_TREE;
13883 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13885 if (!VECTOR_TYPE_P (type))
13887 /* Have vector comparison with scalar boolean result. */
13888 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
13889 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
13890 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
13892 tree elem0 = VECTOR_CST_ELT (op0, i);
13893 tree elem1 = VECTOR_CST_ELT (op1, i);
13894 tree tmp = fold_relational_const (code, type, elem0, elem1);
13895 if (tmp == NULL_TREE)
13896 return NULL_TREE;
13897 if (integer_zerop (tmp))
13898 return constant_boolean_node (false, type);
13900 return constant_boolean_node (true, type);
13902 unsigned count = VECTOR_CST_NELTS (op0);
13903 tree *elts = XALLOCAVEC (tree, count);
13904 gcc_assert (VECTOR_CST_NELTS (op1) == count
13905 && TYPE_VECTOR_SUBPARTS (type) == count);
13907 for (unsigned i = 0; i < count; i++)
13909 tree elem_type = TREE_TYPE (type);
13910 tree elem0 = VECTOR_CST_ELT (op0, i);
13911 tree elem1 = VECTOR_CST_ELT (op1, i);
13913 tree tem = fold_relational_const (code, elem_type,
13914 elem0, elem1);
13916 if (tem == NULL_TREE)
13917 return NULL_TREE;
13919 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
13922 return build_vector (type, elts);
13925 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13927 To compute GT, swap the arguments and do LT.
13928 To compute GE, do LT and invert the result.
13929 To compute LE, swap the arguments, do LT and invert the result.
13930 To compute NE, do EQ and invert the result.
13932 Therefore, the code below must handle only EQ and LT. */
13934 if (code == LE_EXPR || code == GT_EXPR)
13936 std::swap (op0, op1);
13937 code = swap_tree_comparison (code);
13940 /* Note that it is safe to invert for real values here because we
13941 have already handled the one case where it matters. */
13943 invert = 0;
13944 if (code == NE_EXPR || code == GE_EXPR)
13946 invert = 1;
13947 code = invert_tree_comparison (code, false);
13950 /* Compute a result for LT or EQ if args permit;
13951 otherwise return NULL_TREE. */
13952 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13954 if (code == EQ_EXPR)
13955 result = tree_int_cst_equal (op0, op1);
13956 else
13957 result = tree_int_cst_lt (op0, op1);
13959 else
13960 return NULL_TREE;
13962 if (invert)
13963 result ^= 1;
13964 return constant_boolean_node (result, type);
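/* Editorial walk-through of the canonicalization above: folding 3 >= 5
   leaves the operands alone (GE is neither LE nor GT), rewrites GE as the
   inverse of LT, computes tree_int_cst_lt (3, 5) == 1, inverts that to 0,
   and returns constant_boolean_node (0, type).  */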
13967 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13968 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13969 itself. */
13971 tree
13972 fold_build_cleanup_point_expr (tree type, tree expr)
13974 /* If the expression does not have side effects then we don't have to wrap
13975 it with a cleanup point expression. */
13976 if (!TREE_SIDE_EFFECTS (expr))
13977 return expr;
13979 /* If the expression is a return, check whether the expression inside
13980 the return, or the right-hand side of the modify expression inside
13981 the return, has side effects. If either has none, we don't need to
13982 wrap the expression in a cleanup point expression. Note we don't check
13983 the left-hand side of the modify because it should always be a return decl. */
13984 if (TREE_CODE (expr) == RETURN_EXPR)
13986 tree op = TREE_OPERAND (expr, 0);
13987 if (!op || !TREE_SIDE_EFFECTS (op))
13988 return expr;
13989 op = TREE_OPERAND (op, 1);
13990 if (!TREE_SIDE_EFFECTS (op))
13991 return expr;
13994 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
13997 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13998 of an indirection through OP0, or NULL_TREE if no simplification is
13999 possible. */
14001 tree
14002 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14004 tree sub = op0;
14005 tree subtype;
14007 STRIP_NOPS (sub);
14008 subtype = TREE_TYPE (sub);
14009 if (!POINTER_TYPE_P (subtype))
14010 return NULL_TREE;
14012 if (TREE_CODE (sub) == ADDR_EXPR)
14014 tree op = TREE_OPERAND (sub, 0);
14015 tree optype = TREE_TYPE (op);
14016 /* *&CONST_DECL -> to the value of the const decl. */
14017 if (TREE_CODE (op) == CONST_DECL)
14018 return DECL_INITIAL (op);
14019 /* *&p => p; make sure to handle *&"str"[cst] here. */
14020 if (type == optype)
14022 tree fop = fold_read_from_constant_string (op);
14023 if (fop)
14024 return fop;
14025 else
14026 return op;
14028 /* *(foo *)&fooarray => fooarray[0] */
14029 else if (TREE_CODE (optype) == ARRAY_TYPE
14030 && type == TREE_TYPE (optype)
14031 && (!in_gimple_form
14032 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14034 tree type_domain = TYPE_DOMAIN (optype);
14035 tree min_val = size_zero_node;
14036 if (type_domain && TYPE_MIN_VALUE (type_domain))
14037 min_val = TYPE_MIN_VALUE (type_domain);
14038 if (in_gimple_form
14039 && TREE_CODE (min_val) != INTEGER_CST)
14040 return NULL_TREE;
14041 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14042 NULL_TREE, NULL_TREE);
14044 /* *(foo *)&complexfoo => __real__ complexfoo */
14045 else if (TREE_CODE (optype) == COMPLEX_TYPE
14046 && type == TREE_TYPE (optype))
14047 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14048 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14049 else if (TREE_CODE (optype) == VECTOR_TYPE
14050 && type == TREE_TYPE (optype))
14052 tree part_width = TYPE_SIZE (type);
14053 tree index = bitsize_int (0);
14054 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14058 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14059 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14061 tree op00 = TREE_OPERAND (sub, 0);
14062 tree op01 = TREE_OPERAND (sub, 1);
14064 STRIP_NOPS (op00);
14065 if (TREE_CODE (op00) == ADDR_EXPR)
14067 tree op00type;
14068 op00 = TREE_OPERAND (op00, 0);
14069 op00type = TREE_TYPE (op00);
14071 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14072 if (TREE_CODE (op00type) == VECTOR_TYPE
14073 && type == TREE_TYPE (op00type))
14075 tree part_width = TYPE_SIZE (type);
14076 unsigned HOST_WIDE_INT max_offset
14077 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
14078 * TYPE_VECTOR_SUBPARTS (op00type));
14079 if (tree_int_cst_sign_bit (op01) == 0
14080 && compare_tree_int (op01, max_offset) == -1)
14082 unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
14083 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14084 tree index = bitsize_int (indexi);
14085 return fold_build3_loc (loc,
14086 BIT_FIELD_REF, type, op00,
14087 part_width, index);
14090 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14091 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14092 && type == TREE_TYPE (op00type))
14094 tree size = TYPE_SIZE_UNIT (type);
14095 if (tree_int_cst_equal (size, op01))
14096 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14098 /* ((foo *)&fooarray)[1] => fooarray[1] */
14099 else if (TREE_CODE (op00type) == ARRAY_TYPE
14100 && type == TREE_TYPE (op00type))
14102 tree type_domain = TYPE_DOMAIN (op00type);
14103 tree min_val = size_zero_node;
14104 if (type_domain && TYPE_MIN_VALUE (type_domain))
14105 min_val = TYPE_MIN_VALUE (type_domain);
14106 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14107 TYPE_SIZE_UNIT (type));
14108 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14109 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14110 NULL_TREE, NULL_TREE);
14115 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14116 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14117 && type == TREE_TYPE (TREE_TYPE (subtype))
14118 && (!in_gimple_form
14119 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14121 tree type_domain;
14122 tree min_val = size_zero_node;
14123 sub = build_fold_indirect_ref_loc (loc, sub);
14124 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14125 if (type_domain && TYPE_MIN_VALUE (type_domain))
14126 min_val = TYPE_MIN_VALUE (type_domain);
14127 if (in_gimple_form
14128 && TREE_CODE (min_val) != INTEGER_CST)
14129 return NULL_TREE;
14130 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14131 NULL_TREE);
14134 return NULL_TREE;
14137 /* Builds an expression for an indirection through T, simplifying some
14138 cases. */
14140 tree
14141 build_fold_indirect_ref_loc (location_t loc, tree t)
14143 tree type = TREE_TYPE (TREE_TYPE (t));
14144 tree sub = fold_indirect_ref_1 (loc, type, t);
14146 if (sub)
14147 return sub;
14149 return build1_loc (loc, INDIRECT_REF, type, t);
14152 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14154 tree
14155 fold_indirect_ref_loc (location_t loc, tree t)
14157 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14159 if (sub)
14160 return sub;
14161 else
14162 return t;
14165 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14166 whose result is ignored. The type of the returned tree need not be
14167 the same as the original expression. */
14169 tree
14170 fold_ignored_result (tree t)
14172 if (!TREE_SIDE_EFFECTS (t))
14173 return integer_zero_node;
14175 for (;;)
14176 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14178 case tcc_unary:
14179 t = TREE_OPERAND (t, 0);
14180 break;
14182 case tcc_binary:
14183 case tcc_comparison:
14184 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14185 t = TREE_OPERAND (t, 0);
14186 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14187 t = TREE_OPERAND (t, 1);
14188 else
14189 return t;
14190 break;
14192 case tcc_expression:
14193 switch (TREE_CODE (t))
14195 case COMPOUND_EXPR:
14196 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14197 return t;
14198 t = TREE_OPERAND (t, 0);
14199 break;
14201 case COND_EXPR:
14202 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14203 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14204 return t;
14205 t = TREE_OPERAND (t, 0);
14206 break;
14208 default:
14209 return t;
14211 break;
14213 default:
14214 return t;
14218 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14220 tree
14221 round_up_loc (location_t loc, tree value, unsigned int divisor)
14223 tree div = NULL_TREE;
14225 if (divisor == 1)
14226 return value;
14228 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14229 have to do anything. Only do this when we are not given a constant,
14230 because in that case, this check is more expensive than just
14231 doing the rounding. */
14232 if (TREE_CODE (value) != INTEGER_CST)
14234 div = build_int_cst (TREE_TYPE (value), divisor);
14236 if (multiple_of_p (TREE_TYPE (value), value, div))
14237 return value;
14240 /* If divisor is a power of two, simplify this to bit manipulation. */
14241 if (pow2_or_zerop (divisor))
14243 if (TREE_CODE (value) == INTEGER_CST)
14245 wide_int val = value;
14246 bool overflow_p;
14248 if ((val & (divisor - 1)) == 0)
14249 return value;
14251 overflow_p = TREE_OVERFLOW (value);
14252 val += divisor - 1;
14253 val &= - (int) divisor;
14254 if (val == 0)
14255 overflow_p = true;
14257 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14259 else
14261 tree t;
14263 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14264 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14265 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14266 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14269 else
14271 if (!div)
14272 div = build_int_cst (TREE_TYPE (value), divisor);
14273 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14274 value = size_binop_loc (loc, MULT_EXPR, value, div);
14277 return value;
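/* Editorial example: rounding 37 up to a multiple of 8 takes the
   power-of-two branch above and computes (37 + 7) & -8 == 44 & ~7 == 40;
   for a non-constant VALUE the same PLUS_EXPR/BIT_AND_EXPR pair is built
   symbolically via size_binop_loc.  */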
14280 /* Likewise, but round down. */
14282 tree
14283 round_down_loc (location_t loc, tree value, int divisor)
14285 tree div = NULL_TREE;
14287 gcc_assert (divisor > 0);
14288 if (divisor == 1)
14289 return value;
14291 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14292 have to do anything. Only do this when we are not given a constant,
14293 because in that case, this check is more expensive than just
14294 doing the rounding. */
14295 if (TREE_CODE (value) != INTEGER_CST)
14297 div = build_int_cst (TREE_TYPE (value), divisor);
14299 if (multiple_of_p (TREE_TYPE (value), value, div))
14300 return value;
14303 /* If divisor is a power of two, simplify this to bit manipulation. */
14304 if (pow2_or_zerop (divisor))
14306 tree t;
14308 t = build_int_cst (TREE_TYPE (value), -divisor);
14309 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14311 else
14313 if (!div)
14314 div = build_int_cst (TREE_TYPE (value), divisor);
14315 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14316 value = size_binop_loc (loc, MULT_EXPR, value, div);
14319 return value;
14322 /* Returns the pointer to the base of the object addressed by EXP and
14323 extracts the information about the offset of the access, storing it
14324 to PBITPOS and POFFSET. */
14326 static tree
14327 split_address_to_core_and_offset (tree exp,
14328 HOST_WIDE_INT *pbitpos, tree *poffset)
14330 tree core;
14331 machine_mode mode;
14332 int unsignedp, reversep, volatilep;
14333 HOST_WIDE_INT bitsize;
14334 location_t loc = EXPR_LOCATION (exp);
14336 if (TREE_CODE (exp) == ADDR_EXPR)
14338 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14339 poffset, &mode, &unsignedp, &reversep,
14340 &volatilep);
14341 core = build_fold_addr_expr_loc (loc, core);
14343 else
14345 core = exp;
14346 *pbitpos = 0;
14347 *poffset = NULL_TREE;
14350 return core;
14353 /* Returns true if addresses of E1 and E2 differ by a constant, false
14354 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14356 bool
14357 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14359 tree core1, core2;
14360 HOST_WIDE_INT bitpos1, bitpos2;
14361 tree toffset1, toffset2, tdiff, type;
14363 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14364 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14366 if (bitpos1 % BITS_PER_UNIT != 0
14367 || bitpos2 % BITS_PER_UNIT != 0
14368 || !operand_equal_p (core1, core2, 0))
14369 return false;
14371 if (toffset1 && toffset2)
14373 type = TREE_TYPE (toffset1);
14374 if (type != TREE_TYPE (toffset2))
14375 toffset2 = fold_convert (type, toffset2);
14377 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14378 if (!cst_and_fits_in_hwi (tdiff))
14379 return false;
14381 *diff = int_cst_value (tdiff);
14383 else if (toffset1 || toffset2)
14385 /* If only one of the offsets is non-constant, the difference cannot
14386 be a constant. */
14387 return false;
14389 else
14390 *diff = 0;
14392 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14393 return true;
14396 /* Return OFF converted to a pointer offset type suitable as offset for
14397 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14398 tree
14399 convert_to_ptrofftype_loc (location_t loc, tree off)
14401 return fold_convert_loc (loc, sizetype, off);
14404 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14405 tree
14406 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14408 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14409 ptr, convert_to_ptrofftype_loc (loc, off));
14412 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14413 tree
14414 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14416 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14417 ptr, size_int (off));
14420 /* Return a char pointer for a C string if it is a string constant
14421 or a sum of a string constant and an integer constant. We only support
14422 string constants properly terminated with a '\0' character.
14423 If STRLEN is a valid pointer, the length (including the terminating
14424 character) of the returned string is stored to the argument. */
14426 const char *
14427 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
14429 tree offset_node;
14431 if (strlen)
14432 *strlen = 0;
14434 src = string_constant (src, &offset_node);
14435 if (src == 0)
14436 return NULL;
14438 unsigned HOST_WIDE_INT offset = 0;
14439 if (offset_node != NULL_TREE)
14441 if (!tree_fits_uhwi_p (offset_node))
14442 return NULL;
14443 else
14444 offset = tree_to_uhwi (offset_node);
14447 unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
14448 const char *string = TREE_STRING_POINTER (src);
14450 /* Support only properly null-terminated strings. */
14451 if (string_length == 0
14452 || string[string_length - 1] != '\0'
14453 || offset >= string_length)
14454 return NULL;
14456 if (strlen)
14457 *strlen = string_length - offset;
14458 return string + offset;
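/* Editorial example: for SRC representing "hello" + 2, string_constant
   reports offset 2, the function returns a pointer to "llo", and *STRLEN
   (when requested) is set to 4, i.e. the three remaining characters plus
   the terminating NUL.  */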
14461 #if CHECKING_P
14463 namespace selftest {
14465 /* Helper functions for writing tests of folding trees. */
14467 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
14469 static void
14470 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
14471 tree constant)
14473 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
14476 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
14477 wrapping WRAPPED_EXPR. */
14479 static void
14480 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
14481 tree wrapped_expr)
14483 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
14484 ASSERT_NE (wrapped_expr, result);
14485 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
14486 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
14489 /* Verify that various arithmetic binary operations are folded
14490 correctly. */
14492 static void
14493 test_arithmetic_folding ()
14495 tree type = integer_type_node;
14496 tree x = create_tmp_var_raw (type, "x");
14497 tree zero = build_zero_cst (type);
14498 tree one = build_int_cst (type, 1);
14500 /* Addition. */
14501 /* 1 <-- (0 + 1) */
14502 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
14503 one);
14504 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
14505 one);
14507 /* (nonlvalue)x <-- (x + 0) */
14508 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
14509 x);
14511 /* Subtraction. */
14512 /* 0 <-- (x - x) */
14513 assert_binop_folds_to_const (x, MINUS_EXPR, x,
14514 zero);
14515 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
14516 x);
14518 /* Multiplication. */
14519 /* 0 <-- (x * 0) */
14520 assert_binop_folds_to_const (x, MULT_EXPR, zero,
14521 zero);
14523 /* (nonlvalue)x <-- (x * 1) */
14524 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
14525 x);
14528 /* Verify that various binary operations on vectors are folded
14529 correctly. */
14531 static void
14532 test_vector_folding ()
14534 tree inner_type = integer_type_node;
14535 tree type = build_vector_type (inner_type, 4);
14536 tree zero = build_zero_cst (type);
14537 tree one = build_one_cst (type);
14539 /* Verify equality tests that return a scalar boolean result. */
14540 tree res_type = boolean_type_node;
14541 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
14542 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
14543 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
14544 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
14547 /* Run all of the selftests within this file. */
14549 void
14550 fold_const_c_tests ()
14552 test_arithmetic_folding ();
14553 test_vector_folding ();
14556 } // namespace selftest
14558 #endif /* CHECKING_P */