/* gcc/fold-const.c */
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "params.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
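
/* The encoding assigns one bit per primitive outcome: bit 0 = LT,
   bit 1 = EQ, bit 2 = GT, bit 3 = UNORD.  A composite code is the
   inclusive OR of its outcomes, e.g. COMPCODE_LE == COMPCODE_LT
   | COMPCODE_EQ (1 | 2 == 3) and COMPCODE_NE == COMPCODE_LT
   | COMPCODE_GT | COMPCODE_UNORD (13).  ANDing two codes therefore
   intersects the outcome sets and ORing them takes the union.  */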

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (location_t, tree, tree, enum tree_code,
			tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);
static tree fold_negate_expr (location_t, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place:
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
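
/* For example, given the INTEGER_CST operands 12 and 4 this folds to
   the INTEGER_CST 3, while 13 and 4 yield NULL_TREE because the
   division is inexact.  */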

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
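
/* An illustrative sketch of the intended calling pattern (hypothetical
   caller, not code from this file):

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);   /+ may record a deferred warning +/
     fold_undefer_overflow_warnings (folded != expr, stmt, 0);

   i.e. fold speculatively, then warn only if the folded result is
   actually used, associating the warning with STMT's location.  */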

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
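
/* For example, sin is odd (-sin(x) == sin(-x)), so a negated call may
   be folded into a call with a negated argument.  The rint family is
   treated as odd only when -frounding-math is off: under a directed
   rounding mode, rint (-x) need not equal -rint (x).  */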

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
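
/* E.g. for a 32-bit signed type every value except INT_MIN may be
   negated: INT_MIN is the only value whose representation is exactly
   the sign bit, which is what wi::only_sign_bit_p detects.  */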

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, but negating one of its operands
	 does when n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && wi::popcount (wi::abs (TREE_OPERAND (t, 0))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && wi::popcount (wi::abs (TREE_OPERAND (t, 1))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
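
/* For example, with IEEE double and default flags, "a - b" is not
   cheaply negatable because HONOR_SIGNED_ZEROS holds (-(a - b) and
   b - a differ for a == b == 0.0), whereas with -ffast-math the
   MINUS_EXPR case above returns true and -(a - b) may be rewritten
   as b - a.  */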

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
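
/* Illustrative folds performed above: for a signed 32-bit int x,
   "-(~x)" becomes "x + 1" (BIT_NOT_EXPR case) and "-(x >> 31)"
   becomes "(unsigned) x >> 31" (RSHIFT_EXPR case, since 31 is
   TYPE_PRECISION - 1).  */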

/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating illegal MINUS pointer expressions.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (location_t loc, tree in, tree type, enum tree_code code,
	    tree *conp, tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	{
	  /* Convert to TYPE before negating.  */
	  *conp = fold_convert_loc (loc, type, *conp);
	  *conp = negate_expr (*conp);
	}
      if (neg_var_p && var)
	{
	  /* Convert to TYPE before negating.  */
	  var = fold_convert_loc (loc, type, var);
	  var = negate_expr (var);
	}
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  Convert to TYPE before negating.  */
      *minus_litp = build_one_cst (type);
      var = negate_expr (fold_convert_loc (loc, type, TREE_OPERAND (in, 0)));
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	{
	  /* Convert to TYPE before negating.  */
	  *conp = fold_convert_loc (loc, type, *conp);
	  *conp = negate_expr (*conp);
	}
      if (var)
	{
	  /* Convert to TYPE before negating.  */
	  var = fold_convert_loc (loc, type, var);
	  var = negate_expr (var);
	}
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
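
/* For example, splitting IN = "a - 5" with CODE == PLUS_EXPR and
   NEGATE_P false sets *LITP = 0, *MINUS_LITP = 5 and *CONP = 0, and
   returns the variable part "a": the MINUS_EXPR is handled by
   recording the subtracted literal in *MINUS_LITP.  */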

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}
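
/* For example, TRUNC_DIV_EXPR on 7 and 2 folds to 3 and on -7 and 2
   to -3 (truncation toward zero), while FLOOR_DIV_EXPR on -7 and 2
   folds to -4; any division or modulus by zero returns NULL_TREE
   rather than folding.  Negative shift and rotate counts are handled
   above by flipping the direction of the operation.  */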

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make the resulting NaN value be a qNaN when
	     flag_signaling_nans is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make the resulting NaN value be a qNaN when
	     flag_signaling_nans is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  elts[i] = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  return NULL_TREE;
}
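
/* As an example of the complex path above, multiplying the integer
   COMPLEX_CSTs 3 + 4i and 1 + 2i computes real = 3*1 - 4*2 = -5 and
   imag = 3*2 + 4*1 = 10, i.e. -5 + 10i; complex *floating-point*
   multiplication and division are instead delegated to MPC via
   do_mpc_arg2.  */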

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument, put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}

/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts, i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;
	nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	  }

	return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}
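
/* E.g. REDUC_PLUS_EXPR on the VECTOR_CST { 1, 2, 3, 4 } folds the
   elements pairwise into elts[0] and returns the scalar 10.  */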

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
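
/* For example, with sizetype constants ARG0 = 4 and ARG1 = 8 the
   result is the ssizetype constant -4, computed as 0 - (8 - 4) so
   that neither intermediate subtraction overflows.  */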

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
	{
	  overflow = true;
	  val = lt;
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (real_less (&u, &r))
	    {
	      overflow = true;
	      val = ut;
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
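
/* Under the saturating semantics above, converting the REAL_CST 3.9
   to a 32-bit int yields 3 (real_trunc), converting 1.0e30 yields
   INT_MAX, and converting a NaN yields 0; the latter two also set
   TREE_OVERFLOW on the result.  */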

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if the fractional bits are not zero, add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}
1995 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1996 to another floating point type. */
1998 static tree
1999 fold_convert_const_real_from_real (tree type, const_tree arg1)
2001 REAL_VALUE_TYPE value;
2002 tree t;
2004 /* Don't perform the operation if flag_signaling_nans is on
2005 and the operand is a signaling NaN. */
2006 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2007 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2008 return NULL_TREE;
2010 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2011 t = build_real (type, value);
2013 /* If converting an infinity or NAN to a representation that doesn't
2014 have one, set the overflow bit so that we can produce some kind of
2015 error message at the appropriate point if necessary. It's not the
2016 most user-friendly message, but it's better than nothing. */
2017 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2018 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2019 TREE_OVERFLOW (t) = 1;
2020 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2021 && !MODE_HAS_NANS (TYPE_MODE (type)))
2022 TREE_OVERFLOW (t) = 1;
2023 /* Regular overflow: the conversion produced an infinity in a mode
2024 that can't represent one. */
2025 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2026 && REAL_VALUE_ISINF (value)
2027 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2028 TREE_OVERFLOW (t) = 1;
2029 else
2030 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2031 return t;
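/* Illustrative sketch (editorial, not part of the original source): the
   signaling-NaN guard above in action.  */
#if 0
  /* An ordinary value folds exactly: (float) 1.5 becomes a float
     REAL_CST.  */
  REAL_VALUE_TYPE r;
  real_from_string (&r, "1.5");
  tree f
    = fold_convert_const_real_from_real (float_type_node,
					 build_real (double_type_node, r));
  /* With -fsignaling-nans in effect, a signaling-NaN operand yields
     NULL_TREE instead: folding would lose the invalid-operand exception
     the conversion may raise at run time.  */
#endif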
2034 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2035 to a floating point type. */
2037 static tree
2038 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2040 REAL_VALUE_TYPE value;
2041 tree t;
2043 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2044 t = build_real (type, value);
2046 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2047 return t;
2050 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2051 to another fixed-point type. */
2053 static tree
2054 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2056 FIXED_VALUE_TYPE value;
2057 tree t;
2058 bool overflow_p;
2060 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2061 TYPE_SATURATING (type));
2062 t = build_fixed (type, value);
2064 /* Propagate overflow flags. */
2065 if (overflow_p | TREE_OVERFLOW (arg1))
2066 TREE_OVERFLOW (t) = 1;
2067 return t;
2070 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2071 to a fixed-point type. */
2073 static tree
2074 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2076 FIXED_VALUE_TYPE value;
2077 tree t;
2078 bool overflow_p;
2079 double_int di;
2081 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2083 di.low = TREE_INT_CST_ELT (arg1, 0);
2084 if (TREE_INT_CST_NUNITS (arg1) == 1)
2085 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2086 else
2087 di.high = TREE_INT_CST_ELT (arg1, 1);
2089 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2090 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2091 TYPE_SATURATING (type));
2092 t = build_fixed (type, value);
2094 /* Propagate overflow flags. */
2095 if (overflow_p | TREE_OVERFLOW (arg1))
2096 TREE_OVERFLOW (t) = 1;
2097 return t;
2100 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2101 to a fixed-point type. */
2103 static tree
2104 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2106 FIXED_VALUE_TYPE value;
2107 tree t;
2108 bool overflow_p;
2110 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2111 &TREE_REAL_CST (arg1),
2112 TYPE_SATURATING (type));
2113 t = build_fixed (type, value);
2115 /* Propagate overflow flags. */
2116 if (overflow_p | TREE_OVERFLOW (arg1))
2117 TREE_OVERFLOW (t) = 1;
2118 return t;
2121 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2122 type TYPE. If no simplification can be done return NULL_TREE. */
2124 static tree
2125 fold_convert_const (enum tree_code code, tree type, tree arg1)
2127 if (TREE_TYPE (arg1) == type)
2128 return arg1;
2130 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2131 || TREE_CODE (type) == OFFSET_TYPE)
2133 if (TREE_CODE (arg1) == INTEGER_CST)
2134 return fold_convert_const_int_from_int (type, arg1);
2135 else if (TREE_CODE (arg1) == REAL_CST)
2136 return fold_convert_const_int_from_real (code, type, arg1);
2137 else if (TREE_CODE (arg1) == FIXED_CST)
2138 return fold_convert_const_int_from_fixed (type, arg1);
2140 else if (TREE_CODE (type) == REAL_TYPE)
2142 if (TREE_CODE (arg1) == INTEGER_CST)
2143 return build_real_from_int_cst (type, arg1);
2144 else if (TREE_CODE (arg1) == REAL_CST)
2145 return fold_convert_const_real_from_real (type, arg1);
2146 else if (TREE_CODE (arg1) == FIXED_CST)
2147 return fold_convert_const_real_from_fixed (type, arg1);
2149 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2151 if (TREE_CODE (arg1) == FIXED_CST)
2152 return fold_convert_const_fixed_from_fixed (type, arg1);
2153 else if (TREE_CODE (arg1) == INTEGER_CST)
2154 return fold_convert_const_fixed_from_int (type, arg1);
2155 else if (TREE_CODE (arg1) == REAL_CST)
2156 return fold_convert_const_fixed_from_real (type, arg1);
2158 else if (TREE_CODE (type) == VECTOR_TYPE)
2160 if (TREE_CODE (arg1) == VECTOR_CST
2161 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2163 int len = TYPE_VECTOR_SUBPARTS (type);
2164 tree elttype = TREE_TYPE (type);
2165 tree *v = XALLOCAVEC (tree, len);
2166 for (int i = 0; i < len; ++i)
2168 tree elt = VECTOR_CST_ELT (arg1, i);
2169 tree cvt = fold_convert_const (code, elttype, elt);
2170 if (cvt == NULL_TREE)
2171 return NULL_TREE;
2172 v[i] = cvt;
2174 return build_vector (type, v);
2177 return NULL_TREE;
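/* Illustrative sketch (editorial, not part of the original source): the
   dispatch above at work.  */
#if 0
  /* Integer -> real constant: (double) 5 folds straight to 5.0.  */
  tree five = build_int_cst (integer_type_node, 5);
  tree d = fold_convert_const (FLOAT_EXPR, double_type_node, five);
  /* Combinations with no handler (or element-wise failures for
     VECTOR_CSTs) return NULL_TREE, and the caller falls back to
     building an unfolded conversion expression.  */
#endif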
2180 /* Construct a vector of zero elements of vector type TYPE. */
2182 static tree
2183 build_zero_vector (tree type)
2185 tree t;
2187 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2188 return build_vector_from_val (type, t);
2191 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2193 bool
2194 fold_convertible_p (const_tree type, const_tree arg)
2196 tree orig = TREE_TYPE (arg);
2198 if (type == orig)
2199 return true;
2201 if (TREE_CODE (arg) == ERROR_MARK
2202 || TREE_CODE (type) == ERROR_MARK
2203 || TREE_CODE (orig) == ERROR_MARK)
2204 return false;
2206 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2207 return true;
2209 switch (TREE_CODE (type))
2211 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2212 case POINTER_TYPE: case REFERENCE_TYPE:
2213 case OFFSET_TYPE:
2214 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2215 || TREE_CODE (orig) == OFFSET_TYPE);
2217 case REAL_TYPE:
2218 case FIXED_POINT_TYPE:
2219 case VECTOR_TYPE:
2220 case VOID_TYPE:
2221 return TREE_CODE (type) == TREE_CODE (orig);
2223 default:
2224 return false;
2228 /* Convert expression ARG to type TYPE. Used by the middle-end for
2229 simple conversions in preference to calling the front-end's convert. */
2231 tree
2232 fold_convert_loc (location_t loc, tree type, tree arg)
2234 tree orig = TREE_TYPE (arg);
2235 tree tem;
2237 if (type == orig)
2238 return arg;
2240 if (TREE_CODE (arg) == ERROR_MARK
2241 || TREE_CODE (type) == ERROR_MARK
2242 || TREE_CODE (orig) == ERROR_MARK)
2243 return error_mark_node;
2245 switch (TREE_CODE (type))
2247 case POINTER_TYPE:
2248 case REFERENCE_TYPE:
2249 /* Handle conversions between pointers to different address spaces. */
2250 if (POINTER_TYPE_P (orig)
2251 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2252 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2253 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2254 /* fall through */
2256 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2257 case OFFSET_TYPE:
2258 if (TREE_CODE (arg) == INTEGER_CST)
2260 tem = fold_convert_const (NOP_EXPR, type, arg);
2261 if (tem != NULL_TREE)
2262 return tem;
2264 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2265 || TREE_CODE (orig) == OFFSET_TYPE)
2266 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2267 if (TREE_CODE (orig) == COMPLEX_TYPE)
2268 return fold_convert_loc (loc, type,
2269 fold_build1_loc (loc, REALPART_EXPR,
2270 TREE_TYPE (orig), arg));
2271 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2272 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2273 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2275 case REAL_TYPE:
2276 if (TREE_CODE (arg) == INTEGER_CST)
2278 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2279 if (tem != NULL_TREE)
2280 return tem;
2282 else if (TREE_CODE (arg) == REAL_CST)
2284 tem = fold_convert_const (NOP_EXPR, type, arg);
2285 if (tem != NULL_TREE)
2286 return tem;
2288 else if (TREE_CODE (arg) == FIXED_CST)
2290 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2291 if (tem != NULL_TREE)
2292 return tem;
2295 switch (TREE_CODE (orig))
2297 case INTEGER_TYPE:
2298 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2299 case POINTER_TYPE: case REFERENCE_TYPE:
2300 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2302 case REAL_TYPE:
2303 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2305 case FIXED_POINT_TYPE:
2306 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2308 case COMPLEX_TYPE:
2309 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2310 return fold_convert_loc (loc, type, tem);
2312 default:
2313 gcc_unreachable ();
2316 case FIXED_POINT_TYPE:
2317 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2318 || TREE_CODE (arg) == REAL_CST)
2320 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2321 if (tem != NULL_TREE)
2322 goto fold_convert_exit;
2325 switch (TREE_CODE (orig))
2327 case FIXED_POINT_TYPE:
2328 case INTEGER_TYPE:
2329 case ENUMERAL_TYPE:
2330 case BOOLEAN_TYPE:
2331 case REAL_TYPE:
2332 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2334 case COMPLEX_TYPE:
2335 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2336 return fold_convert_loc (loc, type, tem);
2338 default:
2339 gcc_unreachable ();
2342 case COMPLEX_TYPE:
2343 switch (TREE_CODE (orig))
2345 case INTEGER_TYPE:
2346 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2347 case POINTER_TYPE: case REFERENCE_TYPE:
2348 case REAL_TYPE:
2349 case FIXED_POINT_TYPE:
2350 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2351 fold_convert_loc (loc, TREE_TYPE (type), arg),
2352 fold_convert_loc (loc, TREE_TYPE (type),
2353 integer_zero_node));
2354 case COMPLEX_TYPE:
2356 tree rpart, ipart;
2358 if (TREE_CODE (arg) == COMPLEX_EXPR)
2360 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2361 TREE_OPERAND (arg, 0));
2362 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2363 TREE_OPERAND (arg, 1));
2364 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2367 arg = save_expr (arg);
2368 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2369 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2370 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2371 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2372 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2375 default:
2376 gcc_unreachable ();
2379 case VECTOR_TYPE:
2380 if (integer_zerop (arg))
2381 return build_zero_vector (type);
2382 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2383 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2384 || TREE_CODE (orig) == VECTOR_TYPE);
2385 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2387 case VOID_TYPE:
2388 tem = fold_ignored_result (arg);
2389 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2391 default:
2392 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2393 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2394 gcc_unreachable ();
2396 fold_convert_exit:
2397 protected_set_expr_location_unshare (tem, loc);
2398 return tem;
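/* Illustrative sketch (editorial, not part of the original source):
   typical middle-end use of the routine above; ARG stands for some
   expression of integral type.  */
#if 0
  /* For an INTEGER_CST this folds to a REAL_CST via fold_convert_const;
     for other operands it builds FLOAT_EXPR <arg>.  */
  tree widened = fold_convert_loc (UNKNOWN_LOCATION, double_type_node, arg);
#endif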
2401 /* Return false if expr can be assumed not to be an lvalue, true
2402 otherwise. */
2404 static bool
2405 maybe_lvalue_p (const_tree x)
2407 /* We only need to wrap lvalue tree codes. */
2408 switch (TREE_CODE (x))
2410 case VAR_DECL:
2411 case PARM_DECL:
2412 case RESULT_DECL:
2413 case LABEL_DECL:
2414 case FUNCTION_DECL:
2415 case SSA_NAME:
2417 case COMPONENT_REF:
2418 case MEM_REF:
2419 case INDIRECT_REF:
2420 case ARRAY_REF:
2421 case ARRAY_RANGE_REF:
2422 case BIT_FIELD_REF:
2423 case OBJ_TYPE_REF:
2425 case REALPART_EXPR:
2426 case IMAGPART_EXPR:
2427 case PREINCREMENT_EXPR:
2428 case PREDECREMENT_EXPR:
2429 case SAVE_EXPR:
2430 case TRY_CATCH_EXPR:
2431 case WITH_CLEANUP_EXPR:
2432 case COMPOUND_EXPR:
2433 case MODIFY_EXPR:
2434 case TARGET_EXPR:
2435 case COND_EXPR:
2436 case BIND_EXPR:
2437 break;
2439 default:
2440 /* Assume the worst for front-end tree codes. */
2441 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2442 break;
2443 return false;
2446 return true;
2449 /* Return an expr equal to X but certainly not valid as an lvalue. */
2451 tree
2452 non_lvalue_loc (location_t loc, tree x)
2454 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2455 us. */
2456 if (in_gimple_form)
2457 return x;
2459 if (! maybe_lvalue_p (x))
2460 return x;
2461 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2464 /* When pedantic, return an expr equal to X but certainly not valid as a
2465 pedantic lvalue. Otherwise, return X. */
2467 static tree
2468 pedantic_non_lvalue_loc (location_t loc, tree x)
2470 return protected_set_expr_location_unshare (x, loc);
2473 /* Given a tree comparison code, return the code that is the logical inverse.
2474 It is generally not safe to do this for floating-point comparisons, except
2475 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2476 ERROR_MARK in this case. */
2478 enum tree_code
2479 invert_tree_comparison (enum tree_code code, bool honor_nans)
2481 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2482 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2483 return ERROR_MARK;
2485 switch (code)
2487 case EQ_EXPR:
2488 return NE_EXPR;
2489 case NE_EXPR:
2490 return EQ_EXPR;
2491 case GT_EXPR:
2492 return honor_nans ? UNLE_EXPR : LE_EXPR;
2493 case GE_EXPR:
2494 return honor_nans ? UNLT_EXPR : LT_EXPR;
2495 case LT_EXPR:
2496 return honor_nans ? UNGE_EXPR : GE_EXPR;
2497 case LE_EXPR:
2498 return honor_nans ? UNGT_EXPR : GT_EXPR;
2499 case LTGT_EXPR:
2500 return UNEQ_EXPR;
2501 case UNEQ_EXPR:
2502 return LTGT_EXPR;
2503 case UNGT_EXPR:
2504 return LE_EXPR;
2505 case UNGE_EXPR:
2506 return LT_EXPR;
2507 case UNLT_EXPR:
2508 return GE_EXPR;
2509 case UNLE_EXPR:
2510 return GT_EXPR;
2511 case ORDERED_EXPR:
2512 return UNORDERED_EXPR;
2513 case UNORDERED_EXPR:
2514 return ORDERED_EXPR;
2515 default:
2516 gcc_unreachable ();
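/* Illustrative sketch (editorial, not part of the original source): what
   the NaN handling above means in practice.  */
#if 0
  invert_tree_comparison (LT_EXPR, false);  /* GE_EXPR */
  /* With NaNs honored, !(a < b) must also be true for unordered
     operands, so the inverse is UNGE_EXPR (assuming
     -fno-trapping-math):  */
  invert_tree_comparison (LT_EXPR, true);   /* UNGE_EXPR */
  /* With flag_trapping_math also set, the result would be ERROR_MARK
     for everything except EQ/NE/ORDERED/UNORDERED, since the unordered
     variants raise different exceptions.  */
#endif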
2520 /* Similar, but return the comparison that results if the operands are
2521 swapped. This is safe for floating-point. */
2523 enum tree_code
2524 swap_tree_comparison (enum tree_code code)
2526 switch (code)
2528 case EQ_EXPR:
2529 case NE_EXPR:
2530 case ORDERED_EXPR:
2531 case UNORDERED_EXPR:
2532 case LTGT_EXPR:
2533 case UNEQ_EXPR:
2534 return code;
2535 case GT_EXPR:
2536 return LT_EXPR;
2537 case GE_EXPR:
2538 return LE_EXPR;
2539 case LT_EXPR:
2540 return GT_EXPR;
2541 case LE_EXPR:
2542 return GE_EXPR;
2543 case UNGT_EXPR:
2544 return UNLT_EXPR;
2545 case UNGE_EXPR:
2546 return UNLE_EXPR;
2547 case UNLT_EXPR:
2548 return UNGT_EXPR;
2549 case UNLE_EXPR:
2550 return UNGE_EXPR;
2551 default:
2552 gcc_unreachable ();
2557 /* Convert a comparison tree code from an enum tree_code representation
2558 into a compcode bit-based encoding. This function is the inverse of
2559 compcode_to_comparison. */
2561 static enum comparison_code
2562 comparison_to_compcode (enum tree_code code)
2564 switch (code)
2566 case LT_EXPR:
2567 return COMPCODE_LT;
2568 case EQ_EXPR:
2569 return COMPCODE_EQ;
2570 case LE_EXPR:
2571 return COMPCODE_LE;
2572 case GT_EXPR:
2573 return COMPCODE_GT;
2574 case NE_EXPR:
2575 return COMPCODE_NE;
2576 case GE_EXPR:
2577 return COMPCODE_GE;
2578 case ORDERED_EXPR:
2579 return COMPCODE_ORD;
2580 case UNORDERED_EXPR:
2581 return COMPCODE_UNORD;
2582 case UNLT_EXPR:
2583 return COMPCODE_UNLT;
2584 case UNEQ_EXPR:
2585 return COMPCODE_UNEQ;
2586 case UNLE_EXPR:
2587 return COMPCODE_UNLE;
2588 case UNGT_EXPR:
2589 return COMPCODE_UNGT;
2590 case LTGT_EXPR:
2591 return COMPCODE_LTGT;
2592 case UNGE_EXPR:
2593 return COMPCODE_UNGE;
2594 default:
2595 gcc_unreachable ();
2599 /* Convert a compcode bit-based encoding of a comparison operator back
2600 to GCC's enum tree_code representation. This function is the
2601 inverse of comparison_to_compcode. */
2603 static enum tree_code
2604 compcode_to_comparison (enum comparison_code code)
2606 switch (code)
2608 case COMPCODE_LT:
2609 return LT_EXPR;
2610 case COMPCODE_EQ:
2611 return EQ_EXPR;
2612 case COMPCODE_LE:
2613 return LE_EXPR;
2614 case COMPCODE_GT:
2615 return GT_EXPR;
2616 case COMPCODE_NE:
2617 return NE_EXPR;
2618 case COMPCODE_GE:
2619 return GE_EXPR;
2620 case COMPCODE_ORD:
2621 return ORDERED_EXPR;
2622 case COMPCODE_UNORD:
2623 return UNORDERED_EXPR;
2624 case COMPCODE_UNLT:
2625 return UNLT_EXPR;
2626 case COMPCODE_UNEQ:
2627 return UNEQ_EXPR;
2628 case COMPCODE_UNLE:
2629 return UNLE_EXPR;
2630 case COMPCODE_UNGT:
2631 return UNGT_EXPR;
2632 case COMPCODE_LTGT:
2633 return LTGT_EXPR;
2634 case COMPCODE_UNGE:
2635 return UNGE_EXPR;
2636 default:
2637 gcc_unreachable ();
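/* Illustrative sketch (editorial, not part of the original source): the
   encoding assigns LT, EQ, GT and UNORD one bit each, so logically
   combining comparisons becomes plain bit arithmetic.  */
#if 0
  gcc_checking_assert ((COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE);
  gcc_checking_assert ((COMPCODE_LT & COMPCODE_GE) == COMPCODE_FALSE);
  /* NE is "less, greater, or unordered", which is why NaN != NaN holds
     under IEEE semantics:  */
  gcc_checking_assert ((COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD)
		       == COMPCODE_NE);
#endif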
2641 /* Return a tree for the comparison which is the combination of
2642 doing the AND or OR (depending on CODE) of the two operations LCODE
2643 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2644 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2645 if this makes the transformation invalid. */
2647 tree
2648 combine_comparisons (location_t loc,
2649 enum tree_code code, enum tree_code lcode,
2650 enum tree_code rcode, tree truth_type,
2651 tree ll_arg, tree lr_arg)
2653 bool honor_nans = HONOR_NANS (ll_arg);
2654 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2655 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2656 int compcode;
2658 switch (code)
2660 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2661 compcode = lcompcode & rcompcode;
2662 break;
2664 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2665 compcode = lcompcode | rcompcode;
2666 break;
2668 default:
2669 return NULL_TREE;
2672 if (!honor_nans)
2674 /* Eliminate unordered comparisons, as well as LTGT and ORD
2675 which are not used unless the mode has NaNs. */
2676 compcode &= ~COMPCODE_UNORD;
2677 if (compcode == COMPCODE_LTGT)
2678 compcode = COMPCODE_NE;
2679 else if (compcode == COMPCODE_ORD)
2680 compcode = COMPCODE_TRUE;
2682 else if (flag_trapping_math)
2684 /* Check that the original operation and the optimized ones will trap
2685 under the same condition. */
2686 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2687 && (lcompcode != COMPCODE_EQ)
2688 && (lcompcode != COMPCODE_ORD);
2689 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2690 && (rcompcode != COMPCODE_EQ)
2691 && (rcompcode != COMPCODE_ORD);
2692 bool trap = (compcode & COMPCODE_UNORD) == 0
2693 && (compcode != COMPCODE_EQ)
2694 && (compcode != COMPCODE_ORD);
2696 /* In a short-circuited boolean expression the LHS might be
2697 such that the RHS, if evaluated, will never trap. For
2698 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2699 if neither x nor y is NaN. (This is a mixed blessing: for
2700 example, the expression above will never trap, hence
2701 optimizing it to x < y would be invalid). */
2702 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2703 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2704 rtrap = false;
2706 /* If the comparison was short-circuited, and only the RHS
2707 trapped, we may now generate a spurious trap. */
2708 if (rtrap && !ltrap
2709 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2710 return NULL_TREE;
2712 /* If we changed the conditions that cause a trap, we lose. */
2713 if ((ltrap || rtrap) != trap)
2714 return NULL_TREE;
2717 if (compcode == COMPCODE_TRUE)
2718 return constant_boolean_node (true, truth_type);
2719 else if (compcode == COMPCODE_FALSE)
2720 return constant_boolean_node (false, truth_type);
2721 else
2723 enum tree_code tcode;
2725 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2726 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
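/* Illustrative sketch (editorial, not part of the original source); LOC,
   X and Y below are placeholders.  */
#if 0
  /* (x < y) || (x == y) combines to x <= y, since LT | EQ == LE:  */
  tree t1 = combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
				 boolean_type_node, x, y);
  /* (x < y) && (x > y) combines to constant false, since LT & GT == 0:  */
  tree t2 = combine_comparisons (loc, TRUTH_ANDIF_EXPR, LT_EXPR, GT_EXPR,
				 boolean_type_node, x, y);
  /* When NaNs are honored and -ftrapping-math is on, NULL_TREE may come
     back instead if the combination would change which exceptions are
     raised.  */
#endif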
2730 /* Return nonzero if two operands (typically of the same tree node)
2731 are necessarily equal. FLAGS modifies behavior as follows:
2733 If OEP_ONLY_CONST is set, only return nonzero for constants.
2734 This function tests whether the operands are indistinguishable;
2735 it does not test whether they are equal using C's == operation.
2736 The distinction is important for IEEE floating point, because
2737 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2738 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2740 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2741 even though it may hold multiple values during a function.
2742 This is because a GCC tree node guarantees that nothing else is
2743 executed between the evaluation of its "operands" (which may often
2744 be evaluated in arbitrary order). Hence if the operands themselves
2745 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2746 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2747 unset means assuming isochronic (or instantaneous) tree equivalence.
2748 Unless comparing arbitrary expression trees, such as from different
2749 statements, this flag can usually be left unset.
2751 If OEP_PURE_SAME is set, then pure functions with identical arguments
2752 are considered the same. It is used when the caller has other ways
2753 to ensure that global memory is unchanged in between.
2755 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2756 not values of expressions.
2758 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2759 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2761 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2762 any operand with side effects. This is unnecessarily conservative in
2763 cases where we know that arg0 and arg1 are in disjoint code paths (such as in
2764 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2765 addresses with TREE_CONSTANT flag set so we know that &var == &var
2766 even if var is volatile. */
2768 int
2769 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2771 /* When checking, verify at the outermost operand_equal_p call that
2772 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
2773 hash value. */
2774 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2776 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2778 if (arg0 != arg1)
2780 inchash::hash hstate0 (0), hstate1 (0);
2781 inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2782 inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2783 hashval_t h0 = hstate0.end ();
2784 hashval_t h1 = hstate1.end ();
2785 gcc_assert (h0 == h1);
2787 return 1;
2789 else
2790 return 0;
2793 /* If either is ERROR_MARK, they aren't equal. */
2794 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2795 || TREE_TYPE (arg0) == error_mark_node
2796 || TREE_TYPE (arg1) == error_mark_node)
2797 return 0;
2799 /* Similarly, if either does not have a type (like a released SSA name),
2800 they aren't equal. */
2801 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2802 return 0;
2804 /* We cannot consider pointers to different address spaces equal. */
2805 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2806 && POINTER_TYPE_P (TREE_TYPE (arg1))
2807 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2808 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2809 return 0;
2811 /* Check equality of integer constants before bailing out due to
2812 precision differences. */
2813 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2815 /* Address of INTEGER_CST is not defined; check that we did not forget
2816 to drop the OEP_ADDRESS_OF flags. */
2817 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2818 return tree_int_cst_equal (arg0, arg1);
2821 if (!(flags & OEP_ADDRESS_OF))
2823 /* If the two types don't have the same signedness, then we can't consider
2824 them equal. We must check this before the STRIP_NOPS calls
2825 because they may change the signedness of the arguments. As pointers
2826 strictly don't have a signedness, require either two pointers or
2827 two non-pointers as well. */
2828 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2829 || POINTER_TYPE_P (TREE_TYPE (arg0))
2830 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2831 return 0;
2833 /* If the two types don't have the same precision, then it is not safe
2834 to strip NOPs. */
2835 if (element_precision (TREE_TYPE (arg0))
2836 != element_precision (TREE_TYPE (arg1)))
2837 return 0;
2839 STRIP_NOPS (arg0);
2840 STRIP_NOPS (arg1);
2842 #if 0
2843 /* FIXME: the Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
2844 sanity check once the issue is solved. */
2845 else
2846 /* Addresses of conversions and SSA_NAMEs (and many other things)
2847 are not defined. Check that we did not forget to drop the
2848 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2849 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2850 && TREE_CODE (arg0) != SSA_NAME);
2851 #endif
2853 /* In case both args are comparisons but with different comparison
2854 code, try to swap the comparison operands of one arg to produce
2855 a match and compare that variant. */
2856 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2857 && COMPARISON_CLASS_P (arg0)
2858 && COMPARISON_CLASS_P (arg1))
2860 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2862 if (TREE_CODE (arg0) == swap_code)
2863 return operand_equal_p (TREE_OPERAND (arg0, 0),
2864 TREE_OPERAND (arg1, 1), flags)
2865 && operand_equal_p (TREE_OPERAND (arg0, 1),
2866 TREE_OPERAND (arg1, 0), flags);
2869 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2871 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2872 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2874 else if (flags & OEP_ADDRESS_OF)
2876 /* If we are interested in comparing addresses, ignore
2877 MEM_REF wrappings of the base that can appear just for
2878 TBAA reasons. */
2879 if (TREE_CODE (arg0) == MEM_REF
2880 && DECL_P (arg1)
2881 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2882 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2883 && integer_zerop (TREE_OPERAND (arg0, 1)))
2884 return 1;
2885 else if (TREE_CODE (arg1) == MEM_REF
2886 && DECL_P (arg0)
2887 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2888 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2889 && integer_zerop (TREE_OPERAND (arg1, 1)))
2890 return 1;
2891 return 0;
2893 else
2894 return 0;
2897 /* When not checking addresses, this is needed for conversions and for
2898 COMPONENT_REF. Might as well play it safe and always test this. */
2899 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2900 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2901 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2902 && !(flags & OEP_ADDRESS_OF)))
2903 return 0;
2905 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2906 We don't care about side effects in that case because the SAVE_EXPR
2907 takes care of that for us. In all other cases, two expressions are
2908 equal if they have no side effects. If we have two identical
2909 expressions with side effects that should be treated the same due
2910 to the only side effects being identical SAVE_EXPR's, that will
2911 be detected in the recursive calls below.
2912 If we are taking an invariant address of two identical objects
2913 they are necessarily equal as well. */
2914 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2915 && (TREE_CODE (arg0) == SAVE_EXPR
2916 || (flags & OEP_MATCH_SIDE_EFFECTS)
2917 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2918 return 1;
2920 /* Next handle constant cases, those for which we can return 1 even
2921 if ONLY_CONST is set. */
2922 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2923 switch (TREE_CODE (arg0))
2925 case INTEGER_CST:
2926 return tree_int_cst_equal (arg0, arg1);
2928 case FIXED_CST:
2929 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2930 TREE_FIXED_CST (arg1));
2932 case REAL_CST:
2933 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2934 return 1;
2937 if (!HONOR_SIGNED_ZEROS (arg0))
2939 /* If we do not distinguish between signed and unsigned zero,
2940 consider them equal. */
2941 if (real_zerop (arg0) && real_zerop (arg1))
2942 return 1;
2944 return 0;
2946 case VECTOR_CST:
2948 unsigned i;
2950 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2951 return 0;
2953 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2955 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2956 VECTOR_CST_ELT (arg1, i), flags))
2957 return 0;
2959 return 1;
2962 case COMPLEX_CST:
2963 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2964 flags)
2965 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2966 flags));
2968 case STRING_CST:
2969 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2970 && ! memcmp (TREE_STRING_POINTER (arg0),
2971 TREE_STRING_POINTER (arg1),
2972 TREE_STRING_LENGTH (arg0)));
2974 case ADDR_EXPR:
2975 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2976 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2977 flags | OEP_ADDRESS_OF
2978 | OEP_MATCH_SIDE_EFFECTS);
2979 case CONSTRUCTOR:
2980 /* In GIMPLE empty constructors are allowed in initializers of
2981 aggregates. */
2982 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
2983 default:
2984 break;
2987 if (flags & OEP_ONLY_CONST)
2988 return 0;
2990 /* Define macros to test an operand from arg0 and arg1 for equality and a
2991 variant that allows null and views null as being different from any
2992 non-null value. In the latter case, if either is null, then both
2993 must be; otherwise, do the normal comparison. */
2994 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2995 TREE_OPERAND (arg1, N), flags)
2997 #define OP_SAME_WITH_NULL(N) \
2998 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2999 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3001 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3003 case tcc_unary:
3004 /* Two conversions are equal only if signedness and modes match. */
3005 switch (TREE_CODE (arg0))
3007 CASE_CONVERT:
3008 case FIX_TRUNC_EXPR:
3009 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3010 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3011 return 0;
3012 break;
3013 default:
3014 break;
3017 return OP_SAME (0);
3020 case tcc_comparison:
3021 case tcc_binary:
3022 if (OP_SAME (0) && OP_SAME (1))
3023 return 1;
3025 /* For commutative ops, allow the other order. */
3026 return (commutative_tree_code (TREE_CODE (arg0))
3027 && operand_equal_p (TREE_OPERAND (arg0, 0),
3028 TREE_OPERAND (arg1, 1), flags)
3029 && operand_equal_p (TREE_OPERAND (arg0, 1),
3030 TREE_OPERAND (arg1, 0), flags));
3032 case tcc_reference:
3033 /* If either of the pointer (or reference) expressions we are
3034 dereferencing contain a side effect, these cannot be equal,
3035 but their addresses can be. */
3036 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3037 && (TREE_SIDE_EFFECTS (arg0)
3038 || TREE_SIDE_EFFECTS (arg1)))
3039 return 0;
3041 switch (TREE_CODE (arg0))
3043 case INDIRECT_REF:
3044 if (!(flags & OEP_ADDRESS_OF)
3045 && (TYPE_ALIGN (TREE_TYPE (arg0))
3046 != TYPE_ALIGN (TREE_TYPE (arg1))))
3047 return 0;
3048 flags &= ~OEP_ADDRESS_OF;
3049 return OP_SAME (0);
3051 case IMAGPART_EXPR:
3052 /* Require the same offset. */
3053 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3054 TYPE_SIZE (TREE_TYPE (arg1)),
3055 flags & ~OEP_ADDRESS_OF))
3056 return 0;
3058 /* Fallthru. */
3059 case REALPART_EXPR:
3060 case VIEW_CONVERT_EXPR:
3061 return OP_SAME (0);
3063 case TARGET_MEM_REF:
3064 case MEM_REF:
3065 if (!(flags & OEP_ADDRESS_OF))
3067 /* Require equal access sizes */
3068 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3069 && (!TYPE_SIZE (TREE_TYPE (arg0))
3070 || !TYPE_SIZE (TREE_TYPE (arg1))
3071 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3072 TYPE_SIZE (TREE_TYPE (arg1)),
3073 flags)))
3074 return 0;
3075 /* Verify that access happens in similar types. */
3076 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3077 return 0;
3078 /* Verify that accesses are TBAA compatible. */
3079 if (!alias_ptr_types_compatible_p
3080 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3081 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3082 || (MR_DEPENDENCE_CLIQUE (arg0)
3083 != MR_DEPENDENCE_CLIQUE (arg1))
3084 || (MR_DEPENDENCE_BASE (arg0)
3085 != MR_DEPENDENCE_BASE (arg1)))
3086 return 0;
3087 /* Verify that alignment is compatible. */
3088 if (TYPE_ALIGN (TREE_TYPE (arg0))
3089 != TYPE_ALIGN (TREE_TYPE (arg1)))
3090 return 0;
3092 flags &= ~OEP_ADDRESS_OF;
3093 return (OP_SAME (0) && OP_SAME (1)
3094 /* TARGET_MEM_REFs require equal extra operands. */
3095 && (TREE_CODE (arg0) != TARGET_MEM_REF
3096 || (OP_SAME_WITH_NULL (2)
3097 && OP_SAME_WITH_NULL (3)
3098 && OP_SAME_WITH_NULL (4))));
3100 case ARRAY_REF:
3101 case ARRAY_RANGE_REF:
3102 if (!OP_SAME (0))
3103 return 0;
3104 flags &= ~OEP_ADDRESS_OF;
3105 /* First compare the array index by value, as the indices may have
3106 different types but the same value here. */
3107 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3108 TREE_OPERAND (arg1, 1))
3109 || OP_SAME (1))
3110 && OP_SAME_WITH_NULL (2)
3111 && OP_SAME_WITH_NULL (3)
3112 /* Compare low bound and element size as with OEP_ADDRESS_OF
3113 we have to account for the offset of the ref. */
3114 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3115 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3116 || (operand_equal_p (array_ref_low_bound
3117 (CONST_CAST_TREE (arg0)),
3118 array_ref_low_bound
3119 (CONST_CAST_TREE (arg1)), flags)
3120 && operand_equal_p (array_ref_element_size
3121 (CONST_CAST_TREE (arg0)),
3122 array_ref_element_size
3123 (CONST_CAST_TREE (arg1)),
3124 flags))));
3126 case COMPONENT_REF:
3127 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3128 may be NULL when we're called to compare MEM_EXPRs. */
3129 if (!OP_SAME_WITH_NULL (0)
3130 || !OP_SAME (1))
3131 return 0;
3132 flags &= ~OEP_ADDRESS_OF;
3133 return OP_SAME_WITH_NULL (2);
3135 case BIT_FIELD_REF:
3136 if (!OP_SAME (0))
3137 return 0;
3138 flags &= ~OEP_ADDRESS_OF;
3139 return OP_SAME (1) && OP_SAME (2);
3141 default:
3142 return 0;
3145 case tcc_expression:
3146 switch (TREE_CODE (arg0))
3148 case ADDR_EXPR:
3149 /* Be sure we pass right ADDRESS_OF flag. */
3150 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3151 return operand_equal_p (TREE_OPERAND (arg0, 0),
3152 TREE_OPERAND (arg1, 0),
3153 flags | OEP_ADDRESS_OF);
3155 case TRUTH_NOT_EXPR:
3156 return OP_SAME (0);
3158 case TRUTH_ANDIF_EXPR:
3159 case TRUTH_ORIF_EXPR:
3160 return OP_SAME (0) && OP_SAME (1);
3162 case FMA_EXPR:
3163 case WIDEN_MULT_PLUS_EXPR:
3164 case WIDEN_MULT_MINUS_EXPR:
3165 if (!OP_SAME (2))
3166 return 0;
3167 /* The multiplication operands are commutative. */
3168 /* FALLTHRU */
3170 case TRUTH_AND_EXPR:
3171 case TRUTH_OR_EXPR:
3172 case TRUTH_XOR_EXPR:
3173 if (OP_SAME (0) && OP_SAME (1))
3174 return 1;
3176 /* Otherwise take into account that this is a commutative operation. */
3177 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3178 TREE_OPERAND (arg1, 1), flags)
3179 && operand_equal_p (TREE_OPERAND (arg0, 1),
3180 TREE_OPERAND (arg1, 0), flags));
3182 case COND_EXPR:
3183 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3184 return 0;
3185 flags &= ~OEP_ADDRESS_OF;
3186 return OP_SAME (0);
3188 case VEC_COND_EXPR:
3189 case DOT_PROD_EXPR:
3190 case BIT_INSERT_EXPR:
3191 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3193 case MODIFY_EXPR:
3194 case INIT_EXPR:
3195 case COMPOUND_EXPR:
3196 case PREDECREMENT_EXPR:
3197 case PREINCREMENT_EXPR:
3198 case POSTDECREMENT_EXPR:
3199 case POSTINCREMENT_EXPR:
3200 if (flags & OEP_LEXICOGRAPHIC)
3201 return OP_SAME (0) && OP_SAME (1);
3202 return 0;
3204 case CLEANUP_POINT_EXPR:
3205 case EXPR_STMT:
3206 if (flags & OEP_LEXICOGRAPHIC)
3207 return OP_SAME (0);
3208 return 0;
3210 default:
3211 return 0;
3214 case tcc_vl_exp:
3215 switch (TREE_CODE (arg0))
3217 case CALL_EXPR:
3218 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3219 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3220 /* If one CALL_EXPR is an internal function call and the other is a
3221 normal function call, then they are not equal. */
3222 return 0;
3223 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3225 /* If the CALL_EXPRs call different internal functions, then they
3226 are not equal. */
3227 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3228 return 0;
3230 else
3232 /* If the CALL_EXPRs call different functions, then they are not
3233 equal. */
3234 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3235 flags))
3236 return 0;
3239 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3241 unsigned int cef = call_expr_flags (arg0);
3242 if (flags & OEP_PURE_SAME)
3243 cef &= ECF_CONST | ECF_PURE;
3244 else
3245 cef &= ECF_CONST;
3246 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3247 return 0;
3250 /* Now see if all the arguments are the same. */
3252 const_call_expr_arg_iterator iter0, iter1;
3253 const_tree a0, a1;
3254 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3255 a1 = first_const_call_expr_arg (arg1, &iter1);
3256 a0 && a1;
3257 a0 = next_const_call_expr_arg (&iter0),
3258 a1 = next_const_call_expr_arg (&iter1))
3259 if (! operand_equal_p (a0, a1, flags))
3260 return 0;
3262 /* If we get here and both argument lists are exhausted
3263 then the CALL_EXPRs are equal. */
3264 return ! (a0 || a1);
3266 default:
3267 return 0;
3270 case tcc_declaration:
3271 /* Consider __builtin_sqrt equal to sqrt. */
3272 return (TREE_CODE (arg0) == FUNCTION_DECL
3273 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3274 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3275 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3277 case tcc_exceptional:
3278 if (TREE_CODE (arg0) == CONSTRUCTOR)
3280 /* In GIMPLE constructors are used only to build vectors from
3281 elements. Individual elements in the constructor must be
3282 indexed in increasing order and form an initial sequence.
3284 We make no effort to compare constructors in GENERIC.
3285 (see sem_variable::equals in ipa-icf which can do so for
3286 constants). */
3287 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3288 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3289 return 0;
3291 /* Be sure that vectors constructed have the same representation.
3292 So far we have only checked that the element precisions and modes
3293 match. Vectors may be BLKmode, so also check that the numbers of
3294 parts match. */
3295 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3296 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3297 return 0;
3299 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3300 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3301 unsigned int len = vec_safe_length (v0);
3303 if (len != vec_safe_length (v1))
3304 return 0;
3306 for (unsigned int i = 0; i < len; i++)
3308 constructor_elt *c0 = &(*v0)[i];
3309 constructor_elt *c1 = &(*v1)[i];
3311 if (!operand_equal_p (c0->value, c1->value, flags)
3312 /* In GIMPLE the indexes can be either NULL or matching i.
3313 Double check this so we won't get false
3314 positives for GENERIC. */
3315 || (c0->index
3316 && (TREE_CODE (c0->index) != INTEGER_CST
3317 || !compare_tree_int (c0->index, i)))
3318 || (c1->index
3319 && (TREE_CODE (c1->index) != INTEGER_CST
3320 || !compare_tree_int (c1->index, i))))
3321 return 0;
3323 return 1;
3325 else if (TREE_CODE (arg0) == STATEMENT_LIST
3326 && (flags & OEP_LEXICOGRAPHIC))
3328 /* Compare the STATEMENT_LISTs. */
3329 tree_stmt_iterator tsi1, tsi2;
3330 tree body1 = CONST_CAST_TREE (arg0);
3331 tree body2 = CONST_CAST_TREE (arg1);
3332 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3333 tsi_next (&tsi1), tsi_next (&tsi2))
3335 /* The lists don't have the same number of statements. */
3336 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3337 return 0;
3338 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3339 return 1;
3340 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3341 OEP_LEXICOGRAPHIC))
3342 return 0;
3345 return 0;
3347 case tcc_statement:
3348 switch (TREE_CODE (arg0))
3350 case RETURN_EXPR:
3351 if (flags & OEP_LEXICOGRAPHIC)
3352 return OP_SAME_WITH_NULL (0);
3353 return 0;
3354 default:
3355 return 0;
3358 default:
3359 return 0;
3362 #undef OP_SAME
3363 #undef OP_SAME_WITH_NULL
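/* Illustrative sketch (editorial, not part of the original source): the
   two most common ways this predicate is called; EXPR0/EXPR1 and
   ADDR0/ADDR1 are placeholders.  */
#if 0
  /* Value equality, looking through same-width NOP/CONVERT wrappers:  */
  bool same_value = operand_equal_p (expr0, expr1, 0);
  /* Address equality: with OEP_ADDRESS_OF, &var == &var holds even for
     volatile VAR, and MEM_REF wrappers added purely for TBAA are looked
     through.  */
  bool same_address = operand_equal_p (addr0, addr1, OEP_ADDRESS_OF);
#endif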
3366 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3367 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3369 When in doubt, return 0. */
3371 static int
3372 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3374 int unsignedp1, unsignedpo;
3375 tree primarg0, primarg1, primother;
3376 unsigned int correct_width;
3378 if (operand_equal_p (arg0, arg1, 0))
3379 return 1;
3381 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3382 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3383 return 0;
3385 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3386 and see if the inner values are the same. This removes any
3387 signedness comparison, which doesn't matter here. */
3388 primarg0 = arg0, primarg1 = arg1;
3389 STRIP_NOPS (primarg0);
3390 STRIP_NOPS (primarg1);
3391 if (operand_equal_p (primarg0, primarg1, 0))
3392 return 1;
3394 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3395 actual comparison operand, ARG0.
3397 First throw away any conversions to wider types
3398 already present in the operands. */
3400 primarg1 = get_narrower (arg1, &unsignedp1);
3401 primother = get_narrower (other, &unsignedpo);
3403 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3404 if (unsignedp1 == unsignedpo
3405 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3406 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3408 tree type = TREE_TYPE (arg0);
3410 /* Make sure shorter operand is extended the right way
3411 to match the longer operand. */
3412 primarg1 = fold_convert (signed_or_unsigned_type_for
3413 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3415 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3416 return 1;
3419 return 0;
3422 /* See if ARG is an expression that is either a comparison or is performing
3423 arithmetic on comparisons. The comparisons must only be comparing
3424 two different values, which will be stored in *CVAL1 and *CVAL2; if
3425 they are nonzero it means that some operands have already been found.
3426 No variables may be used anywhere else in the expression except in the
3427 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3428 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3430 If this is true, return 1. Otherwise, return zero. */
3432 static int
3433 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3435 enum tree_code code = TREE_CODE (arg);
3436 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3438 /* We can handle some of the tcc_expression cases here. */
3439 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3440 tclass = tcc_unary;
3441 else if (tclass == tcc_expression
3442 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3443 || code == COMPOUND_EXPR))
3444 tclass = tcc_binary;
3446 else if (tclass == tcc_expression && code == SAVE_EXPR
3447 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3449 /* If we've already found a CVAL1 or CVAL2, this expression is
3450 too complex to handle. */
3451 if (*cval1 || *cval2)
3452 return 0;
3454 tclass = tcc_unary;
3455 *save_p = 1;
3458 switch (tclass)
3460 case tcc_unary:
3461 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3463 case tcc_binary:
3464 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3465 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3466 cval1, cval2, save_p));
3468 case tcc_constant:
3469 return 1;
3471 case tcc_expression:
3472 if (code == COND_EXPR)
3473 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3474 cval1, cval2, save_p)
3475 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3476 cval1, cval2, save_p)
3477 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3478 cval1, cval2, save_p));
3479 return 0;
3481 case tcc_comparison:
3482 /* First see if we can handle the first operand, then the second. For
3483 the second operand, we know *CVAL1 can't be zero. It must be that
3484 one side of the comparison is each of the values; test for the
3485 case where this isn't true by failing if the two operands
3486 are the same. */
3488 if (operand_equal_p (TREE_OPERAND (arg, 0),
3489 TREE_OPERAND (arg, 1), 0))
3490 return 0;
3492 if (*cval1 == 0)
3493 *cval1 = TREE_OPERAND (arg, 0);
3494 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3496 else if (*cval2 == 0)
3497 *cval2 = TREE_OPERAND (arg, 0);
3498 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3500 else
3501 return 0;
3503 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3505 else if (*cval2 == 0)
3506 *cval2 = TREE_OPERAND (arg, 1);
3507 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3509 else
3510 return 0;
3512 return 1;
3514 default:
3515 return 0;
3519 /* ARG is a tree that is known to contain just arithmetic operations and
3520 comparisons. Evaluate the operations in the tree substituting NEW0 for
3521 any occurrence of OLD0 as an operand of a comparison and likewise for
3522 NEW1 and OLD1. */
3524 static tree
3525 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3526 tree old1, tree new1)
3528 tree type = TREE_TYPE (arg);
3529 enum tree_code code = TREE_CODE (arg);
3530 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3532 /* We can handle some of the tcc_expression cases here. */
3533 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3534 tclass = tcc_unary;
3535 else if (tclass == tcc_expression
3536 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3537 tclass = tcc_binary;
3539 switch (tclass)
3541 case tcc_unary:
3542 return fold_build1_loc (loc, code, type,
3543 eval_subst (loc, TREE_OPERAND (arg, 0),
3544 old0, new0, old1, new1));
3546 case tcc_binary:
3547 return fold_build2_loc (loc, code, type,
3548 eval_subst (loc, TREE_OPERAND (arg, 0),
3549 old0, new0, old1, new1),
3550 eval_subst (loc, TREE_OPERAND (arg, 1),
3551 old0, new0, old1, new1));
3553 case tcc_expression:
3554 switch (code)
3556 case SAVE_EXPR:
3557 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3558 old1, new1);
3560 case COMPOUND_EXPR:
3561 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3562 old1, new1);
3564 case COND_EXPR:
3565 return fold_build3_loc (loc, code, type,
3566 eval_subst (loc, TREE_OPERAND (arg, 0),
3567 old0, new0, old1, new1),
3568 eval_subst (loc, TREE_OPERAND (arg, 1),
3569 old0, new0, old1, new1),
3570 eval_subst (loc, TREE_OPERAND (arg, 2),
3571 old0, new0, old1, new1));
3572 default:
3573 break;
3575 /* Fall through - ??? */
3577 case tcc_comparison:
3579 tree arg0 = TREE_OPERAND (arg, 0);
3580 tree arg1 = TREE_OPERAND (arg, 1);
3582 /* We need to check both for exact equality and tree equality. The
3583 former will be true if the operand has a side-effect. In that
3584 case, we know the operand occurred exactly once. */
3586 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3587 arg0 = new0;
3588 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3589 arg0 = new1;
3591 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3592 arg1 = new0;
3593 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3594 arg1 = new1;
3596 return fold_build2_loc (loc, code, type, arg0, arg1);
3599 default:
3600 return arg;
3604 /* Return a tree for the case when the result of an expression is RESULT
3605 converted to TYPE and OMITTED was previously an operand of the expression
3606 but is now not needed (e.g., we folded OMITTED * 0).
3608 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3609 the conversion of RESULT to TYPE. */
3611 tree
3612 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3614 tree t = fold_convert_loc (loc, type, result);
3616 /* If the resulting operand is an empty statement, just return the omitted
3617 statement cast to void. */
3618 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3619 return build1_loc (loc, NOP_EXPR, void_type_node,
3620 fold_ignored_result (omitted));
3622 if (TREE_SIDE_EFFECTS (omitted))
3623 return build2_loc (loc, COMPOUND_EXPR, type,
3624 fold_ignored_result (omitted), t);
3626 return non_lvalue_loc (loc, t);
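/* Illustrative sketch (editorial, not part of the original source):
   folding "f () * 0" cannot simply drop the call, so the zero result is
   chained behind the omitted operand; LOC and CALL are placeholders.  */
#if 0
  /* Yields COMPOUND_EXPR <f (), 0> when CALL has side effects, and
     plain 0 otherwise.  */
  tree t = omit_one_operand_loc (loc, integer_type_node,
				 integer_zero_node, call);
#endif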
3629 /* Return a tree for the case when the result of an expression is RESULT
3630 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3631 of the expression but are now not needed.
3633 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3634 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3635 evaluated before OMITTED2. Otherwise, if neither has side effects,
3636 just do the conversion of RESULT to TYPE. */
3638 tree
3639 omit_two_operands_loc (location_t loc, tree type, tree result,
3640 tree omitted1, tree omitted2)
3642 tree t = fold_convert_loc (loc, type, result);
3644 if (TREE_SIDE_EFFECTS (omitted2))
3645 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3646 if (TREE_SIDE_EFFECTS (omitted1))
3647 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3649 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3653 /* Return a simplified tree node for the truth-negation of ARG. This
3654 never alters ARG itself. We assume that ARG is an operation that
3655 returns a truth value (0 or 1).
3657 FIXME: one would think we would fold the result, but it causes
3658 problems with the dominator optimizer. */
3660 static tree
3661 fold_truth_not_expr (location_t loc, tree arg)
3663 tree type = TREE_TYPE (arg);
3664 enum tree_code code = TREE_CODE (arg);
3665 location_t loc1, loc2;
3667 /* If this is a comparison, we can simply invert it, except for
3668 floating-point non-equality comparisons, in which case we just
3669 enclose a TRUTH_NOT_EXPR around what we have. */
3671 if (TREE_CODE_CLASS (code) == tcc_comparison)
3673 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3674 if (FLOAT_TYPE_P (op_type)
3675 && flag_trapping_math
3676 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3677 && code != NE_EXPR && code != EQ_EXPR)
3678 return NULL_TREE;
3680 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3681 if (code == ERROR_MARK)
3682 return NULL_TREE;
3684 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3685 TREE_OPERAND (arg, 1));
3686 if (TREE_NO_WARNING (arg))
3687 TREE_NO_WARNING (ret) = 1;
3688 return ret;
3691 switch (code)
3693 case INTEGER_CST:
3694 return constant_boolean_node (integer_zerop (arg), type);
3696 case TRUTH_AND_EXPR:
3697 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3698 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3699 return build2_loc (loc, TRUTH_OR_EXPR, type,
3700 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3701 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3703 case TRUTH_OR_EXPR:
3704 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3705 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3706 return build2_loc (loc, TRUTH_AND_EXPR, type,
3707 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3708 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3710 case TRUTH_XOR_EXPR:
3711 /* Here we can invert either operand. We invert the first operand
3712 unless the second operand is a TRUTH_NOT_EXPR in which case our
3713 result is the XOR of the first operand with the inside of the
3714 negation of the second operand. */
3716 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3717 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3718 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3719 else
3720 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3721 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3722 TREE_OPERAND (arg, 1));
3724 case TRUTH_ANDIF_EXPR:
3725 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3726 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3727 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3728 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3729 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3731 case TRUTH_ORIF_EXPR:
3732 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3733 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3734 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3735 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3736 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3738 case TRUTH_NOT_EXPR:
3739 return TREE_OPERAND (arg, 0);
3741 case COND_EXPR:
3743 tree arg1 = TREE_OPERAND (arg, 1);
3744 tree arg2 = TREE_OPERAND (arg, 2);
3746 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3747 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3749 /* A COND_EXPR may have a throw as one operand, which
3750 then has void type. Just leave void operands
3751 as they are. */
3752 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3753 VOID_TYPE_P (TREE_TYPE (arg1))
3754 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3755 VOID_TYPE_P (TREE_TYPE (arg2))
3756 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3759 case COMPOUND_EXPR:
3760 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3761 return build2_loc (loc, COMPOUND_EXPR, type,
3762 TREE_OPERAND (arg, 0),
3763 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3765 case NON_LVALUE_EXPR:
3766 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3767 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3769 CASE_CONVERT:
3770 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3771 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3773 /* fall through */
3775 case FLOAT_EXPR:
3776 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3777 return build1_loc (loc, TREE_CODE (arg), type,
3778 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3780 case BIT_AND_EXPR:
3781 if (!integer_onep (TREE_OPERAND (arg, 1)))
3782 return NULL_TREE;
3783 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3785 case SAVE_EXPR:
3786 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3788 case CLEANUP_POINT_EXPR:
3789 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3790 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3791 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3793 default:
3794 return NULL_TREE;
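/* Illustrative sketch (editorial, not part of the original source): De
   Morgan's laws as applied above; LOC and ARG are placeholders.  */
#if 0
  /* TRUTH_ANDIF_EXPR <a, b> negates to TRUTH_ORIF_EXPR <!a, !b>, and a
     comparison is inverted directly: !(x < y) becomes x >= y, or
     x UNGE y when NaNs are honored.  A floating-point inequality under
     -ftrapping-math yields NULL_TREE, and the caller keeps the
     TRUTH_NOT_EXPR wrapper instead.  */
  tree inverted = fold_truth_not_expr (loc, arg);
#endif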
3798 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3799 assume that ARG is an operation that returns a truth value (0 or 1
3800 for scalars, 0 or -1 for vectors). Return the folded expression if
3801 folding is successful. Otherwise, return NULL_TREE. */
3803 static tree
3804 fold_invert_truthvalue (location_t loc, tree arg)
3806 tree type = TREE_TYPE (arg);
3807 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3808 ? BIT_NOT_EXPR
3809 : TRUTH_NOT_EXPR,
3810 type, arg);
3813 /* Return a simplified tree node for the truth-negation of ARG. This
3814 never alters ARG itself. We assume that ARG is an operation that
3815 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3817 tree
3818 invert_truthvalue_loc (location_t loc, tree arg)
3820 if (TREE_CODE (arg) == ERROR_MARK)
3821 return arg;
3823 tree type = TREE_TYPE (arg);
3824 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3825 ? BIT_NOT_EXPR
3826 : TRUTH_NOT_EXPR,
3827 type, arg);
3830 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3831 with code CODE. This optimization is unsafe. */
3832 static tree
3833 distribute_real_division (location_t loc, enum tree_code code, tree type,
3834 tree arg0, tree arg1)
3836 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3837 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3839 /* (A / C) +- (B / C) -> (A +- B) / C. */
3840 if (mul0 == mul1
3841 && operand_equal_p (TREE_OPERAND (arg0, 1),
3842 TREE_OPERAND (arg1, 1), 0))
3843 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3844 fold_build2_loc (loc, code, type,
3845 TREE_OPERAND (arg0, 0),
3846 TREE_OPERAND (arg1, 0)),
3847 TREE_OPERAND (arg0, 1));
3849 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3850 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3851 TREE_OPERAND (arg1, 0), 0)
3852 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3853 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3855 REAL_VALUE_TYPE r0, r1;
3856 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3857 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3858 if (!mul0)
3859 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3860 if (!mul1)
3861 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3862 real_arithmetic (&r0, code, &r0, &r1);
3863 return fold_build2_loc (loc, MULT_EXPR, type,
3864 TREE_OPERAND (arg0, 0),
3865 build_real (type, r0));
3868 return NULL_TREE;
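
/* Illustrative sketch, not part of GCC: why the transformation above is
   guarded as unsafe.  Under IEEE 754 binary64 the two forms can round
   differently, so real division is only distributed when unsafe math
   optimizations are enabled.  Standalone program; names are invented.  */

#include <stdio.h>

int
main (void)
{
  double a = 1.0, b = 2.0, c = 10.0;
  printf ("%.17g\n", a / c + b / c);   /* prints 0.30000000000000004 */
  printf ("%.17g\n", (a + b) / c);     /* prints 0.29999999999999999 */
  return 0;
}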
3871 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3872 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3873 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
3874 is the original memory reference used to preserve the alias set of
3875 the access. */
3877 static tree
3878 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3879 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3880 int unsignedp, int reversep)
3882 tree result, bftype;
3884 /* Attempt not to lose the access path if possible. */
3885 if (TREE_CODE (orig_inner) == COMPONENT_REF)
3887 tree ninner = TREE_OPERAND (orig_inner, 0);
3888 machine_mode nmode;
3889 HOST_WIDE_INT nbitsize, nbitpos;
3890 tree noffset;
3891 int nunsignedp, nreversep, nvolatilep = 0;
3892 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
3893 &noffset, &nmode, &nunsignedp,
3894 &nreversep, &nvolatilep);
3895 if (base == inner
3896 && noffset == NULL_TREE
3897 && nbitsize >= bitsize
3898 && nbitpos <= bitpos
3899 && bitpos + bitsize <= nbitpos + nbitsize
3900 && !reversep
3901 && !nreversep
3902 && !nvolatilep)
3904 inner = ninner;
3905 bitpos -= nbitpos;
3909 alias_set_type iset = get_alias_set (orig_inner);
3910 if (iset == 0 && get_alias_set (inner) != iset)
3911 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
3912 build_fold_addr_expr (inner),
3913 build_int_cst (ptr_type_node, 0));
3915 if (bitpos == 0 && !reversep)
3917 tree size = TYPE_SIZE (TREE_TYPE (inner));
3918 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3919 || POINTER_TYPE_P (TREE_TYPE (inner)))
3920 && tree_fits_shwi_p (size)
3921 && tree_to_shwi (size) == bitsize)
3922 return fold_convert_loc (loc, type, inner);
3925 bftype = type;
3926 if (TYPE_PRECISION (bftype) != bitsize
3927 || TYPE_UNSIGNED (bftype) == !unsignedp)
3928 bftype = build_nonstandard_integer_type (bitsize, 0);
3930 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3931 size_int (bitsize), bitsize_int (bitpos));
3932 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3934 if (bftype != type)
3935 result = fold_convert_loc (loc, type, result);
3937 return result;
3940 /* Optimize a bit-field compare.
3942 There are two cases: First is a compare against a constant and the
3943 second is a comparison of two items where the fields are at the same
3944 bit position relative to the start of a chunk (byte, halfword, word)
3945 large enough to contain it. In these cases we can avoid the shift
3946 implicit in bitfield extractions.
3948 For constants, we emit a compare of the shifted constant with the
3949 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3950 compared. For two fields at the same position, we do the ANDs with the
3951 similar mask and compare the result of the ANDs.
3953 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3954 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3955 are the left and right operands of the comparison, respectively.
3957 If the optimization described above can be done, we return the resulting
3958 tree. Otherwise we return zero. */
3960 static tree
3961 optimize_bit_field_compare (location_t loc, enum tree_code code,
3962 tree compare_type, tree lhs, tree rhs)
3964 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3965 tree type = TREE_TYPE (lhs);
3966 tree unsigned_type;
3967 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3968 machine_mode lmode, rmode, nmode;
3969 int lunsignedp, runsignedp;
3970 int lreversep, rreversep;
3971 int lvolatilep = 0, rvolatilep = 0;
3972 tree linner, rinner = NULL_TREE;
3973 tree mask;
3974 tree offset;
3976 /* Get all the information about the extractions being done. If the bit size
3977 is the same as the size of the underlying object, we aren't doing an
3978 extraction at all and so can do nothing. We also don't want to
3979 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3980 then will no longer be able to replace it. */
3981 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3982 &lunsignedp, &lreversep, &lvolatilep);
3983 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3984 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3985 return 0;
3987 if (const_p)
3988 rreversep = lreversep;
3989 else
3991 /* If this is not a constant, we can only do something if bit positions,
3992 sizes, signedness and storage order are the same. */
3993 rinner
3994 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3995 &runsignedp, &rreversep, &rvolatilep);
3997 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3998 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3999 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
4000 return 0;
4003 /* Honor the C++ memory model and mimic what RTL expansion does. */
4004 unsigned HOST_WIDE_INT bitstart = 0;
4005 unsigned HOST_WIDE_INT bitend = 0;
4006 if (TREE_CODE (lhs) == COMPONENT_REF)
4008 get_bit_range (&bitstart, &bitend, lhs, &lbitpos, &offset);
4009 if (offset != NULL_TREE)
4010 return 0;
4013 /* See if we can find a mode to refer to this field. We should be able to,
4014 but fail if we can't. */
4015 nmode = get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4016 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4017 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4018 TYPE_ALIGN (TREE_TYPE (rinner))),
4019 word_mode, false);
4020 if (nmode == VOIDmode)
4021 return 0;
4023 /* Set signed and unsigned types of the precision of this mode for the
4024 shifts below. */
4025 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4027 /* Compute the bit position and size for the new reference and our offset
4028 within it. If the new reference is the same size as the original, we
4029 won't optimize anything, so return zero. */
4030 nbitsize = GET_MODE_BITSIZE (nmode);
4031 nbitpos = lbitpos & ~ (nbitsize - 1);
4032 lbitpos -= nbitpos;
4033 if (nbitsize == lbitsize)
4034 return 0;
4036 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4037 lbitpos = nbitsize - lbitsize - lbitpos;
4039 /* Make the mask to be used against the extracted field. */
4040 mask = build_int_cst_type (unsigned_type, -1);
4041 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4042 mask = const_binop (RSHIFT_EXPR, mask,
4043 size_int (nbitsize - lbitsize - lbitpos));
4045 if (! const_p)
4046 /* If not comparing with a constant, just rework the comparison
4047 and return. */
4048 return fold_build2_loc (loc, code, compare_type,
4049 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4050 make_bit_field_ref (loc, linner, lhs,
4051 unsigned_type,
4052 nbitsize, nbitpos,
4053 1, lreversep),
4054 mask),
4055 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4056 make_bit_field_ref (loc, rinner, rhs,
4057 unsigned_type,
4058 nbitsize, nbitpos,
4059 1, rreversep),
4060 mask));
4062 /* Otherwise, we are handling the constant case. See if the constant is too
4063 big for the field. Warn and return a tree for 0 (false) if so. We do
4064 this not only for its own sake, but to avoid having to test for this
4065 error case below. If we didn't, we might generate wrong code.
4067 For unsigned fields, the constant shifted right by the field length should
4068 be all zero. For signed fields, the high-order bits should agree with
4069 the sign bit. */
4071 if (lunsignedp)
4073 if (wi::lrshift (rhs, lbitsize) != 0)
4075 warning (0, "comparison is always %d due to width of bit-field",
4076 code == NE_EXPR);
4077 return constant_boolean_node (code == NE_EXPR, compare_type);
4080 else
4082 wide_int tem = wi::arshift (rhs, lbitsize - 1);
4083 if (tem != 0 && tem != -1)
4085 warning (0, "comparison is always %d due to width of bit-field",
4086 code == NE_EXPR);
4087 return constant_boolean_node (code == NE_EXPR, compare_type);
4091 /* Single-bit compares should always be against zero. */
4092 if (lbitsize == 1 && ! integer_zerop (rhs))
4094 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4095 rhs = build_int_cst (type, 0);
4098 /* Make a new bitfield reference, shift the constant over the
4099 appropriate number of bits and mask it with the computed mask
4100 (in case this was a signed field). If we changed it, make a new one. */
4101 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4102 nbitsize, nbitpos, 1, lreversep);
4104 rhs = const_binop (BIT_AND_EXPR,
4105 const_binop (LSHIFT_EXPR,
4106 fold_convert_loc (loc, unsigned_type, rhs),
4107 size_int (lbitpos)),
4108 mask);
4110 lhs = build2_loc (loc, code, compare_type,
4111 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4112 return lhs;
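
/* Illustrative sketch, not part of GCC: the shift-based mask construction
   used above, specialized to a 32-bit word.  It yields SIZE one bits
   starting at bit POS (from the least significant end), and unlike
   ((1u << size) - 1) << pos it also works for SIZE == 32.  Assumes
   0 < SIZE and SIZE + POS <= 32, mirroring nbitsize/lbitsize/lbitpos;
   the helper name is invented.  */

static unsigned int
mask_of (int size, int pos)
{
  unsigned int mask = ~0u;      /* start with all ones */
  mask <<= 32 - size;           /* keep exactly SIZE bits */
  mask >>= 32 - size - pos;     /* slide them down to POS */
  return mask;                  /* mask_of (8, 4) == 0x00000ff0 */
}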
4115 /* Subroutine for fold_truth_andor_1: decode a field reference.
4117 If EXP is a comparison reference, we return the innermost reference.
4119 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4120 set to the starting bit number.
4122 If the innermost field can be completely contained in a mode-sized
4123 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4125 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4126 otherwise it is not changed.
4128 *PUNSIGNEDP is set to the signedness of the field.
4130 *PREVERSEP is set to the storage order of the field.
4132 *PMASK is set to the mask used. This is either contained in a
4133 BIT_AND_EXPR or derived from the width of the field.
4135 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4137 Return 0 if this is not a component reference or is one that we can't
4138 do anything with. */
4140 static tree
4141 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4142 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4143 int *punsignedp, int *preversep, int *pvolatilep,
4144 tree *pmask, tree *pand_mask)
4146 tree exp = *exp_;
4147 tree outer_type = 0;
4148 tree and_mask = 0;
4149 tree mask, inner, offset;
4150 tree unsigned_type;
4151 unsigned int precision;
4153 /* All the optimizations using this function assume integer fields.
4154 There are problems with FP fields since the type_for_size call
4155 below can fail for, e.g., XFmode. */
4156 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4157 return 0;
4159 /* We are interested in the bare arrangement of bits, so strip everything
4160 that doesn't affect the machine mode. However, record the type of the
4161 outermost expression if it may matter below. */
4162 if (CONVERT_EXPR_P (exp)
4163 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4164 outer_type = TREE_TYPE (exp);
4165 STRIP_NOPS (exp);
4167 if (TREE_CODE (exp) == BIT_AND_EXPR)
4169 and_mask = TREE_OPERAND (exp, 1);
4170 exp = TREE_OPERAND (exp, 0);
4171 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4172 if (TREE_CODE (and_mask) != INTEGER_CST)
4173 return 0;
4176 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4177 punsignedp, preversep, pvolatilep);
4178 if ((inner == exp && and_mask == 0)
4179 || *pbitsize < 0 || offset != 0
4180 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4181 /* Reject out-of-bound accesses (PR79731). */
4182 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4183 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4184 *pbitpos + *pbitsize) < 0))
4185 return 0;
4187 *exp_ = exp;
4189 /* If the number of bits in the reference is the same as the bitsize of
4190 the outer type, then the outer type gives the signedness. Otherwise
4191 (in case of a small bitfield) the signedness is unchanged. */
4192 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4193 *punsignedp = TYPE_UNSIGNED (outer_type);
4195 /* Compute the mask to access the bitfield. */
4196 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4197 precision = TYPE_PRECISION (unsigned_type);
4199 mask = build_int_cst_type (unsigned_type, -1);
4201 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4202 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4204 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4205 if (and_mask != 0)
4206 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4207 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4209 *pmask = mask;
4210 *pand_mask = and_mask;
4211 return inner;
4214 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4215 bit positions and MASK is SIGNED. */
4217 static int
4218 all_ones_mask_p (const_tree mask, unsigned int size)
4220 tree type = TREE_TYPE (mask);
4221 unsigned int precision = TYPE_PRECISION (type);
4223 /* If this function returns true when the type of the mask is
4224 UNSIGNED, then there will be errors. In particular see
4225 gcc.c-torture/execute/990326-1.c. There does not appear to be
4226 any documentation paper trail as to why this is so. But the pre
4227 wide-int worked with that restriction and it has been preserved
4228 here. */
4229 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4230 return false;
4232 return wi::mask (size, false, precision) == mask;
4235 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4236 represents the sign bit of EXP's type. If EXP represents a sign
4237 or zero extension, also test VAL against the unextended type.
4238 The return value is the (sub)expression whose sign bit is VAL,
4239 or NULL_TREE otherwise. */
4241 tree
4242 sign_bit_p (tree exp, const_tree val)
4244 int width;
4245 tree t;
4247 /* Tree EXP must have an integral type. */
4248 t = TREE_TYPE (exp);
4249 if (! INTEGRAL_TYPE_P (t))
4250 return NULL_TREE;
4252 /* Tree VAL must be an integer constant. */
4253 if (TREE_CODE (val) != INTEGER_CST
4254 || TREE_OVERFLOW (val))
4255 return NULL_TREE;
4257 width = TYPE_PRECISION (t);
4258 if (wi::only_sign_bit_p (val, width))
4259 return exp;
4261 /* Handle extension from a narrower type. */
4262 if (TREE_CODE (exp) == NOP_EXPR
4263 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4264 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4266 return NULL_TREE;
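
/* Illustrative sketch, not part of GCC: the scalar check behind
   wi::only_sign_bit_p, specialized to 32 bits.  VAL matches only when
   exactly the sign bit is set, which is what lets fold treat tests like
   (x & 0x80000000) != 0 as x < 0.  The helper name is invented.  */

static int
only_sign_bit_32 (unsigned int val)
{
  return val == 1u << 31;
}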
4269 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4270 to be evaluated unconditionally. */
4272 static int
4273 simple_operand_p (const_tree exp)
4275 /* Strip any conversions that don't change the machine mode. */
4276 STRIP_NOPS (exp);
4278 return (CONSTANT_CLASS_P (exp)
4279 || TREE_CODE (exp) == SSA_NAME
4280 || (DECL_P (exp)
4281 && ! TREE_ADDRESSABLE (exp)
4282 && ! TREE_THIS_VOLATILE (exp)
4283 && ! DECL_NONLOCAL (exp)
4284 /* Don't regard global variables as simple. They may be
4285 allocated in ways unknown to the compiler (shared memory,
4286 #pragma weak, etc). */
4287 && ! TREE_PUBLIC (exp)
4288 && ! DECL_EXTERNAL (exp)
4289 /* Weakrefs are not safe to be read, since they can be NULL.
4290 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4291 have DECL_WEAK flag set. */
4292 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4293 /* Loading a static variable is unduly expensive, but global
4294 registers aren't expensive. */
4295 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4298 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4299 to be evaluated unconditionally.
4300 In addition to simple_operand_p, we assume that comparisons, conversions,
4301 and logic-not operations are simple, if their operands are simple, too. */
4303 static bool
4304 simple_operand_p_2 (tree exp)
4306 enum tree_code code;
4308 if (TREE_SIDE_EFFECTS (exp)
4309 || tree_could_trap_p (exp))
4310 return false;
4312 while (CONVERT_EXPR_P (exp))
4313 exp = TREE_OPERAND (exp, 0);
4315 code = TREE_CODE (exp);
4317 if (TREE_CODE_CLASS (code) == tcc_comparison)
4318 return (simple_operand_p (TREE_OPERAND (exp, 0))
4319 && simple_operand_p (TREE_OPERAND (exp, 1)));
4321 if (code == TRUTH_NOT_EXPR)
4322 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4324 return simple_operand_p (exp);
4328 /* The following functions are subroutines to fold_range_test and allow it to
4329 try to change a logical combination of comparisons into a range test.
4331 For example, both
4332 X == 2 || X == 3 || X == 4 || X == 5
4333 and
4334 X >= 2 && X <= 5
4335 are converted to
4336 (unsigned) (X - 2) <= 3
4338 We describe each set of comparisons as being either inside or outside
4339 a range, using a variable named like IN_P, and then describe the
4340 range with a lower and upper bound. If one of the bounds is omitted,
4341 it represents either the highest or lowest value of the type.
4343 In the comments below, we represent a range by two numbers in brackets
4344 preceded by a "+" to designate being inside that range, or a "-" to
4345 designate being outside that range, so the condition can be inverted by
4346 flipping the prefix. An omitted bound is represented by a "-". For
4347 example, "- [-, 10]" means being outside the range starting at the lowest
4348 possible value and ending at 10, in other words, being greater than 10.
4349 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4350 always false.
4352 We set up things so that the missing bounds are handled in a consistent
4353 manner so neither a missing bound nor "true" and "false" need to be
4354 handled using a special case. */
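
/* Illustrative sketch, not part of GCC: the final shape of the example
   above.  The unsigned cast makes every value below 2 wrap around to a
   large number, so a single compare covers both bounds of "+ [2, 5]".
   The helper name is invented.  */

static int
in_2_to_5 (int x)
{
  /* x == 2 || x == 3 || x == 4 || x == 5, as one range test.  */
  return (unsigned int) x - 2u <= 3u;
}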
4356 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4357 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4358 and UPPER1_P are nonzero if the respective argument is an upper bound
4359 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4360 must be specified for a comparison. ARG1 will be converted to ARG0's
4361 type if both are specified. */
4363 static tree
4364 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4365 tree arg1, int upper1_p)
4367 tree tem;
4368 int result;
4369 int sgn0, sgn1;
4371 /* If neither arg represents infinity, do the normal operation.
4372 Else, if not a comparison, return infinity. Else handle the special
4373 comparison rules. Note that most of the cases below won't occur, but
4374 are handled for consistency. */
4376 if (arg0 != 0 && arg1 != 0)
4378 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4379 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4380 STRIP_NOPS (tem);
4381 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4384 if (TREE_CODE_CLASS (code) != tcc_comparison)
4385 return 0;
4387 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4388 for neither. In real maths, we cannot assume open ended ranges are
4389 the same. But, this is computer arithmetic, where numbers are finite.
4390 We can therefore make the transformation of any unbounded range with
4391 the value Z, Z being greater than any representable number. This permits
4392 us to treat unbounded ranges as equal. */
4393 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4394 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4395 switch (code)
4397 case EQ_EXPR:
4398 result = sgn0 == sgn1;
4399 break;
4400 case NE_EXPR:
4401 result = sgn0 != sgn1;
4402 break;
4403 case LT_EXPR:
4404 result = sgn0 < sgn1;
4405 break;
4406 case LE_EXPR:
4407 result = sgn0 <= sgn1;
4408 break;
4409 case GT_EXPR:
4410 result = sgn0 > sgn1;
4411 break;
4412 case GE_EXPR:
4413 result = sgn0 >= sgn1;
4414 break;
4415 default:
4416 gcc_unreachable ();
4419 return constant_boolean_node (result, type);
4422 /* Helper routine for make_range. Perform one step for it, return
4423 new expression if the loop should continue or NULL_TREE if it should
4424 stop. */
4426 tree
4427 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4428 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4429 bool *strict_overflow_p)
4431 tree arg0_type = TREE_TYPE (arg0);
4432 tree n_low, n_high, low = *p_low, high = *p_high;
4433 int in_p = *p_in_p, n_in_p;
4435 switch (code)
4437 case TRUTH_NOT_EXPR:
4438 /* We can only do something if the range is testing for zero. */
4439 if (low == NULL_TREE || high == NULL_TREE
4440 || ! integer_zerop (low) || ! integer_zerop (high))
4441 return NULL_TREE;
4442 *p_in_p = ! in_p;
4443 return arg0;
4445 case EQ_EXPR: case NE_EXPR:
4446 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4447 /* We can only do something if the range is testing for zero
4448 and if the second operand is an integer constant. Note that
4449 saying something is "in" the range we make is done by
4450 complementing IN_P since it will set in the initial case of
4451 being not equal to zero; "out" is leaving it alone. */
4452 if (low == NULL_TREE || high == NULL_TREE
4453 || ! integer_zerop (low) || ! integer_zerop (high)
4454 || TREE_CODE (arg1) != INTEGER_CST)
4455 return NULL_TREE;
4457 switch (code)
4459 case NE_EXPR: /* - [c, c] */
4460 low = high = arg1;
4461 break;
4462 case EQ_EXPR: /* + [c, c] */
4463 in_p = ! in_p, low = high = arg1;
4464 break;
4465 case GT_EXPR: /* - [-, c] */
4466 low = 0, high = arg1;
4467 break;
4468 case GE_EXPR: /* + [c, -] */
4469 in_p = ! in_p, low = arg1, high = 0;
4470 break;
4471 case LT_EXPR: /* - [c, -] */
4472 low = arg1, high = 0;
4473 break;
4474 case LE_EXPR: /* + [-, c] */
4475 in_p = ! in_p, low = 0, high = arg1;
4476 break;
4477 default:
4478 gcc_unreachable ();
4481 /* If this is an unsigned comparison, we also know that EXP is
4482 greater than or equal to zero. We base the range tests we make
4483 on that fact, so we record it here so we can parse existing
4484 range tests. We test arg0_type since often the return type
4485 of, e.g. EQ_EXPR, is boolean. */
4486 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4488 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4489 in_p, low, high, 1,
4490 build_int_cst (arg0_type, 0),
4491 NULL_TREE))
4492 return NULL_TREE;
4494 in_p = n_in_p, low = n_low, high = n_high;
4496 /* If the high bound is missing, but we have a nonzero low
4497 bound, reverse the range so it goes from zero to the low bound
4498 minus 1. */
4499 if (high == 0 && low && ! integer_zerop (low))
4501 in_p = ! in_p;
4502 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4503 build_int_cst (TREE_TYPE (low), 1), 0);
4504 low = build_int_cst (arg0_type, 0);
4508 *p_low = low;
4509 *p_high = high;
4510 *p_in_p = in_p;
4511 return arg0;
4513 case NEGATE_EXPR:
4514 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4515 low and high are non-NULL, then normalize will DTRT. */
4516 if (!TYPE_UNSIGNED (arg0_type)
4517 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4519 if (low == NULL_TREE)
4520 low = TYPE_MIN_VALUE (arg0_type);
4521 if (high == NULL_TREE)
4522 high = TYPE_MAX_VALUE (arg0_type);
4525 /* (-x) IN [a,b] -> x in [-b, -a] */
4526 n_low = range_binop (MINUS_EXPR, exp_type,
4527 build_int_cst (exp_type, 0),
4528 0, high, 1);
4529 n_high = range_binop (MINUS_EXPR, exp_type,
4530 build_int_cst (exp_type, 0),
4531 0, low, 0);
4532 if (n_high != 0 && TREE_OVERFLOW (n_high))
4533 return NULL_TREE;
4534 goto normalize;
4536 case BIT_NOT_EXPR:
4537 /* ~ X -> -X - 1 */
4538 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4539 build_int_cst (exp_type, 1));
4541 case PLUS_EXPR:
4542 case MINUS_EXPR:
4543 if (TREE_CODE (arg1) != INTEGER_CST)
4544 return NULL_TREE;
4546 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4547 move a constant to the other side. */
4548 if (!TYPE_UNSIGNED (arg0_type)
4549 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4550 return NULL_TREE;
4552 /* If EXP is signed, any overflow in the computation is undefined,
4553 so we don't worry about it so long as our computations on
4554 the bounds don't overflow. For unsigned, overflow is defined
4555 and this is exactly the right thing. */
4556 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4557 arg0_type, low, 0, arg1, 0);
4558 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4559 arg0_type, high, 1, arg1, 0);
4560 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4561 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4562 return NULL_TREE;
4564 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4565 *strict_overflow_p = true;
4567 normalize:
4568 /* Check for an unsigned range which has wrapped around the maximum
4569 value thus making n_high < n_low, and normalize it. */
4570 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4572 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4573 build_int_cst (TREE_TYPE (n_high), 1), 0);
4574 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4575 build_int_cst (TREE_TYPE (n_low), 1), 0);
4577 /* If the range is of the form +/- [ x+1, x ], we won't
4578 be able to normalize it. But then, it represents the
4579 whole range or the empty set, so make it
4580 +/- [ -, - ]. */
4581 if (tree_int_cst_equal (n_low, low)
4582 && tree_int_cst_equal (n_high, high))
4583 low = high = 0;
4584 else
4585 in_p = ! in_p;
4587 else
4588 low = n_low, high = n_high;
4590 *p_low = low;
4591 *p_high = high;
4592 *p_in_p = in_p;
4593 return arg0;
4595 CASE_CONVERT:
4596 case NON_LVALUE_EXPR:
4597 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4598 return NULL_TREE;
4600 if (! INTEGRAL_TYPE_P (arg0_type)
4601 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4602 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4603 return NULL_TREE;
4605 n_low = low, n_high = high;
4607 if (n_low != 0)
4608 n_low = fold_convert_loc (loc, arg0_type, n_low);
4610 if (n_high != 0)
4611 n_high = fold_convert_loc (loc, arg0_type, n_high);
4613 /* If we're converting arg0 from an unsigned type to exp's
4614 signed type, we will be doing the comparison as unsigned.
4615 The tests above have already verified that LOW and HIGH
4616 are both positive.
4618 So we have to ensure that we will handle large unsigned
4619 values the same way that the current signed bounds treat
4620 negative values. */
4622 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4624 tree high_positive;
4625 tree equiv_type;
4626 /* For fixed-point modes, we need to pass the saturating flag
4627 as the 2nd parameter. */
4628 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4629 equiv_type
4630 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4631 TYPE_SATURATING (arg0_type));
4632 else
4633 equiv_type
4634 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4636 /* A range without an upper bound is, naturally, unbounded.
4637 Since convert would have cropped a very large value, use
4638 the max value for the destination type. */
4639 high_positive
4640 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4641 : TYPE_MAX_VALUE (arg0_type);
4643 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4644 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4645 fold_convert_loc (loc, arg0_type,
4646 high_positive),
4647 build_int_cst (arg0_type, 1));
4649 /* If the low bound is specified, "and" the range with the
4650 range for which the original unsigned value will be
4651 positive. */
4652 if (low != 0)
4654 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4655 1, fold_convert_loc (loc, arg0_type,
4656 integer_zero_node),
4657 high_positive))
4658 return NULL_TREE;
4660 in_p = (n_in_p == in_p);
4662 else
4664 /* Otherwise, "or" the range with the range of the input
4665 that will be interpreted as negative. */
4666 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4667 1, fold_convert_loc (loc, arg0_type,
4668 integer_zero_node),
4669 high_positive))
4670 return NULL_TREE;
4672 in_p = (in_p != n_in_p);
4676 *p_low = n_low;
4677 *p_high = n_high;
4678 *p_in_p = in_p;
4679 return arg0;
4681 default:
4682 return NULL_TREE;
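
/* Illustrative sketch, not part of GCC: the "normalize" step above on a
   concrete unsigned range.  Requiring x + 2 to lie in [1, 2] subtracts
   2 from both bounds, producing the wrapped range [UINT_MAX, 0] with
   n_high < n_low; normalizing flips IN_P and yields "x not in
   [1, UINT_MAX - 1]", i.e. x is 0 or UINT_MAX.  Names are invented.  */

#include <limits.h>

static int
normalize_example_holds (unsigned int x)
{
  int direct = x + 2u >= 1u && x + 2u <= 2u;
  int normalized = ! (x >= 1u && x <= UINT_MAX - 1u);
  return direct == normalized;    /* nonzero for every x */
}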
4686 /* Given EXP, a logical expression, set the range it is testing into
4687 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4688 actually being tested. *PLOW and *PHIGH will be made of the same
4689 type as the returned expression. If EXP is not a comparison, we
4690 will most likely not be returning a useful value and range. Set
4691 *STRICT_OVERFLOW_P to true if the return value is only valid
4692 because signed overflow is undefined; otherwise, do not change
4693 *STRICT_OVERFLOW_P. */
4695 tree
4696 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4697 bool *strict_overflow_p)
4699 enum tree_code code;
4700 tree arg0, arg1 = NULL_TREE;
4701 tree exp_type, nexp;
4702 int in_p;
4703 tree low, high;
4704 location_t loc = EXPR_LOCATION (exp);
4706 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4707 and see if we can refine the range. Some of the cases below may not
4708 happen, but it doesn't seem worth worrying about this. We "continue"
4709 the outer loop when we've changed something; otherwise we "break"
4710 the switch, which will "break" the while. */
4712 in_p = 0;
4713 low = high = build_int_cst (TREE_TYPE (exp), 0);
4715 while (1)
4717 code = TREE_CODE (exp);
4718 exp_type = TREE_TYPE (exp);
4719 arg0 = NULL_TREE;
4721 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4723 if (TREE_OPERAND_LENGTH (exp) > 0)
4724 arg0 = TREE_OPERAND (exp, 0);
4725 if (TREE_CODE_CLASS (code) == tcc_binary
4726 || TREE_CODE_CLASS (code) == tcc_comparison
4727 || (TREE_CODE_CLASS (code) == tcc_expression
4728 && TREE_OPERAND_LENGTH (exp) > 1))
4729 arg1 = TREE_OPERAND (exp, 1);
4731 if (arg0 == NULL_TREE)
4732 break;
4734 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4735 &high, &in_p, strict_overflow_p);
4736 if (nexp == NULL_TREE)
4737 break;
4738 exp = nexp;
4741 /* If EXP is a constant, we can evaluate whether this is true or false. */
4742 if (TREE_CODE (exp) == INTEGER_CST)
4744 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4745 exp, 0, low, 0))
4746 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4747 exp, 1, high, 1)));
4748 low = high = 0;
4749 exp = 0;
4752 *pin_p = in_p, *plow = low, *phigh = high;
4753 return exp;
4756 /* Returns TRUE if the [LOW, HIGH] range check can be optimized to
4757 a bitwise check, i.e. when
4758 LOW == 0xXX...X00...0
4759 HIGH == 0xXX...X11...1
4760 Return corresponding mask in MASK and stem in VALUE. */
4762 static bool
4763 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
4764 tree *value)
4766 if (TREE_CODE (low) != INTEGER_CST
4767 || TREE_CODE (high) != INTEGER_CST)
4768 return false;
4770 unsigned prec = TYPE_PRECISION (type);
4771 wide_int lo = wi::to_wide (low, prec);
4772 wide_int hi = wi::to_wide (high, prec);
4774 wide_int end_mask = lo ^ hi;
4775 if ((end_mask & (end_mask + 1)) != 0
4776 || (lo & end_mask) != 0)
4777 return false;
4779 wide_int stem_mask = ~end_mask;
4780 wide_int stem = lo & stem_mask;
4781 if (stem != (hi & stem_mask))
4782 return false;
4784 *mask = wide_int_to_tree (type, stem_mask);
4785 *value = wide_int_to_tree (type, stem);
4787 return true;
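
/* Illustrative sketch, not part of GCC: a concrete maskable range.  For
   LOW == 0x20 and HIGH == 0x3f the low five bits vary freely while the
   stem bits stay fixed, so the two compares collapse to one masked
   equality, the form build_range_check emits from this test.  The
   helper name is invented.  */

static int
in_0x20_to_0x3f (unsigned int x)
{
  /* x >= 0x20 && x <= 0x3f  <==>  (x & ~0x1fu) == 0x20.  */
  return (x & ~0x1fu) == 0x20u;
}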
4790 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4791 type, TYPE, return an expression to test if EXP is in (or out of, depending
4792 on IN_P) the range. Return 0 if the test couldn't be created. */
4794 tree
4795 build_range_check (location_t loc, tree type, tree exp, int in_p,
4796 tree low, tree high)
4798 tree etype = TREE_TYPE (exp), mask, value;
4800 /* Disable this optimization for function pointer expressions
4801 on targets that require function pointer canonicalization. */
4802 if (targetm.have_canonicalize_funcptr_for_compare ()
4803 && TREE_CODE (etype) == POINTER_TYPE
4804 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4805 return NULL_TREE;
4807 if (! in_p)
4809 value = build_range_check (loc, type, exp, 1, low, high);
4810 if (value != 0)
4811 return invert_truthvalue_loc (loc, value);
4813 return 0;
4816 if (low == 0 && high == 0)
4817 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4819 if (low == 0)
4820 return fold_build2_loc (loc, LE_EXPR, type, exp,
4821 fold_convert_loc (loc, etype, high));
4823 if (high == 0)
4824 return fold_build2_loc (loc, GE_EXPR, type, exp,
4825 fold_convert_loc (loc, etype, low));
4827 if (operand_equal_p (low, high, 0))
4828 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4829 fold_convert_loc (loc, etype, low));
4831 if (TREE_CODE (exp) == BIT_AND_EXPR
4832 && maskable_range_p (low, high, etype, &mask, &value))
4833 return fold_build2_loc (loc, EQ_EXPR, type,
4834 fold_build2_loc (loc, BIT_AND_EXPR, etype,
4835 exp, mask),
4836 value);
4838 if (integer_zerop (low))
4840 if (! TYPE_UNSIGNED (etype))
4842 etype = unsigned_type_for (etype);
4843 high = fold_convert_loc (loc, etype, high);
4844 exp = fold_convert_loc (loc, etype, exp);
4846 return build_range_check (loc, type, exp, 1, 0, high);
4849 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4850 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4852 int prec = TYPE_PRECISION (etype);
4854 if (wi::mask (prec - 1, false, prec) == high)
4856 if (TYPE_UNSIGNED (etype))
4858 tree signed_etype = signed_type_for (etype);
4859 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4860 etype
4861 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4862 else
4863 etype = signed_etype;
4864 exp = fold_convert_loc (loc, etype, exp);
4866 return fold_build2_loc (loc, GT_EXPR, type, exp,
4867 build_int_cst (etype, 0));
4871 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4872 This requires wrap-around arithmetic for the type of the expression.
4873 First make sure that arithmetic in this type is valid, then make sure
4874 that it wraps around. */
4875 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4876 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4877 TYPE_UNSIGNED (etype));
4879 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4881 tree utype, minv, maxv;
4883 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4884 for the type in question, as we rely on this here. */
4885 utype = unsigned_type_for (etype);
4886 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4887 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4888 build_int_cst (TREE_TYPE (maxv), 1), 1);
4889 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4891 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4892 minv, 1, maxv, 1)))
4893 etype = utype;
4894 else
4895 return 0;
4898 high = fold_convert_loc (loc, etype, high);
4899 low = fold_convert_loc (loc, etype, low);
4900 exp = fold_convert_loc (loc, etype, exp);
4902 value = const_binop (MINUS_EXPR, high, low);
4905 if (POINTER_TYPE_P (etype))
4907 if (value != 0 && !TREE_OVERFLOW (value))
4909 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4910 return build_range_check (loc, type,
4911 fold_build_pointer_plus_loc (loc, exp, low),
4912 1, build_int_cst (etype, 0), value);
4914 return 0;
4917 if (value != 0 && !TREE_OVERFLOW (value))
4918 return build_range_check (loc, type,
4919 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4920 1, build_int_cst (etype, 0), value);
4922 return 0;
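
/* Illustrative sketch, not part of GCC: the (c >= 1 && c <= 127) case
   handled above.  When HIGH is the all-but-sign-bit mask, the range
   check becomes a single signed comparison with zero; the conversion
   assumes the usual two's complement behavior.  The helper name is
   invented.  */

static int
in_1_to_127 (unsigned char c)
{
  return (signed char) c > 0;   /* c >= 1 && c <= 127 */
}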
4925 /* Return the predecessor of VAL in its type, handling the infinite case. */
4927 static tree
4928 range_predecessor (tree val)
4930 tree type = TREE_TYPE (val);
4932 if (INTEGRAL_TYPE_P (type)
4933 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4934 return 0;
4935 else
4936 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4937 build_int_cst (TREE_TYPE (val), 1), 0);
4940 /* Return the successor of VAL in its type, handling the infinite case. */
4942 static tree
4943 range_successor (tree val)
4945 tree type = TREE_TYPE (val);
4947 if (INTEGRAL_TYPE_P (type)
4948 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4949 return 0;
4950 else
4951 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4952 build_int_cst (TREE_TYPE (val), 1), 0);
4955 /* Given two ranges, see if we can merge them into one. Return 1 if we
4956 can, 0 if we can't. Set the output range into the specified parameters. */
4958 bool
4959 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4960 tree high0, int in1_p, tree low1, tree high1)
4962 int no_overlap;
4963 int subset;
4964 int temp;
4965 tree tem;
4966 int in_p;
4967 tree low, high;
4968 int lowequal = ((low0 == 0 && low1 == 0)
4969 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4970 low0, 0, low1, 0)));
4971 int highequal = ((high0 == 0 && high1 == 0)
4972 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4973 high0, 1, high1, 1)));
4975 /* Make range 0 be the range that starts first, or ends last if they
4976 start at the same value. Swap them if it isn't. */
4977 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4978 low0, 0, low1, 0))
4979 || (lowequal
4980 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4981 high1, 1, high0, 1))))
4983 temp = in0_p, in0_p = in1_p, in1_p = temp;
4984 tem = low0, low0 = low1, low1 = tem;
4985 tem = high0, high0 = high1, high1 = tem;
4988 /* Now flag two cases, whether the ranges are disjoint or whether the
4989 second range is totally subsumed in the first. Note that the tests
4990 below are simplified by the ones above. */
4991 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4992 high0, 1, low1, 0));
4993 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4994 high1, 1, high0, 1));
4996 /* We now have four cases, depending on whether we are including or
4997 excluding the two ranges. */
4998 if (in0_p && in1_p)
5000 /* If they don't overlap, the result is false. If the second range
5001 is a subset it is the result. Otherwise, the range is from the start
5002 of the second to the end of the first. */
5003 if (no_overlap)
5004 in_p = 0, low = high = 0;
5005 else if (subset)
5006 in_p = 1, low = low1, high = high1;
5007 else
5008 in_p = 1, low = low1, high = high0;
5011 else if (in0_p && ! in1_p)
5013 /* If they don't overlap, the result is the first range. If they are
5014 equal, the result is false. If the second range is a subset of the
5015 first, and the ranges begin at the same place, we go from just after
5016 the end of the second range to the end of the first. If the second
5017 range is not a subset of the first, or if it is a subset and both
5018 ranges end at the same place, the range starts at the start of the
5019 first range and ends just before the second range.
5020 Otherwise, we can't describe this as a single range. */
5021 if (no_overlap)
5022 in_p = 1, low = low0, high = high0;
5023 else if (lowequal && highequal)
5024 in_p = 0, low = high = 0;
5025 else if (subset && lowequal)
5027 low = range_successor (high1);
5028 high = high0;
5029 in_p = 1;
5030 if (low == 0)
5032 /* We are in the weird situation where high0 > high1 but
5033 high1 has no successor. Punt. */
5034 return 0;
5037 else if (! subset || highequal)
5039 low = low0;
5040 high = range_predecessor (low1);
5041 in_p = 1;
5042 if (high == 0)
5044 /* low0 < low1 but low1 has no predecessor. Punt. */
5045 return 0;
5048 else
5049 return 0;
5052 else if (! in0_p && in1_p)
5054 /* If they don't overlap, the result is the second range. If the second
5055 is a subset of the first, the result is false. Otherwise,
5056 the range starts just after the first range and ends at the
5057 end of the second. */
5058 if (no_overlap)
5059 in_p = 1, low = low1, high = high1;
5060 else if (subset || highequal)
5061 in_p = 0, low = high = 0;
5062 else
5064 low = range_successor (high0);
5065 high = high1;
5066 in_p = 1;
5067 if (low == 0)
5069 /* high1 > high0 but high0 has no successor. Punt. */
5070 return 0;
5075 else
5077 /* The case where we are excluding both ranges. Here the complex case
5078 is if they don't overlap. In that case, the only time we have a
5079 range is if they are adjacent. If the second is a subset of the
5080 first, the result is the first. Otherwise, the range to exclude
5081 starts at the beginning of the first range and ends at the end of the
5082 second. */
5083 if (no_overlap)
5085 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5086 range_successor (high0),
5087 1, low1, 0)))
5088 in_p = 0, low = low0, high = high1;
5089 else
5091 /* Canonicalize - [min, x] into - [-, x]. */
5092 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5093 switch (TREE_CODE (TREE_TYPE (low0)))
5095 case ENUMERAL_TYPE:
5096 if (TYPE_PRECISION (TREE_TYPE (low0))
5097 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5098 break;
5099 /* FALLTHROUGH */
5100 case INTEGER_TYPE:
5101 if (tree_int_cst_equal (low0,
5102 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5103 low0 = 0;
5104 break;
5105 case POINTER_TYPE:
5106 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5107 && integer_zerop (low0))
5108 low0 = 0;
5109 break;
5110 default:
5111 break;
5114 /* Canonicalize - [x, max] into - [x, -]. */
5115 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5116 switch (TREE_CODE (TREE_TYPE (high1)))
5118 case ENUMERAL_TYPE:
5119 if (TYPE_PRECISION (TREE_TYPE (high1))
5120 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5121 break;
5122 /* FALLTHROUGH */
5123 case INTEGER_TYPE:
5124 if (tree_int_cst_equal (high1,
5125 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5126 high1 = 0;
5127 break;
5128 case POINTER_TYPE:
5129 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5130 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5131 high1, 1,
5132 build_int_cst (TREE_TYPE (high1), 1),
5133 1)))
5134 high1 = 0;
5135 break;
5136 default:
5137 break;
5140 /* The ranges might be also adjacent between the maximum and
5141 minimum values of the given type. For
5142 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5143 return + [x + 1, y - 1]. */
5144 if (low0 == 0 && high1 == 0)
5146 low = range_successor (high0);
5147 high = range_predecessor (low1);
5148 if (low == 0 || high == 0)
5149 return 0;
5151 in_p = 1;
5153 else
5154 return 0;
5157 else if (subset)
5158 in_p = 0, low = low0, high = high0;
5159 else
5160 in_p = 0, low = low0, high = high1;
5163 *pin_p = in_p, *plow = low, *phigh = high;
5164 return 1;
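
/* Illustrative sketch, not part of GCC: the in0_p && in1_p case above on
   concrete bounds.  Merging "+ [2, 8]" with "+ [4, 10]" yields their
   overlap "+ [4, 8]"; the loop verifies that the merged test agrees
   with the conjunction for every unsigned char value.  The helper name
   is invented.  */

static int
merge_example_holds (void)
{
  for (unsigned int v = 0; v <= 255; v++)
    if (((v >= 2 && v <= 8) && (v >= 4 && v <= 10)) != (v >= 4 && v <= 8))
      return 0;
  return 1;
}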
5168 /* Subroutine of fold, looking inside expressions of the form
5169 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5170 of the COND_EXPR. This function is being used also to optimize
5171 A op B ? C : A, by reversing the comparison first.
5173 Return a folded expression whose code is not a COND_EXPR
5174 anymore, or NULL_TREE if no folding opportunity is found. */
5176 static tree
5177 fold_cond_expr_with_comparison (location_t loc, tree type,
5178 tree arg0, tree arg1, tree arg2)
5180 enum tree_code comp_code = TREE_CODE (arg0);
5181 tree arg00 = TREE_OPERAND (arg0, 0);
5182 tree arg01 = TREE_OPERAND (arg0, 1);
5183 tree arg1_type = TREE_TYPE (arg1);
5184 tree tem;
5186 STRIP_NOPS (arg1);
5187 STRIP_NOPS (arg2);
5189 /* If we have A op 0 ? A : -A, consider applying the following
5190 transformations:
5192 A == 0? A : -A same as -A
5193 A != 0? A : -A same as A
5194 A >= 0? A : -A same as abs (A)
5195 A > 0? A : -A same as abs (A)
5196 A <= 0? A : -A same as -abs (A)
5197 A < 0? A : -A same as -abs (A)
5199 None of these transformations work for modes with signed
5200 zeros. If A is +/-0, the first two transformations will
5201 change the sign of the result (from +0 to -0, or vice
5202 versa). The last four will fix the sign of the result,
5203 even though the original expressions could be positive or
5204 negative, depending on the sign of A.
5206 Note that all these transformations are correct if A is
5207 NaN, since the two alternatives (A and -A) are also NaNs. */
5208 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5209 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5210 ? real_zerop (arg01)
5211 : integer_zerop (arg01))
5212 && ((TREE_CODE (arg2) == NEGATE_EXPR
5213 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5214 /* In the case that A is of the form X-Y, '-A' (arg2) may
5215 have already been folded to Y-X, check for that. */
5216 || (TREE_CODE (arg1) == MINUS_EXPR
5217 && TREE_CODE (arg2) == MINUS_EXPR
5218 && operand_equal_p (TREE_OPERAND (arg1, 0),
5219 TREE_OPERAND (arg2, 1), 0)
5220 && operand_equal_p (TREE_OPERAND (arg1, 1),
5221 TREE_OPERAND (arg2, 0), 0))))
5222 switch (comp_code)
5224 case EQ_EXPR:
5225 case UNEQ_EXPR:
5226 tem = fold_convert_loc (loc, arg1_type, arg1);
5227 return fold_convert_loc (loc, type, negate_expr (tem));
5228 case NE_EXPR:
5229 case LTGT_EXPR:
5230 return fold_convert_loc (loc, type, arg1);
5231 case UNGE_EXPR:
5232 case UNGT_EXPR:
5233 if (flag_trapping_math)
5234 break;
5235 /* Fall through. */
5236 case GE_EXPR:
5237 case GT_EXPR:
5238 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5239 break;
5240 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5241 return fold_convert_loc (loc, type, tem);
5242 case UNLE_EXPR:
5243 case UNLT_EXPR:
5244 if (flag_trapping_math)
5245 break;
5246 /* FALLTHRU */
5247 case LE_EXPR:
5248 case LT_EXPR:
5249 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5250 break;
5251 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5252 return negate_expr (fold_convert_loc (loc, type, tem));
5253 default:
5254 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5255 break;
5258 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5259 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5260 both transformations are correct when A is NaN: A != 0
5261 is then true, and A == 0 is false. */
5263 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5264 && integer_zerop (arg01) && integer_zerop (arg2))
5266 if (comp_code == NE_EXPR)
5267 return fold_convert_loc (loc, type, arg1);
5268 else if (comp_code == EQ_EXPR)
5269 return build_zero_cst (type);
5272 /* Try some transformations of A op B ? A : B.
5274 A == B? A : B same as B
5275 A != B? A : B same as A
5276 A >= B? A : B same as max (A, B)
5277 A > B? A : B same as max (B, A)
5278 A <= B? A : B same as min (A, B)
5279 A < B? A : B same as min (B, A)
5281 As above, these transformations don't work in the presence
5282 of signed zeros. For example, if A and B are zeros of
5283 opposite sign, the first two transformations will change
5284 the sign of the result. In the last four, the original
5285 expressions give different results for (A=+0, B=-0) and
5286 (A=-0, B=+0), but the transformed expressions do not.
5288 The first two transformations are correct if either A or B
5289 is a NaN. In the first transformation, the condition will
5290 be false, and B will indeed be chosen. In the case of the
5291 second transformation, the condition A != B will be true,
5292 and A will be chosen.
5294 The conversions to max() and min() are not correct if B is
5295 a number and A is not. The conditions in the original
5296 expressions will be false, so all four give B. The min()
5297 and max() versions would give a NaN instead. */
5298 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5299 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5300 /* Avoid these transformations if the COND_EXPR may be used
5301 as an lvalue in the C++ front-end. PR c++/19199. */
5302 && (in_gimple_form
5303 || VECTOR_TYPE_P (type)
5304 || (! lang_GNU_CXX ()
5305 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5306 || ! maybe_lvalue_p (arg1)
5307 || ! maybe_lvalue_p (arg2)))
5309 tree comp_op0 = arg00;
5310 tree comp_op1 = arg01;
5311 tree comp_type = TREE_TYPE (comp_op0);
5313 switch (comp_code)
5315 case EQ_EXPR:
5316 return fold_convert_loc (loc, type, arg2);
5317 case NE_EXPR:
5318 return fold_convert_loc (loc, type, arg1);
5319 case LE_EXPR:
5320 case LT_EXPR:
5321 case UNLE_EXPR:
5322 case UNLT_EXPR:
5323 /* In C++ a ?: expression can be an lvalue, so put the
5324 operand which will be used if they are equal first
5325 so that we can convert this back to the
5326 corresponding COND_EXPR. */
5327 if (!HONOR_NANS (arg1))
5329 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5330 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5331 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5332 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5333 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5334 comp_op1, comp_op0);
5335 return fold_convert_loc (loc, type, tem);
5337 break;
5338 case GE_EXPR:
5339 case GT_EXPR:
5340 case UNGE_EXPR:
5341 case UNGT_EXPR:
5342 if (!HONOR_NANS (arg1))
5344 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5345 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5346 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5347 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5348 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5349 comp_op1, comp_op0);
5350 return fold_convert_loc (loc, type, tem);
5352 break;
5353 case UNEQ_EXPR:
5354 if (!HONOR_NANS (arg1))
5355 return fold_convert_loc (loc, type, arg2);
5356 break;
5357 case LTGT_EXPR:
5358 if (!HONOR_NANS (arg1))
5359 return fold_convert_loc (loc, type, arg1);
5360 break;
5361 default:
5362 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5363 break;
5367 return NULL_TREE;
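
/* Illustrative sketch, not part of GCC: the "A op 0 ? A : -A" family
   above in its simplest integer form, where the fold to ABS_EXPR needs
   no guard because integers have neither signed zeros nor NaNs (though
   -a is undefined for INT_MIN).  The helper name is invented.  */

static int
abs_via_cond (int a)
{
  return a >= 0 ? a : -a;   /* folds to abs (A) */
}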
5372 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5373 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5374 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5375 false) >= 2)
5376 #endif
5378 /* EXP is some logical combination of boolean tests. See if we can
5379 merge it into some range test. Return the new tree if so. */
5381 static tree
5382 fold_range_test (location_t loc, enum tree_code code, tree type,
5383 tree op0, tree op1)
5385 int or_op = (code == TRUTH_ORIF_EXPR
5386 || code == TRUTH_OR_EXPR);
5387 int in0_p, in1_p, in_p;
5388 tree low0, low1, low, high0, high1, high;
5389 bool strict_overflow_p = false;
5390 tree tem, lhs, rhs;
5391 const char * const warnmsg = G_("assuming signed overflow does not occur "
5392 "when simplifying range test");
5394 if (!INTEGRAL_TYPE_P (type))
5395 return 0;
5397 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5398 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5400 /* If this is an OR operation, invert both sides; we will invert
5401 again at the end. */
5402 if (or_op)
5403 in0_p = ! in0_p, in1_p = ! in1_p;
5405 /* If both expressions are the same, if we can merge the ranges, and we
5406 can build the range test, return it or it inverted. If one of the
5407 ranges is always true or always false, consider it to be the same
5408 expression as the other. */
5409 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5410 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5411 in1_p, low1, high1)
5412 && 0 != (tem = (build_range_check (loc, type,
5413 lhs != 0 ? lhs
5414 : rhs != 0 ? rhs : integer_zero_node,
5415 in_p, low, high))))
5417 if (strict_overflow_p)
5418 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5419 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5422 /* On machines where the branch cost is expensive, if this is a
5423 short-circuited branch and the underlying object on both sides
5424 is the same, make a non-short-circuit operation. */
5425 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5426 && lhs != 0 && rhs != 0
5427 && (code == TRUTH_ANDIF_EXPR
5428 || code == TRUTH_ORIF_EXPR)
5429 && operand_equal_p (lhs, rhs, 0))
5431 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5432 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5433 which cases we can't do this. */
5434 if (simple_operand_p (lhs))
5435 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5436 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5437 type, op0, op1);
5439 else if (!lang_hooks.decls.global_bindings_p ()
5440 && !CONTAINS_PLACEHOLDER_P (lhs))
5442 tree common = save_expr (lhs);
5444 if (0 != (lhs = build_range_check (loc, type, common,
5445 or_op ? ! in0_p : in0_p,
5446 low0, high0))
5447 && (0 != (rhs = build_range_check (loc, type, common,
5448 or_op ? ! in1_p : in1_p,
5449 low1, high1))))
5451 if (strict_overflow_p)
5452 fold_overflow_warning (warnmsg,
5453 WARN_STRICT_OVERFLOW_COMPARISON);
5454 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5455 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5456 type, lhs, rhs);
5461 return 0;
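
/* Illustrative sketch, not part of GCC: the two rewrites performed
   above.  A pair of compares against constants merges into one range
   test, and when both operands are simple a short-circuit && may become
   a branchless bitwise AND on targets with expensive branches.  The
   helper names are invented.  */

static int
is_digit (int ch)
{
  return (unsigned int) ch - '0' <= 9u;   /* ch >= '0' && ch <= '9' */
}

static int
no_short_circuit (int x)
{
  return (x > 0) & (x < 10);   /* TRUTH_ANDIF_EXPR -> TRUTH_AND_EXPR */
}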
5464 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5465 bit value. Arrange things so the extra bits will be set to zero if and
5466 only if C is sign-extended to its full width. If MASK is nonzero,
5467 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5469 static tree
5470 unextend (tree c, int p, int unsignedp, tree mask)
5472 tree type = TREE_TYPE (c);
5473 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5474 tree temp;
5476 if (p == modesize || unsignedp)
5477 return c;
5479 /* We work by getting just the sign bit into the low-order bit, then
5480 into the high-order bit, then sign-extend. We then XOR that value
5481 with C. */
5482 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5484 /* We must use a signed type in order to get an arithmetic right shift.
5485 However, we must also avoid introducing accidental overflows, so that
5486 a subsequent call to integer_zerop will work. Hence we must
5487 do the type conversion here. At this point, the constant is either
5488 zero or one, and the conversion to a signed type can never overflow.
5489 We could get an overflow if this conversion is done anywhere else. */
5490 if (TYPE_UNSIGNED (type))
5491 temp = fold_convert (signed_type_for (type), temp);
5493 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5494 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5495 if (mask != 0)
5496 temp = const_binop (BIT_AND_EXPR, temp,
5497 fold_convert (TREE_TYPE (c), mask));
5498 /* If necessary, convert the type back to match the type of C. */
5499 if (TYPE_UNSIGNED (type))
5500 temp = fold_convert (type, temp);
5502 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
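
/* Illustrative sketch, not part of GCC: the standard XOR trick for
   sign-extending a P-bit value, the same sign-bit manipulation the
   comment above describes (isolate the sign bit, propagate it into the
   upper bits).  Assumes two's complement and 0 < P <= 32; the helper
   name is invented.  */

static int
sign_extend_p_bits (unsigned int v, int p)
{
  unsigned int m = 1u << (p - 1);   /* sign bit of the P-bit field */
  return (int) ((v ^ m) - m);
}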
5505 /* For an expression that has the form
5506 (A && B) || ~B
5507 or
5508 (A || B) && ~B,
5509 we can drop one of the inner expressions and simplify to
5510 A || ~B
5511 or
5512 A && ~B
5513 LOC is the location of the resulting expression. OP is the inner
5514 logical operation; the left-hand side in the examples above, while CMPOP
5515 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5516 removing a condition that guards another, as in
5517 (A != NULL && A->...) || A == NULL
5518 which we must not transform. If RHS_ONLY is true, only eliminate the
5519 right-most operand of the inner logical operation. */
5521 static tree
5522 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5523 bool rhs_only)
5525 tree type = TREE_TYPE (cmpop);
5526 enum tree_code code = TREE_CODE (cmpop);
5527 enum tree_code truthop_code = TREE_CODE (op);
5528 tree lhs = TREE_OPERAND (op, 0);
5529 tree rhs = TREE_OPERAND (op, 1);
5530 tree orig_lhs = lhs, orig_rhs = rhs;
5531 enum tree_code rhs_code = TREE_CODE (rhs);
5532 enum tree_code lhs_code = TREE_CODE (lhs);
5533 enum tree_code inv_code;
5535 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5536 return NULL_TREE;
5538 if (TREE_CODE_CLASS (code) != tcc_comparison)
5539 return NULL_TREE;
5541 if (rhs_code == truthop_code)
5543 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5544 if (newrhs != NULL_TREE)
5546 rhs = newrhs;
5547 rhs_code = TREE_CODE (rhs);
5550 if (lhs_code == truthop_code && !rhs_only)
5552 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5553 if (newlhs != NULL_TREE)
5555 lhs = newlhs;
5556 lhs_code = TREE_CODE (lhs);
5560 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5561 if (inv_code == rhs_code
5562 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5563 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5564 return lhs;
5565 if (!rhs_only && inv_code == lhs_code
5566 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5567 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5568 return rhs;
5569 if (rhs != orig_rhs || lhs != orig_lhs)
5570 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5571 lhs, rhs);
5572 return NULL_TREE;
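
/* Illustrative sketch, not part of GCC: the simplification above on
   concrete comparisons.  Since b >= 4 is the inversion of b < 4, the
   inner comparison can be dropped:
     (a > 0 && b < 4) || b >= 4   ==>   a > 0 || b >= 4
   whereas a guard such as (A != NULL && A->...) || A == NULL must
   survive, which is what RHS_ONLY protects.  The helper name is
   invented.  */

static int
merged (int a, int b)
{
  return a > 0 || b >= 4;
}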
5575 /* Find ways of folding logical expressions of LHS and RHS:
5576 Try to merge two comparisons to the same innermost item.
5577 Look for range tests like "ch >= '0' && ch <= '9'".
5578 Look for combinations of simple terms on machines with expensive branches
5579 and evaluate the RHS unconditionally.
5581 For example, if we have p->a == 2 && p->b == 4 and we can make an
5582 object large enough to span both A and B, we can do this with a comparison
5583 against the object ANDed with the a mask.
5585 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5586 operations to do this with one comparison.
5588 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5589 function and the one above.
5591 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5592 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5594 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5595 two operands.
5597 We return the simplified tree or 0 if no optimization is possible. */
5599 static tree
5600 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5601 tree lhs, tree rhs)
5603 /* If this is the "or" of two comparisons, we can do something if
5604 the comparisons are NE_EXPR. If this is the "and", we can do something
5605 if the comparisons are EQ_EXPR. I.e.,
5606 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5608 WANTED_CODE is this operation code. For single bit fields, we can
5609 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5610 comparison for one-bit fields. */
5612 enum tree_code wanted_code;
5613 enum tree_code lcode, rcode;
5614 tree ll_arg, lr_arg, rl_arg, rr_arg;
5615 tree ll_inner, lr_inner, rl_inner, rr_inner;
5616 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5617 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5618 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5619 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5620 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5621 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5622 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5623 machine_mode lnmode, rnmode;
5624 tree ll_mask, lr_mask, rl_mask, rr_mask;
5625 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5626 tree l_const, r_const;
5627 tree lntype, rntype, result;
5628 HOST_WIDE_INT first_bit, end_bit;
5629 int volatilep;
5631 /* Start by getting the comparison codes. Fail if anything is volatile.
5632 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5633 it were surrounded with a NE_EXPR. */
5635 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5636 return 0;
5638 lcode = TREE_CODE (lhs);
5639 rcode = TREE_CODE (rhs);
5641 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5643 lhs = build2 (NE_EXPR, truth_type, lhs,
5644 build_int_cst (TREE_TYPE (lhs), 0));
5645 lcode = NE_EXPR;
5648 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5650 rhs = build2 (NE_EXPR, truth_type, rhs,
5651 build_int_cst (TREE_TYPE (rhs), 0));
5652 rcode = NE_EXPR;
5655 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5656 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5657 return 0;
5659 ll_arg = TREE_OPERAND (lhs, 0);
5660 lr_arg = TREE_OPERAND (lhs, 1);
5661 rl_arg = TREE_OPERAND (rhs, 0);
5662 rr_arg = TREE_OPERAND (rhs, 1);
5664 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5665 if (simple_operand_p (ll_arg)
5666 && simple_operand_p (lr_arg))
5668 if (operand_equal_p (ll_arg, rl_arg, 0)
5669 && operand_equal_p (lr_arg, rr_arg, 0))
5671 result = combine_comparisons (loc, code, lcode, rcode,
5672 truth_type, ll_arg, lr_arg);
5673 if (result)
5674 return result;
5676 else if (operand_equal_p (ll_arg, rr_arg, 0)
5677 && operand_equal_p (lr_arg, rl_arg, 0))
5679 result = combine_comparisons (loc, code, lcode,
5680 swap_tree_comparison (rcode),
5681 truth_type, ll_arg, lr_arg);
5682 if (result)
5683 return result;
5687 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5688 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5690 /* If the RHS can be evaluated unconditionally and its operands are
5691 simple, it wins to evaluate the RHS unconditionally on machines
5692 with expensive branches. In this case, this isn't a comparison
5693 that can be merged. */
5695 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5696 false) >= 2
5697 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5698 && simple_operand_p (rl_arg)
5699 && simple_operand_p (rr_arg))
5701 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5702 if (code == TRUTH_OR_EXPR
5703 && lcode == NE_EXPR && integer_zerop (lr_arg)
5704 && rcode == NE_EXPR && integer_zerop (rr_arg)
5705 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5706 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5707 return build2_loc (loc, NE_EXPR, truth_type,
5708 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5709 ll_arg, rl_arg),
5710 build_int_cst (TREE_TYPE (ll_arg), 0));
5712 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5713 if (code == TRUTH_AND_EXPR
5714 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5715 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5716 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5717 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5718 return build2_loc (loc, EQ_EXPR, truth_type,
5719 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5720 ll_arg, rl_arg),
5721 build_int_cst (TREE_TYPE (ll_arg), 0));
5724 /* See if the comparisons can be merged. Then get all the parameters for
5725 each side. */
5727 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5728 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5729 return 0;
5731 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5732 volatilep = 0;
5733 ll_inner = decode_field_reference (loc, &ll_arg,
5734 &ll_bitsize, &ll_bitpos, &ll_mode,
5735 &ll_unsignedp, &ll_reversep, &volatilep,
5736 &ll_mask, &ll_and_mask);
5737 lr_inner = decode_field_reference (loc, &lr_arg,
5738 &lr_bitsize, &lr_bitpos, &lr_mode,
5739 &lr_unsignedp, &lr_reversep, &volatilep,
5740 &lr_mask, &lr_and_mask);
5741 rl_inner = decode_field_reference (loc, &rl_arg,
5742 &rl_bitsize, &rl_bitpos, &rl_mode,
5743 &rl_unsignedp, &rl_reversep, &volatilep,
5744 &rl_mask, &rl_and_mask);
5745 rr_inner = decode_field_reference (loc, &rr_arg,
5746 &rr_bitsize, &rr_bitpos, &rr_mode,
5747 &rr_unsignedp, &rr_reversep, &volatilep,
5748 &rr_mask, &rr_and_mask);
5750 /* The inner operation on the lhs of each comparison must be the
5751 same if we are to be able to do anything.
5752 Then see if we have constants. If not, the same must be true for
5753 the rhs's. */
5754 if (volatilep
5755 || ll_reversep != rl_reversep
5756 || ll_inner == 0 || rl_inner == 0
5757 || ! operand_equal_p (ll_inner, rl_inner, 0))
5758 return 0;
5760 if (TREE_CODE (lr_arg) == INTEGER_CST
5761 && TREE_CODE (rr_arg) == INTEGER_CST)
5763 l_const = lr_arg, r_const = rr_arg;
5764 lr_reversep = ll_reversep;
5766 else if (lr_reversep != rr_reversep
5767 || lr_inner == 0 || rr_inner == 0
5768 || ! operand_equal_p (lr_inner, rr_inner, 0))
5769 return 0;
5770 else
5771 l_const = r_const = 0;
5773 /* If either comparison code is not correct for our logical operation,
5774 fail. However, we can convert a one-bit comparison against zero into
5775 the opposite comparison against that bit being set in the field. */
5777 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5778 if (lcode != wanted_code)
5780 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5782 /* Make the left operand unsigned, since we are only interested
5783 in the value of one bit. Otherwise we are doing the wrong
5784 thing below. */
5785 ll_unsignedp = 1;
5786 l_const = ll_mask;
5788 else
5789 return 0;
5792 /* This is analogous to the code for l_const above. */
5793 if (rcode != wanted_code)
5795 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5797 rl_unsignedp = 1;
5798 r_const = rl_mask;
5800 else
5801 return 0;
5804 /* See if we can find a mode that contains both fields being compared on
5805 the left. If we can't, fail. Otherwise, update all constants and masks
5806 to be relative to a field of that size. */
5807 first_bit = MIN (ll_bitpos, rl_bitpos);
5808 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5809 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5810 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5811 volatilep);
5812 if (lnmode == VOIDmode)
5813 return 0;
5815 lnbitsize = GET_MODE_BITSIZE (lnmode);
5816 lnbitpos = first_bit & ~ (lnbitsize - 1);
5817 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5818 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5820 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5822 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5823 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5826 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5827 size_int (xll_bitpos));
5828 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5829 size_int (xrl_bitpos));
5831 if (l_const)
5833 l_const = fold_convert_loc (loc, lntype, l_const);
5834 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5835 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5836 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5837 fold_build1_loc (loc, BIT_NOT_EXPR,
5838 lntype, ll_mask))))
5840 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5842 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5845 if (r_const)
5847 r_const = fold_convert_loc (loc, lntype, r_const);
5848 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5849 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5850 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5851 fold_build1_loc (loc, BIT_NOT_EXPR,
5852 lntype, rl_mask))))
5854 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5856 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5860 /* If the right sides are not constant, do the same for them. Also,
5861 disallow this optimization if a size or signedness mismatch occurs
5862 between the left and right sides. */
5863 if (l_const == 0)
5865 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5866 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5867 /* Make sure the two fields on the right
5868 correspond to the left without being swapped. */
5869 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5870 return 0;
5872 first_bit = MIN (lr_bitpos, rr_bitpos);
5873 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5874 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5875 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5876 volatilep);
5877 if (rnmode == VOIDmode)
5878 return 0;
5880 rnbitsize = GET_MODE_BITSIZE (rnmode);
5881 rnbitpos = first_bit & ~ (rnbitsize - 1);
5882 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5883 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5885 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5887 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5888 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5891 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5892 rntype, lr_mask),
5893 size_int (xlr_bitpos));
5894 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5895 rntype, rr_mask),
5896 size_int (xrr_bitpos));
5898 /* Make a mask that corresponds to both fields being compared.
5899 Do this for both items being compared. If the operands are the
5900 same size and the bits being compared are in the same position
5901 then we can do this by masking both and comparing the masked
5902 results. */
5903 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5904 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5905 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5907 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
5908 lntype, lnbitsize, lnbitpos,
5909 ll_unsignedp || rl_unsignedp, ll_reversep);
5910 if (! all_ones_mask_p (ll_mask, lnbitsize))
5911 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5913 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
5914 rntype, rnbitsize, rnbitpos,
5915 lr_unsignedp || rr_unsignedp, lr_reversep);
5916 if (! all_ones_mask_p (lr_mask, rnbitsize))
5917 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5919 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5922 /* There is still another way we can do something: If both pairs of
5923 fields being compared are adjacent, we may be able to make a wider
5924 field containing them both.
5926 Note that we still must mask the lhs/rhs expressions. Furthermore,
5927 the mask must be shifted to account for the shift done by
5928 make_bit_field_ref. */
5929 if ((ll_bitsize + ll_bitpos == rl_bitpos
5930 && lr_bitsize + lr_bitpos == rr_bitpos)
5931 || (ll_bitpos == rl_bitpos + rl_bitsize
5932 && lr_bitpos == rr_bitpos + rr_bitsize))
5934 tree type;
5936 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
5937 ll_bitsize + rl_bitsize,
5938 MIN (ll_bitpos, rl_bitpos),
5939 ll_unsignedp, ll_reversep);
5940 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
5941 lr_bitsize + rr_bitsize,
5942 MIN (lr_bitpos, rr_bitpos),
5943 lr_unsignedp, lr_reversep);
5945 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5946 size_int (MIN (xll_bitpos, xrl_bitpos)));
5947 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5948 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5950 /* Convert to the smaller type before masking out unwanted bits. */
5951 type = lntype;
5952 if (lntype != rntype)
5954 if (lnbitsize > rnbitsize)
5956 lhs = fold_convert_loc (loc, rntype, lhs);
5957 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5958 type = rntype;
5960 else if (lnbitsize < rnbitsize)
5962 rhs = fold_convert_loc (loc, lntype, rhs);
5963 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5964 type = lntype;
5968 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5969 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5971 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5972 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5974 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5977 return 0;
5980 /* Handle the case of comparisons with constants. If there is something in
5981 common between the masks, those bits of the constants must be the same.
5982 If not, the condition is always false. Test for this to avoid generating
5983 incorrect code below. */
5984 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5985 if (! integer_zerop (result)
5986 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5987 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5989 if (wanted_code == NE_EXPR)
5991 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5992 return constant_boolean_node (true, truth_type);
5994 else
5996 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5997 return constant_boolean_node (false, truth_type);
6001 /* Construct the expression we will return. First get the component
6002 reference we will make. Unless the mask is all ones the width of
6003 that field, perform the mask operation. Then compare with the
6004 merged constant. */
6005 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6006 lntype, lnbitsize, lnbitpos,
6007 ll_unsignedp || rl_unsignedp, ll_reversep);
6009 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6010 if (! all_ones_mask_p (ll_mask, lnbitsize))
6011 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6013 return build2_loc (loc, wanted_code, truth_type, result,
6014 const_binop (BIT_IOR_EXPR, l_const, r_const));
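/* Source-level sketch of one transform above (hypothetical function,
   illustrative only): on targets with expensive branches, two compares
   against zero collapse into a single compare of the ORed operands.  */

static int
truth_andor_example (unsigned a, unsigned b)
{
  /* (a == 0) && (b == 0)  becomes  (a | b) == 0, trading a branch for a
     bitwise OR; the disjunctive form (a != 0) || (b != 0) similarly
     becomes (a | b) != 0.  */
  return (a | b) == 0;
}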
6017 /* T is an integer expression that is being multiplied or divided by, or
6018 reduced modulo (CODE says which, and what kind of divide or modulus), a
6019 constant C. See if we can eliminate that operation by folding it with
6020 other operations already in T. WIDE_TYPE, if non-null, is a type that
6021 should be used for the computation if wider than our type.
6023 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6024 (X * 2) + (Y * 4). We must, however, be assured that either the original
6025 expression would not overflow or that overflow is undefined for the type
6026 in the language in question.
6028 If we return a non-null expression, it is an equivalent form of the
6029 original computation, but need not be in the original type.
6031 We set *STRICT_OVERFLOW_P to true if the return value depends on
6032 signed overflow being undefined. Otherwise we do not change
6033 *STRICT_OVERFLOW_P. */
6035 static tree
6036 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6037 bool *strict_overflow_p)
6039 /* To avoid exponential search depth, refuse to allow recursion past
6040 three levels. Beyond that (1) it's highly unlikely that we'll find
6041 something interesting and (2) we've probably processed it before
6042 when we built the inner expression. */
6044 static int depth;
6045 tree ret;
6047 if (depth > 3)
6048 return NULL;
6050 depth++;
6051 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6052 depth--;
6054 return ret;
6057 static tree
6058 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6059 bool *strict_overflow_p)
6061 tree type = TREE_TYPE (t);
6062 enum tree_code tcode = TREE_CODE (t);
6063 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6064 > GET_MODE_SIZE (TYPE_MODE (type)))
6065 ? wide_type : type);
6066 tree t1, t2;
6067 int same_p = tcode == code;
6068 tree op0 = NULL_TREE, op1 = NULL_TREE;
6069 bool sub_strict_overflow_p;
6071 /* Don't deal with constants of zero here; they confuse the code below. */
6072 if (integer_zerop (c))
6073 return NULL_TREE;
6075 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6076 op0 = TREE_OPERAND (t, 0);
6078 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6079 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6081 /* Note that we need not handle conditional operations here since fold
6082 already handles those cases. So just do arithmetic here. */
6083 switch (tcode)
6085 case INTEGER_CST:
6086 /* For a constant, we can always simplify if we are a multiply
6087 or (for divide and modulus) if it is a multiple of our constant. */
6088 if (code == MULT_EXPR
6089 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6091 tree tem = const_binop (code, fold_convert (ctype, t),
6092 fold_convert (ctype, c));
6093 /* If the multiplication overflowed, we lost information on it.
6094 See PR68142 and PR69845. */
6095 if (TREE_OVERFLOW (tem))
6096 return NULL_TREE;
6097 return tem;
6099 break;
6101 CASE_CONVERT: case NON_LVALUE_EXPR:
6102 /* If op0 is an expression ... */
6103 if ((COMPARISON_CLASS_P (op0)
6104 || UNARY_CLASS_P (op0)
6105 || BINARY_CLASS_P (op0)
6106 || VL_EXP_CLASS_P (op0)
6107 || EXPRESSION_CLASS_P (op0))
6108 /* ... and has wrapping overflow, and its type is smaller
6109 than ctype, then we cannot pass through as widening. */
6110 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6111 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6112 && (TYPE_PRECISION (ctype)
6113 > TYPE_PRECISION (TREE_TYPE (op0))))
6114 /* ... or this is a truncation (t is narrower than op0),
6115 then we cannot pass through this narrowing. */
6116 || (TYPE_PRECISION (type)
6117 < TYPE_PRECISION (TREE_TYPE (op0)))
6118 /* ... or signedness changes for division or modulus,
6119 then we cannot pass through this conversion. */
6120 || (code != MULT_EXPR
6121 && (TYPE_UNSIGNED (ctype)
6122 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6123 /* ... or has undefined overflow while the converted to
6124 type has not, we cannot do the operation in the inner type
6125 as that would introduce undefined overflow. */
6126 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6127 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6128 && !TYPE_OVERFLOW_UNDEFINED (type))))
6129 break;
6131 /* Pass the constant down and see if we can make a simplification. If
6132 we can, replace this expression with the inner simplification for
6133 possible later conversion to our or some other type. */
6134 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6135 && TREE_CODE (t2) == INTEGER_CST
6136 && !TREE_OVERFLOW (t2)
6137 && (0 != (t1 = extract_muldiv (op0, t2, code,
6138 code == MULT_EXPR
6139 ? ctype : NULL_TREE,
6140 strict_overflow_p))))
6141 return t1;
6142 break;
6144 case ABS_EXPR:
6145 /* If widening the type changes it from signed to unsigned, then we
6146 must avoid building ABS_EXPR itself as unsigned. */
6147 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6149 tree cstype = (*signed_type_for) (ctype);
6150 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6151 != 0)
6153 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6154 return fold_convert (ctype, t1);
6156 break;
6158 /* If the constant is negative, we cannot simplify this. */
6159 if (tree_int_cst_sgn (c) == -1)
6160 break;
6161 /* FALLTHROUGH */
6162 case NEGATE_EXPR:
6163 /* For division and modulus, type can't be unsigned, as e.g.
6164 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6165 For signed types, even with wrapping overflow, this is fine. */
6166 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6167 break;
6168 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6169 != 0)
6170 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6171 break;
6173 case MIN_EXPR: case MAX_EXPR:
6174 /* If widening the type changes the signedness, then we can't perform
6175 this optimization as that changes the result. */
6176 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6177 break;
6179 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6180 sub_strict_overflow_p = false;
6181 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6182 &sub_strict_overflow_p)) != 0
6183 && (t2 = extract_muldiv (op1, c, code, wide_type,
6184 &sub_strict_overflow_p)) != 0)
6186 if (tree_int_cst_sgn (c) < 0)
6187 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6188 if (sub_strict_overflow_p)
6189 *strict_overflow_p = true;
6190 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6191 fold_convert (ctype, t2));
6193 break;
6195 case LSHIFT_EXPR: case RSHIFT_EXPR:
6196 /* If the second operand is constant, this is a multiplication
6197 or floor division, by a power of two, so we can treat it that
6198 way unless the multiplier or divisor overflows. Signed
6199 left-shift overflow is implementation-defined rather than
6200 undefined in C90, so do not convert signed left shift into
6201 multiplication. */
6202 if (TREE_CODE (op1) == INTEGER_CST
6203 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6204 /* const_binop may not detect overflow correctly,
6205 so check for it explicitly here. */
6206 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6207 && 0 != (t1 = fold_convert (ctype,
6208 const_binop (LSHIFT_EXPR,
6209 size_one_node,
6210 op1)))
6211 && !TREE_OVERFLOW (t1))
6212 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6213 ? MULT_EXPR : FLOOR_DIV_EXPR,
6214 ctype,
6215 fold_convert (ctype, op0),
6216 t1),
6217 c, code, wide_type, strict_overflow_p);
6218 break;
6220 case PLUS_EXPR: case MINUS_EXPR:
6221 /* See if we can eliminate the operation on both sides. If we can, we
6222 can return a new PLUS or MINUS. If we can't, the only remaining
6223 cases where we can do anything are if the second operand is a
6224 constant. */
6225 sub_strict_overflow_p = false;
6226 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6227 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6228 if (t1 != 0 && t2 != 0
6229 && TYPE_OVERFLOW_WRAPS (ctype)
6230 && (code == MULT_EXPR
6231 /* If not multiplication, we can only do this if both operands
6232 are divisible by c. */
6233 || (multiple_of_p (ctype, op0, c)
6234 && multiple_of_p (ctype, op1, c))))
6236 if (sub_strict_overflow_p)
6237 *strict_overflow_p = true;
6238 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6239 fold_convert (ctype, t2));
6242 /* If this was a subtraction, negate OP1 and set it to be an addition.
6243 This simplifies the logic below. */
6244 if (tcode == MINUS_EXPR)
6246 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6247 /* If OP1 was not easily negatable, the constant may be OP0. */
6248 if (TREE_CODE (op0) == INTEGER_CST)
6250 std::swap (op0, op1);
6251 std::swap (t1, t2);
6255 if (TREE_CODE (op1) != INTEGER_CST)
6256 break;
6258 /* If either OP1 or C are negative, this optimization is not safe for
6259 some of the division and remainder types while for others we need
6260 to change the code. */
6261 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6263 if (code == CEIL_DIV_EXPR)
6264 code = FLOOR_DIV_EXPR;
6265 else if (code == FLOOR_DIV_EXPR)
6266 code = CEIL_DIV_EXPR;
6267 else if (code != MULT_EXPR
6268 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6269 break;
6272 /* If it's a multiply or a division/modulus operation of a multiple
6273 of our constant, do the operation and verify it doesn't overflow. */
6274 if (code == MULT_EXPR
6275 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6277 op1 = const_binop (code, fold_convert (ctype, op1),
6278 fold_convert (ctype, c));
6279 /* We allow the constant to overflow with wrapping semantics. */
6280 if (op1 == 0
6281 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6282 break;
6284 else
6285 break;
6287 /* If we have an unsigned type, we cannot widen the operation since it
6288 will change the result if the original computation overflowed. */
6289 if (TYPE_UNSIGNED (ctype) && ctype != type)
6290 break;
6292 /* The last case is if we are a multiply. In that case, we can
6293 apply the distributive law to commute the multiply and addition
6294 if the multiplication of the constants doesn't overflow
6295 and overflow is defined. With undefined overflow
6296 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6297 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6298 return fold_build2 (tcode, ctype,
6299 fold_build2 (code, ctype,
6300 fold_convert (ctype, op0),
6301 fold_convert (ctype, c)),
6302 op1);
6304 break;
6306 case MULT_EXPR:
6307 /* We have a special case here if we are doing something like
6308 (C * 8) % 4 since we know that's zero. */
6309 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6310 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6311 /* If the multiplication can overflow we cannot optimize this. */
6312 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6313 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6314 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6316 *strict_overflow_p = true;
6317 return omit_one_operand (type, integer_zero_node, op0);
6320 /* ... fall through ... */
6322 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6323 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6324 /* If we can extract our operation from the LHS, do so and return a
6325 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6326 do something only if the second operand is a constant. */
6327 if (same_p
6328 && TYPE_OVERFLOW_WRAPS (ctype)
6329 && (t1 = extract_muldiv (op0, c, code, wide_type,
6330 strict_overflow_p)) != 0)
6331 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6332 fold_convert (ctype, op1));
6333 else if (tcode == MULT_EXPR && code == MULT_EXPR
6334 && TYPE_OVERFLOW_WRAPS (ctype)
6335 && (t1 = extract_muldiv (op1, c, code, wide_type,
6336 strict_overflow_p)) != 0)
6337 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6338 fold_convert (ctype, t1));
6339 else if (TREE_CODE (op1) != INTEGER_CST)
6340 return 0;
6342 /* If these are the same operation types, we can associate them
6343 assuming no overflow. */
6344 if (tcode == code)
6346 bool overflow_p = false;
6347 bool overflow_mul_p;
6348 signop sign = TYPE_SIGN (ctype);
6349 unsigned prec = TYPE_PRECISION (ctype);
6350 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6351 wi::to_wide (c, prec),
6352 sign, &overflow_mul_p);
6353 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6354 if (overflow_mul_p
6355 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6356 overflow_p = true;
6357 if (!overflow_p)
6358 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6359 wide_int_to_tree (ctype, mul));
6362 /* If these operations "cancel" each other, we have the main
6363 optimizations of this pass, which occur when either constant is a
6364 multiple of the other, in which case we replace this with either an
6365 operation of CODE or TCODE.
6367 If we have an unsigned type, we cannot do this since it will change
6368 the result if the original computation overflowed. */
6369 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6370 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6371 || (tcode == MULT_EXPR
6372 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6373 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6374 && code != MULT_EXPR)))
6376 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6378 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6379 *strict_overflow_p = true;
6380 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6381 fold_convert (ctype,
6382 const_binop (TRUNC_DIV_EXPR,
6383 op1, c)));
6385 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6387 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6388 *strict_overflow_p = true;
6389 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6390 fold_convert (ctype,
6391 const_binop (TRUNC_DIV_EXPR,
6392 c, op1)));
6395 break;
6397 default:
6398 break;
6401 return 0;
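/* Worked instance of the header example for extract_muldiv (hypothetical
   function; valid because each addend is an exact multiple of the
   divisor, so no rounding is lost):  */

static long
extract_muldiv_example (long x, long y)
{
  /* (x * 8 + y * 16) / 4  folds to  x * 2 + y * 4, assuming the original
     expression does not overflow or that overflow is undefined.  */
  return x * 2 + y * 4;
}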
6404 /* Return a node which has the indicated constant VALUE (either 0 or
6405 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6406 and is of the indicated TYPE. */
6408 tree
6409 constant_boolean_node (bool value, tree type)
6411 if (type == integer_type_node)
6412 return value ? integer_one_node : integer_zero_node;
6413 else if (type == boolean_type_node)
6414 return value ? boolean_true_node : boolean_false_node;
6415 else if (TREE_CODE (type) == VECTOR_TYPE)
6416 return build_vector_from_val (type,
6417 build_int_cst (TREE_TYPE (type),
6418 value ? -1 : 0));
6419 else
6420 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6424 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6425 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6426 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6427 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6428 COND is the first argument to CODE; otherwise (as in the example
6429 given here), it is the second argument. TYPE is the type of the
6430 original expression. Return NULL_TREE if no simplification is
6431 possible. */
6433 static tree
6434 fold_binary_op_with_conditional_arg (location_t loc,
6435 enum tree_code code,
6436 tree type, tree op0, tree op1,
6437 tree cond, tree arg, int cond_first_p)
6439 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6440 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6441 tree test, true_value, false_value;
6442 tree lhs = NULL_TREE;
6443 tree rhs = NULL_TREE;
6444 enum tree_code cond_code = COND_EXPR;
6446 if (TREE_CODE (cond) == COND_EXPR
6447 || TREE_CODE (cond) == VEC_COND_EXPR)
6449 test = TREE_OPERAND (cond, 0);
6450 true_value = TREE_OPERAND (cond, 1);
6451 false_value = TREE_OPERAND (cond, 2);
6452 /* If this operand is a void-typed expression (e.g. a throw-expression),
6453 it does not make sense to try to perform a logical or arithmetic
6454 operation involving it. */
6455 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6456 lhs = true_value;
6457 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6458 rhs = false_value;
6460 else if (!(TREE_CODE (type) != VECTOR_TYPE
6461 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6463 tree testtype = TREE_TYPE (cond);
6464 test = cond;
6465 true_value = constant_boolean_node (true, testtype);
6466 false_value = constant_boolean_node (false, testtype);
6468 else
6469 /* Detect the case of mixing vector and scalar types - bail out. */
6470 return NULL_TREE;
6472 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6473 cond_code = VEC_COND_EXPR;
6475 /* This transformation is only worthwhile if we don't have to wrap ARG
6476 in a SAVE_EXPR and the operation can be simplified without recursing
6477 on at least one of the branches once it is pushed inside the COND_EXPR. */
6478 if (!TREE_CONSTANT (arg)
6479 && (TREE_SIDE_EFFECTS (arg)
6480 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6481 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6482 return NULL_TREE;
6484 arg = fold_convert_loc (loc, arg_type, arg);
6485 if (lhs == 0)
6487 true_value = fold_convert_loc (loc, cond_type, true_value);
6488 if (cond_first_p)
6489 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6490 else
6491 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6493 if (rhs == 0)
6495 false_value = fold_convert_loc (loc, cond_type, false_value);
6496 if (cond_first_p)
6497 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6498 else
6499 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6502 /* Check that we have simplified at least one of the branches. */
6503 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6504 return NULL_TREE;
6506 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
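/* Source-level sketch of fold_binary_op_with_conditional_arg
   (hypothetical function): pushing the `+' inside the conditional lets
   each arm fold to a constant when its operands are constant.  */

static int
cond_arg_example (int a, int b)
{
  /* a + (b ? 3 : 5)  is rewritten as  b ? (a + 3) : (a + 5).  */
  return b ? a + 3 : a + 5;
}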
6510 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6512 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6513 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6514 ADDEND is the same as X.
6516 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6517 and finite. The problematic cases are when X is zero, and its mode
6518 has signed zeros. In the case of rounding towards -infinity,
6519 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6520 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6522 bool
6523 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6525 if (!real_zerop (addend))
6526 return false;
6528 /* Don't allow the fold with -fsignaling-nans. */
6529 if (HONOR_SNANS (element_mode (type)))
6530 return false;
6532 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6533 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6534 return true;
6536 /* In a vector or complex, we would need to check the sign of all zeros. */
6537 if (TREE_CODE (addend) != REAL_CST)
6538 return false;
6540 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6541 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6542 negate = !negate;
6544 /* The mode has signed zeros, and we have to honor their sign.
6545 In this situation, there is only one case we can return true for.
6546 X - 0 is the same as X unless rounding towards -infinity is
6547 supported. */
6548 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
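/* Concrete instance (illustrative only): under default round-to-nearest,
   x - 0.0 preserves x even when x is a signed zero, so the subtraction
   below may be folded to plain x; x + 0.0 may not, since -0.0 + 0.0
   yields +0.0.  */

static double
zero_addition_example (double x)
{
  /* Foldable to x unless rounding toward -infinity must be honored,
     in which case 0.0 - 0.0 is -0.0.  */
  return x - 0.0;
}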
6551 /* Subroutine of fold() that optimizes comparisons of a division by
6552 a nonzero integer constant against an integer constant, i.e.
6553 X/C1 op C2.
6555 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6556 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6557 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6559 The function returns the constant folded tree if a simplification
6560 can be made, and NULL_TREE otherwise. */
6562 static tree
6563 fold_div_compare (location_t loc,
6564 enum tree_code code, tree type, tree arg0, tree arg1)
6566 tree prod, tmp, hi, lo;
6567 tree arg00 = TREE_OPERAND (arg0, 0);
6568 tree arg01 = TREE_OPERAND (arg0, 1);
6569 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6570 bool neg_overflow = false;
6571 bool overflow;
6573 /* We have to do this the hard way to detect unsigned overflow.
6574 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6575 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6576 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6577 neg_overflow = false;
6579 if (sign == UNSIGNED)
6581 tmp = int_const_binop (MINUS_EXPR, arg01,
6582 build_int_cst (TREE_TYPE (arg01), 1));
6583 lo = prod;
6585 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6586 val = wi::add (prod, tmp, sign, &overflow);
6587 hi = force_fit_type (TREE_TYPE (arg00), val,
6588 -1, overflow | TREE_OVERFLOW (prod));
6590 else if (tree_int_cst_sgn (arg01) >= 0)
6592 tmp = int_const_binop (MINUS_EXPR, arg01,
6593 build_int_cst (TREE_TYPE (arg01), 1));
6594 switch (tree_int_cst_sgn (arg1))
6596 case -1:
6597 neg_overflow = true;
6598 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6599 hi = prod;
6600 break;
6602 case 0:
6603 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6604 hi = tmp;
6605 break;
6607 case 1:
6608 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6609 lo = prod;
6610 break;
6612 default:
6613 gcc_unreachable ();
6616 else
6618 /* A negative divisor reverses the relational operators. */
6619 code = swap_tree_comparison (code);
6621 tmp = int_const_binop (PLUS_EXPR, arg01,
6622 build_int_cst (TREE_TYPE (arg01), 1));
6623 switch (tree_int_cst_sgn (arg1))
6625 case -1:
6626 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6627 lo = prod;
6628 break;
6630 case 0:
6631 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6632 lo = tmp;
6633 break;
6635 case 1:
6636 neg_overflow = true;
6637 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6638 hi = prod;
6639 break;
6641 default:
6642 gcc_unreachable ();
6646 switch (code)
6648 case EQ_EXPR:
6649 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6650 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6651 if (TREE_OVERFLOW (hi))
6652 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6653 if (TREE_OVERFLOW (lo))
6654 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6655 return build_range_check (loc, type, arg00, 1, lo, hi);
6657 case NE_EXPR:
6658 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6659 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6660 if (TREE_OVERFLOW (hi))
6661 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6662 if (TREE_OVERFLOW (lo))
6663 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6664 return build_range_check (loc, type, arg00, 0, lo, hi);
6666 case LT_EXPR:
6667 if (TREE_OVERFLOW (lo))
6669 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6670 return omit_one_operand_loc (loc, type, tmp, arg00);
6672 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6674 case LE_EXPR:
6675 if (TREE_OVERFLOW (hi))
6677 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6678 return omit_one_operand_loc (loc, type, tmp, arg00);
6680 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6682 case GT_EXPR:
6683 if (TREE_OVERFLOW (hi))
6685 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6686 return omit_one_operand_loc (loc, type, tmp, arg00);
6688 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6690 case GE_EXPR:
6691 if (TREE_OVERFLOW (lo))
6693 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6694 return omit_one_operand_loc (loc, type, tmp, arg00);
6696 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6698 default:
6699 break;
6702 return NULL_TREE;
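/* Worked instance of fold_div_compare on unsigned operands (hypothetical
   function): x / 3 == 2 holds exactly for x in [6, 8], so the division
   is replaced by a range check.  */

static int
div_compare_example (unsigned x)
{
  /* x / 3 == 2  folds to the canonical range test  x - 6 <= 2.  */
  return x - 6 <= 2;
}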
6706 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6707 equality/inequality test, then return a simplified form of the test
6708 using a sign test. Otherwise return NULL. TYPE is the desired
6709 result type. */
6711 static tree
6712 fold_single_bit_test_into_sign_test (location_t loc,
6713 enum tree_code code, tree arg0, tree arg1,
6714 tree result_type)
6716 /* If this is testing a single bit, we can optimize the test. */
6717 if ((code == NE_EXPR || code == EQ_EXPR)
6718 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6719 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6721 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6722 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6723 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6725 if (arg00 != NULL_TREE
6726 /* This is only a win if casting to a signed type is cheap,
6727 i.e. when arg00's type is not a partial mode. */
6728 && TYPE_PRECISION (TREE_TYPE (arg00))
6729 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6731 tree stype = signed_type_for (TREE_TYPE (arg00));
6732 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6733 result_type,
6734 fold_convert_loc (loc, stype, arg00),
6735 build_int_cst (stype, 0));
6739 return NULL_TREE;
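/* Sketch of the sign-test form (hypothetical function): when the mask is
   exactly the sign bit, the bit test becomes an ordinary signed
   comparison against zero.  */

static int
sign_bit_test_example (int a)
{
  /* (a & 0x80000000) != 0  folds to  a < 0  on a 32-bit int.  */
  return a < 0;
}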
6742 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6743 equality/inequality test, then return a simplified form of
6744 the test using shifts and logical operations. Otherwise return
6745 NULL. TYPE is the desired result type. */
6747 tree
6748 fold_single_bit_test (location_t loc, enum tree_code code,
6749 tree arg0, tree arg1, tree result_type)
6751 /* If this is testing a single bit, we can optimize the test. */
6752 if ((code == NE_EXPR || code == EQ_EXPR)
6753 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6754 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6756 tree inner = TREE_OPERAND (arg0, 0);
6757 tree type = TREE_TYPE (arg0);
6758 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6759 machine_mode operand_mode = TYPE_MODE (type);
6760 int ops_unsigned;
6761 tree signed_type, unsigned_type, intermediate_type;
6762 tree tem, one;
6764 /* First, see if we can fold the single bit test into a sign-bit
6765 test. */
6766 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6767 result_type);
6768 if (tem)
6769 return tem;
6771 /* Otherwise we have (A & C) != 0 where C is a single bit,
6772 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6773 Similarly for (A & C) == 0. */
6775 /* If INNER is a right shift of a constant and it plus BITNUM does
6776 not overflow, adjust BITNUM and INNER. */
6777 if (TREE_CODE (inner) == RSHIFT_EXPR
6778 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6779 && bitnum < TYPE_PRECISION (type)
6780 && wi::ltu_p (TREE_OPERAND (inner, 1),
6781 TYPE_PRECISION (type) - bitnum))
6783 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6784 inner = TREE_OPERAND (inner, 0);
6787 /* If we are going to be able to omit the AND below, we must do our
6788 operations as unsigned. If we must use the AND, we have a choice.
6789 Normally unsigned is faster, but for some machines signed is. */
6790 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
6791 && !flag_syntax_only) ? 0 : 1;
6793 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6794 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6795 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6796 inner = fold_convert_loc (loc, intermediate_type, inner);
6798 if (bitnum != 0)
6799 inner = build2 (RSHIFT_EXPR, intermediate_type,
6800 inner, size_int (bitnum));
6802 one = build_int_cst (intermediate_type, 1);
6804 if (code == EQ_EXPR)
6805 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6807 /* Put the AND last so it can combine with more things. */
6808 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6810 /* Make sure to return the proper type. */
6811 inner = fold_convert_loc (loc, result_type, inner);
6813 return inner;
6815 return NULL_TREE;
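/* Sketch of the shift form (hypothetical function): testing one non-sign
   bit becomes a right shift by log2 of the mask followed by an AND with
   one; the EQ_EXPR case additionally XORs the result with one.  */

static int
single_bit_test_example (unsigned a)
{
  /* (a & 8) != 0  folds to  (a >> 3) & 1.  */
  return (a >> 3) & 1;
}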
6818 /* Test whether it is preferable to swap two operands, ARG0 and
6819 ARG1, for example because ARG0 is an integer constant and ARG1
6820 isn't. */
6822 bool
6823 tree_swap_operands_p (const_tree arg0, const_tree arg1)
6825 if (CONSTANT_CLASS_P (arg1))
6826 return 0;
6827 if (CONSTANT_CLASS_P (arg0))
6828 return 1;
6830 STRIP_NOPS (arg0);
6831 STRIP_NOPS (arg1);
6833 if (TREE_CONSTANT (arg1))
6834 return 0;
6835 if (TREE_CONSTANT (arg0))
6836 return 1;
6838 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6839 for commutative and comparison operators. Ensuring a canonical
6840 form allows the optimizers to find additional redundancies without
6841 having to explicitly check for both orderings. */
6842 if (TREE_CODE (arg0) == SSA_NAME
6843 && TREE_CODE (arg1) == SSA_NAME
6844 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6845 return 1;
6847 /* Put SSA_NAMEs last. */
6848 if (TREE_CODE (arg1) == SSA_NAME)
6849 return 0;
6850 if (TREE_CODE (arg0) == SSA_NAME)
6851 return 1;
6853 /* Put variables last. */
6854 if (DECL_P (arg1))
6855 return 0;
6856 if (DECL_P (arg0))
6857 return 1;
6859 return 0;
6863 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6864 means A >= Y && A != MAX, but in this case we know that
6865 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6867 static tree
6868 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6870 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6872 if (TREE_CODE (bound) == LT_EXPR)
6873 a = TREE_OPERAND (bound, 0);
6874 else if (TREE_CODE (bound) == GT_EXPR)
6875 a = TREE_OPERAND (bound, 1);
6876 else
6877 return NULL_TREE;
6879 typea = TREE_TYPE (a);
6880 if (!INTEGRAL_TYPE_P (typea)
6881 && !POINTER_TYPE_P (typea))
6882 return NULL_TREE;
6884 if (TREE_CODE (ineq) == LT_EXPR)
6886 a1 = TREE_OPERAND (ineq, 1);
6887 y = TREE_OPERAND (ineq, 0);
6889 else if (TREE_CODE (ineq) == GT_EXPR)
6891 a1 = TREE_OPERAND (ineq, 0);
6892 y = TREE_OPERAND (ineq, 1);
6894 else
6895 return NULL_TREE;
6897 if (TREE_TYPE (a1) != typea)
6898 return NULL_TREE;
6900 if (POINTER_TYPE_P (typea))
6902 /* Convert the pointer types to integers before taking the difference. */
6903 tree ta = fold_convert_loc (loc, ssizetype, a);
6904 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6905 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6907 else
6908 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6910 if (!diff || !integer_onep (diff))
6911 return NULL_TREE;
6913 return fold_build2_loc (loc, GE_EXPR, type, a, y);
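/* Worked instance (hypothetical function): given BOUND = a < x, a cannot
   be the maximum value of its type, so a + 1 does not wrap and
   a + 1 > y is exactly a >= y.  */

static int
nonsharp_ineq_example (unsigned a, unsigned x, unsigned y)
{
  /* (a < x && a + 1 > y)  folds to  (a < x && a >= y).  */
  return a < x && a >= y;
}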
6916 /* Fold a sum or difference of at least one multiplication.
6917 Returns the folded tree or NULL if no simplification could be made. */
6919 static tree
6920 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6921 tree arg0, tree arg1)
6923 tree arg00, arg01, arg10, arg11;
6924 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6926 /* (A * C) +- (B * C) -> (A+-B) * C.
6927 (A * C) +- A -> A * (C+-1).
6928 We are most concerned about the case where C is a constant,
6929 but other combinations show up during loop reduction. Since
6930 it is not difficult, try all four possibilities. */
6932 if (TREE_CODE (arg0) == MULT_EXPR)
6934 arg00 = TREE_OPERAND (arg0, 0);
6935 arg01 = TREE_OPERAND (arg0, 1);
6937 else if (TREE_CODE (arg0) == INTEGER_CST)
6939 arg00 = build_one_cst (type);
6940 arg01 = arg0;
6942 else
6944 /* We cannot generate constant 1 for fract. */
6945 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6946 return NULL_TREE;
6947 arg00 = arg0;
6948 arg01 = build_one_cst (type);
6950 if (TREE_CODE (arg1) == MULT_EXPR)
6952 arg10 = TREE_OPERAND (arg1, 0);
6953 arg11 = TREE_OPERAND (arg1, 1);
6955 else if (TREE_CODE (arg1) == INTEGER_CST)
6957 arg10 = build_one_cst (type);
6958 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6959 the purpose of this canonicalization. */
6960 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6961 && negate_expr_p (arg1)
6962 && code == PLUS_EXPR)
6964 arg11 = negate_expr (arg1);
6965 code = MINUS_EXPR;
6967 else
6968 arg11 = arg1;
6970 else
6972 /* We cannot generate constant 1 for fract. */
6973 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6974 return NULL_TREE;
6975 arg10 = arg1;
6976 arg11 = build_one_cst (type);
6978 same = NULL_TREE;
6980 /* Prefer factoring a common non-constant. */
6981 if (operand_equal_p (arg00, arg10, 0))
6982 same = arg00, alt0 = arg01, alt1 = arg11;
6983 else if (operand_equal_p (arg01, arg11, 0))
6984 same = arg01, alt0 = arg00, alt1 = arg10;
6985 else if (operand_equal_p (arg00, arg11, 0))
6986 same = arg00, alt0 = arg01, alt1 = arg10;
6987 else if (operand_equal_p (arg01, arg10, 0))
6988 same = arg01, alt0 = arg00, alt1 = arg11;
6990 /* No identical multiplicands; see if we can find a common
6991 power-of-two factor in non-power-of-two multiplies. This
6992 can help in multi-dimensional array access. */
6993 else if (tree_fits_shwi_p (arg01)
6994 && tree_fits_shwi_p (arg11))
6996 HOST_WIDE_INT int01, int11, tmp;
6997 bool swap = false;
6998 tree maybe_same;
6999 int01 = tree_to_shwi (arg01);
7000 int11 = tree_to_shwi (arg11);
7002 /* Move min of absolute values to int11. */
7003 if (absu_hwi (int01) < absu_hwi (int11))
7005 tmp = int01, int01 = int11, int11 = tmp;
7006 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7007 maybe_same = arg01;
7008 swap = true;
7010 else
7011 maybe_same = arg11;
7013 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7014 /* The remainder should not be a constant, otherwise we
7015 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
7016 increases the number of multiplications necessary. */
7017 && TREE_CODE (arg10) != INTEGER_CST)
7019 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7020 build_int_cst (TREE_TYPE (arg00),
7021 int01 / int11));
7022 alt1 = arg10;
7023 same = maybe_same;
7024 if (swap)
7025 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7029 if (!same)
7030 return NULL_TREE;
7032 if (! INTEGRAL_TYPE_P (type)
7033 || TYPE_OVERFLOW_WRAPS (type)
7034 /* We are neither factoring zero nor minus one. */
7035 || TREE_CODE (same) == INTEGER_CST)
7036 return fold_build2_loc (loc, MULT_EXPR, type,
7037 fold_build2_loc (loc, code, type,
7038 fold_convert_loc (loc, type, alt0),
7039 fold_convert_loc (loc, type, alt1)),
7040 fold_convert_loc (loc, type, same));
7042 /* Same may be zero and thus the operation 'code' may overflow. Likewise
7043 same may be minus one and thus the multiplication may overflow. Perform
7044 the operations in an unsigned type. */
7045 tree utype = unsigned_type_for (type);
7046 tree tem = fold_build2_loc (loc, code, utype,
7047 fold_convert_loc (loc, utype, alt0),
7048 fold_convert_loc (loc, utype, alt1));
7049 /* If the sum evaluated to a constant that is not -INF, the multiplication
7050 cannot overflow. */
7051 if (TREE_CODE (tem) == INTEGER_CST
7052 && ! wi::eq_p (tem, wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7053 return fold_build2_loc (loc, MULT_EXPR, type,
7054 fold_convert (type, tem), same);
7056 return fold_convert_loc (loc, type,
7057 fold_build2_loc (loc, MULT_EXPR, utype, tem,
7058 fold_convert_loc (loc, utype, same)));
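/* Worked instances of fold_plusminus_mult_expr (hypothetical function):
   factoring an identical multiplicand, and factoring a common
   power-of-two from non-identical constants, as arises in
   multi-dimensional array indexing.  */

static int
plusminus_mult_example (int i, int j)
{
  /* i * 12 + j * 4  folds to  (i * 3 + j) * 4; likewise i * 4 + i * 2
     would fold to i * 6.  */
  return (i * 3 + j) * 4;
}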
7061 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7062 specified by EXPR into the buffer PTR of length LEN bytes.
7063 Return the number of bytes placed in the buffer, or zero
7064 upon failure. */
7066 static int
7067 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7069 tree type = TREE_TYPE (expr);
7070 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7071 int byte, offset, word, words;
7072 unsigned char value;
7074 if ((off == -1 && total_bytes > len)
7075 || off >= total_bytes)
7076 return 0;
7077 if (off == -1)
7078 off = 0;
7079 words = total_bytes / UNITS_PER_WORD;
7081 for (byte = 0; byte < total_bytes; byte++)
7083 int bitpos = byte * BITS_PER_UNIT;
7084 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7085 number of bytes. */
7086 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7088 if (total_bytes > UNITS_PER_WORD)
7090 word = byte / UNITS_PER_WORD;
7091 if (WORDS_BIG_ENDIAN)
7092 word = (words - 1) - word;
7093 offset = word * UNITS_PER_WORD;
7094 if (BYTES_BIG_ENDIAN)
7095 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7096 else
7097 offset += byte % UNITS_PER_WORD;
7099 else
7100 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7101 if (offset >= off
7102 && offset - off < len)
7103 ptr[offset - off] = value;
7105 return MIN (len, total_bytes - off);
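/* Self-contained sketch of the byte-order logic above (hypothetical
   function, for values that fit one host word): output byte I receives
   bits [8*I, 8*I + 8) of the value, with the byte order mirrored for a
   big-endian target.  */

static void
encode_int_example (unsigned long long value, unsigned char *buf,
		    int total_bytes, int big_endian)
{
  int byte;
  for (byte = 0; byte < total_bytes; byte++)
    buf[big_endian ? total_bytes - 1 - byte : byte]
      = (unsigned char) (value >> (byte * 8));
}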
7109 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7110 specified by EXPR into the buffer PTR of length LEN bytes.
7111 Return the number of bytes placed in the buffer, or zero
7112 upon failure. */
7114 static int
7115 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7117 tree type = TREE_TYPE (expr);
7118 machine_mode mode = TYPE_MODE (type);
7119 int total_bytes = GET_MODE_SIZE (mode);
7120 FIXED_VALUE_TYPE value;
7121 tree i_value, i_type;
7123 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7124 return 0;
7126 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7128 if (NULL_TREE == i_type
7129 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7130 return 0;
7132 value = TREE_FIXED_CST (expr);
7133 i_value = double_int_to_tree (i_type, value.data);
7135 return native_encode_int (i_value, ptr, len, off);
7139 /* Subroutine of native_encode_expr. Encode the REAL_CST
7140 specified by EXPR into the buffer PTR of length LEN bytes.
7141 Return the number of bytes placed in the buffer, or zero
7142 upon failure. */
7144 static int
7145 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7147 tree type = TREE_TYPE (expr);
7148 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7149 int byte, offset, word, words, bitpos;
7150 unsigned char value;
7152 /* There are always 32 bits in each long, no matter the size of
7153 the host's long. We handle floating point representations with
7154 up to 192 bits. */
7155 long tmp[6];
7157 if ((off == -1 && total_bytes > len)
7158 || off >= total_bytes)
7159 return 0;
7160 if (off == -1)
7161 off = 0;
7162 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7164 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7166 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7167 bitpos += BITS_PER_UNIT)
7169 byte = (bitpos / BITS_PER_UNIT) & 3;
7170 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7172 if (UNITS_PER_WORD < 4)
7174 word = byte / UNITS_PER_WORD;
7175 if (WORDS_BIG_ENDIAN)
7176 word = (words - 1) - word;
7177 offset = word * UNITS_PER_WORD;
7178 if (BYTES_BIG_ENDIAN)
7179 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7180 else
7181 offset += byte % UNITS_PER_WORD;
7183 else
7185 offset = byte;
7186 if (BYTES_BIG_ENDIAN)
7188 /* Reverse bytes within each long, or within the entire float
7189 if it's smaller than a long (for HFmode). */
7190 offset = MIN (3, total_bytes - 1) - offset;
7191 gcc_assert (offset >= 0);
7194 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7195 if (offset >= off
7196 && offset - off < len)
7197 ptr[offset - off] = value;
7199 return MIN (len, total_bytes - off);
7202 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7203 specified by EXPR into the buffer PTR of length LEN bytes.
7204 Return the number of bytes placed in the buffer, or zero
7205 upon failure. */
7207 static int
7208 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7210 int rsize, isize;
7211 tree part;
7213 part = TREE_REALPART (expr);
7214 rsize = native_encode_expr (part, ptr, len, off);
7215 if (off == -1
7216 && rsize == 0)
7217 return 0;
7218 part = TREE_IMAGPART (expr);
7219 if (off != -1)
7220 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7221 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7222 if (off == -1
7223 && isize != rsize)
7224 return 0;
7225 return rsize + isize;
7229 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7230 specified by EXPR into the buffer PTR of length LEN bytes.
7231 Return the number of bytes placed in the buffer, or zero
7232 upon failure. */
7234 static int
7235 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7237 unsigned i, count;
7238 int size, offset;
7239 tree itype, elem;
7241 offset = 0;
7242 count = VECTOR_CST_NELTS (expr);
7243 itype = TREE_TYPE (TREE_TYPE (expr));
7244 size = GET_MODE_SIZE (TYPE_MODE (itype));
7245 for (i = 0; i < count; i++)
7247 if (off >= size)
7249 off -= size;
7250 continue;
7252 elem = VECTOR_CST_ELT (expr, i);
7253 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7254 if ((off == -1 && res != size)
7255 || res == 0)
7256 return 0;
7257 offset += res;
7258 if (offset >= len)
7259 return offset;
7260 if (off != -1)
7261 off = 0;
7263 return offset;
7267 /* Subroutine of native_encode_expr. Encode the STRING_CST
7268 specified by EXPR into the buffer PTR of length LEN bytes.
7269 Return the number of bytes placed in the buffer, or zero
7270 upon failure. */
7272 static int
7273 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7275 tree type = TREE_TYPE (expr);
7276 HOST_WIDE_INT total_bytes;
7278 if (TREE_CODE (type) != ARRAY_TYPE
7279 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7280 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7281 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7282 return 0;
7283 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7284 if ((off == -1 && total_bytes > len)
7285 || off >= total_bytes)
7286 return 0;
7287 if (off == -1)
7288 off = 0;
7289 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7291 int written = 0;
7292 if (off < TREE_STRING_LENGTH (expr))
7294 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7295 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7297 memset (ptr + written, 0,
7298 MIN (total_bytes - written, len - written));
7300 else
7301 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7302 return MIN (total_bytes - off, len);
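/* Editor's note: a standalone sketch (not GCC code) of the padding
   behavior above -- when the STRING_CST is shorter than its array type,
   e.g. char a[8] = "hi", the tail of the encoding is zero-filled.  */
#if 0
#include <string.h>

static int
encode_padded_string (const char *str, int str_len, int total_bytes,
                      unsigned char *ptr, int len)
{
  int n = total_bytes < len ? total_bytes : len;
  int written = str_len < n ? str_len : n;
  memcpy (ptr, str, written);              /* the literal's own bytes */
  memset (ptr + written, 0, n - written);  /* zero padding to the type size */
  return n;
}
#endif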
7306 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7307 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7308 buffer PTR of length LEN bytes. If OFF is not -1 then start
7309 the encoding at byte offset OFF and encode at most LEN bytes.
7310 Return the number of bytes placed in the buffer, or zero upon failure. */
7312 int
7313 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7315 /* We don't support starting at a negative offset, and -1 is special. */
7316 if (off < -1)
7317 return 0;
7319 switch (TREE_CODE (expr))
7321 case INTEGER_CST:
7322 return native_encode_int (expr, ptr, len, off);
7324 case REAL_CST:
7325 return native_encode_real (expr, ptr, len, off);
7327 case FIXED_CST:
7328 return native_encode_fixed (expr, ptr, len, off);
7330 case COMPLEX_CST:
7331 return native_encode_complex (expr, ptr, len, off);
7333 case VECTOR_CST:
7334 return native_encode_vector (expr, ptr, len, off);
7336 case STRING_CST:
7337 return native_encode_string (expr, ptr, len, off);
7339 default:
7340 return 0;
7345 /* Subroutine of native_interpret_expr. Interpret the contents of
7346 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7347 If the buffer cannot be interpreted, return NULL_TREE. */
7349 static tree
7350 native_interpret_int (tree type, const unsigned char *ptr, int len)
7352 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7354 if (total_bytes > len
7355 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7356 return NULL_TREE;
7358 wide_int result = wi::from_buffer (ptr, total_bytes);
7360 return wide_int_to_tree (type, result);
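/* Editor's note: a standalone sketch (not GCC code) of what
   wi::from_buffer does for a little-endian target, reassembling an
   integer from its byte image; the real routine also handles big-endian
   targets and arbitrary precision.  */
#if 0
#include <stdint.h>

static uint64_t
interpret_int_le (const unsigned char *ptr, int total_bytes)
{
  uint64_t v = 0;
  for (int i = total_bytes - 1; i >= 0; i--)
    v = (v << 8) | ptr[i];    /* most significant byte is at the end */
  return v;
}
#endif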
7364 /* Subroutine of native_interpret_expr. Interpret the contents of
7365 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7366 If the buffer cannot be interpreted, return NULL_TREE. */
7368 static tree
7369 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7371 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7372 double_int result;
7373 FIXED_VALUE_TYPE fixed_value;
7375 if (total_bytes > len
7376 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7377 return NULL_TREE;
7379 result = double_int::from_buffer (ptr, total_bytes);
7380 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7382 return build_fixed (type, fixed_value);
7386 /* Subroutine of native_interpret_expr. Interpret the contents of
7387 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7388 If the buffer cannot be interpreted, return NULL_TREE. */
7390 static tree
7391 native_interpret_real (tree type, const unsigned char *ptr, int len)
7393 machine_mode mode = TYPE_MODE (type);
7394 int total_bytes = GET_MODE_SIZE (mode);
7395 unsigned char value;
7396 /* There are always 32 bits in each long, no matter the size of
7397 the host's long. We handle floating point representations with
7398 up to 192 bits. */
7399 REAL_VALUE_TYPE r;
7400 long tmp[6];
7402 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7403 if (total_bytes > len || total_bytes > 24)
7404 return NULL_TREE;
7405 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7407 memset (tmp, 0, sizeof (tmp));
7408 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7409 bitpos += BITS_PER_UNIT)
7411 /* Both OFFSET and BYTE index within a long;
7412 bitpos indexes the whole float. */
7413 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7414 if (UNITS_PER_WORD < 4)
7416 int word = byte / UNITS_PER_WORD;
7417 if (WORDS_BIG_ENDIAN)
7418 word = (words - 1) - word;
7419 offset = word * UNITS_PER_WORD;
7420 if (BYTES_BIG_ENDIAN)
7421 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7422 else
7423 offset += byte % UNITS_PER_WORD;
7425 else
7427 offset = byte;
7428 if (BYTES_BIG_ENDIAN)
7430 /* Reverse bytes within each long, or within the entire float
7431 if it's smaller than a long (for HFmode). */
7432 offset = MIN (3, total_bytes - 1) - offset;
7433 gcc_assert (offset >= 0);
7436 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7438 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7441 real_from_target (&r, tmp, mode);
7442 return build_real (type, r);
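/* Editor's note: a standalone sketch (not GCC code) of the byte-placement
   arithmetic used in the loop above.  The target macros UNITS_PER_WORD,
   WORDS_BIG_ENDIAN and BYTES_BIG_ENDIAN become plain parameters here.  */
#if 0
static int
real_byte_offset (int byte, int units_per_word, int words,
                  int words_big_endian, int bytes_big_endian,
                  int total_bytes)
{
  int offset;
  if (units_per_word < 4)
    {
      /* Words are smaller than the 32-bit groups real_from_target
         consumes, so place the byte word by word.  */
      int word = byte / units_per_word;
      if (words_big_endian)
        word = (words - 1) - word;
      offset = word * units_per_word;
      if (bytes_big_endian)
        offset += (units_per_word - 1) - (byte % units_per_word);
      else
        offset += byte % units_per_word;
    }
  else
    {
      offset = byte;
      if (bytes_big_endian)
        /* Reverse within the 32-bit group, or within the whole float
           if it is smaller than the group.  */
        offset = (total_bytes - 1 < 3 ? total_bytes - 1 : 3) - offset;
    }
  return offset;
}
#endif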
7446 /* Subroutine of native_interpret_expr. Interpret the contents of
7447 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7448 If the buffer cannot be interpreted, return NULL_TREE. */
7450 static tree
7451 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7453 tree etype, rpart, ipart;
7454 int size;
7456 etype = TREE_TYPE (type);
7457 size = GET_MODE_SIZE (TYPE_MODE (etype));
7458 if (size * 2 > len)
7459 return NULL_TREE;
7460 rpart = native_interpret_expr (etype, ptr, size);
7461 if (!rpart)
7462 return NULL_TREE;
7463 ipart = native_interpret_expr (etype, ptr+size, size);
7464 if (!ipart)
7465 return NULL_TREE;
7466 return build_complex (type, rpart, ipart);
7470 /* Subroutine of native_interpret_expr. Interpret the contents of
7471 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7472 If the buffer cannot be interpreted, return NULL_TREE. */
7474 static tree
7475 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7477 tree etype, elem;
7478 int i, size, count;
7479 tree *elements;
7481 etype = TREE_TYPE (type);
7482 size = GET_MODE_SIZE (TYPE_MODE (etype));
7483 count = TYPE_VECTOR_SUBPARTS (type);
7484 if (size * count > len)
7485 return NULL_TREE;
7487 elements = XALLOCAVEC (tree, count);
7488 for (i = count - 1; i >= 0; i--)
7490 elem = native_interpret_expr (etype, ptr+(i*size), size);
7491 if (!elem)
7492 return NULL_TREE;
7493 elements[i] = elem;
7495 return build_vector (type, elements);
7499 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7500 the buffer PTR of length LEN as a constant of type TYPE. For
7501 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7502 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7503 return NULL_TREE. */
7505 tree
7506 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7508 switch (TREE_CODE (type))
7510 case INTEGER_TYPE:
7511 case ENUMERAL_TYPE:
7512 case BOOLEAN_TYPE:
7513 case POINTER_TYPE:
7514 case REFERENCE_TYPE:
7515 return native_interpret_int (type, ptr, len);
7517 case REAL_TYPE:
7518 return native_interpret_real (type, ptr, len);
7520 case FIXED_POINT_TYPE:
7521 return native_interpret_fixed (type, ptr, len);
7523 case COMPLEX_TYPE:
7524 return native_interpret_complex (type, ptr, len);
7526 case VECTOR_TYPE:
7527 return native_interpret_vector (type, ptr, len);
7529 default:
7530 return NULL_TREE;
7534 /* Returns true if we can interpret the contents of a native encoding
7535 as TYPE. */
7537 static bool
7538 can_native_interpret_type_p (tree type)
7540 switch (TREE_CODE (type))
7542 case INTEGER_TYPE:
7543 case ENUMERAL_TYPE:
7544 case BOOLEAN_TYPE:
7545 case POINTER_TYPE:
7546 case REFERENCE_TYPE:
7547 case FIXED_POINT_TYPE:
7548 case REAL_TYPE:
7549 case COMPLEX_TYPE:
7550 case VECTOR_TYPE:
7551 return true;
7552 default:
7553 return false;
7557 /* Return true iff a constant of type TYPE is accepted by
7558 native_encode_expr. */
7560 bool
7561 can_native_encode_type_p (tree type)
7563 switch (TREE_CODE (type))
7565 case INTEGER_TYPE:
7566 case REAL_TYPE:
7567 case FIXED_POINT_TYPE:
7568 case COMPLEX_TYPE:
7569 case VECTOR_TYPE:
7570 case POINTER_TYPE:
7571 return true;
7572 default:
7573 return false;
7577 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7578 TYPE at compile-time. If we're unable to perform the conversion
7579 return NULL_TREE. */
7581 static tree
7582 fold_view_convert_expr (tree type, tree expr)
7584 /* We support up to 512-bit values (for V8DFmode). */
7585 unsigned char buffer[64];
7586 int len;
7588 /* Check that the host and target are sane. */
7589 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7590 return NULL_TREE;
7592 len = native_encode_expr (expr, buffer, sizeof (buffer));
7593 if (len == 0)
7594 return NULL_TREE;
7596 return native_interpret_expr (type, buffer, len);
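/* Editor's note: at the source level the encode/interpret round trip
   above is the compile-time analogue of a byte-preserving pun.  A
   standalone sketch (not GCC code), assuming 32-bit float and int:  */
#if 0
#include <string.h>
#include <stdint.h>

static uint32_t
view_convert_float (float f)
{
  uint32_t u;
  memcpy (&u, &f, sizeof u);   /* encode the bytes, then reinterpret them */
  return u;
}
#endif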
7599 /* Build an expression for the address of T. Folds away INDIRECT_REF
7600 to avoid confusing the gimplify process. */
7602 tree
7603 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7605 /* The size of the object is not relevant when talking about its address. */
7606 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7607 t = TREE_OPERAND (t, 0);
7609 if (TREE_CODE (t) == INDIRECT_REF)
7611 t = TREE_OPERAND (t, 0);
7613 if (TREE_TYPE (t) != ptrtype)
7614 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7616 else if (TREE_CODE (t) == MEM_REF
7617 && integer_zerop (TREE_OPERAND (t, 1)))
7618 return TREE_OPERAND (t, 0);
7619 else if (TREE_CODE (t) == MEM_REF
7620 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7621 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7622 TREE_OPERAND (t, 0),
7623 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7624 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7626 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7628 if (TREE_TYPE (t) != ptrtype)
7629 t = fold_convert_loc (loc, ptrtype, t);
7631 else
7632 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7634 return t;
7637 /* Build an expression for the address of T. */
7639 tree
7640 build_fold_addr_expr_loc (location_t loc, tree t)
7642 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7644 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7647 /* Fold a unary expression of code CODE and type TYPE with operand
7648 OP0. Return the folded expression if folding is successful.
7649 Otherwise, return NULL_TREE. */
7651 tree
7652 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7654 tree tem;
7655 tree arg0;
7656 enum tree_code_class kind = TREE_CODE_CLASS (code);
7658 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7659 && TREE_CODE_LENGTH (code) == 1);
7661 arg0 = op0;
7662 if (arg0)
7664 if (CONVERT_EXPR_CODE_P (code)
7665 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7667 /* Don't use STRIP_NOPS, because signedness of argument type
7668 matters. */
7669 STRIP_SIGN_NOPS (arg0);
7671 else
7673 /* Strip any conversions that don't change the mode. This
7674 is safe for every expression, except for a comparison
7675 expression because its signedness is derived from its
7676 operands.
7678 Note that this is done as an internal manipulation within
7679 the constant folder, in order to find the simplest
7680 representation of the arguments so that their form can be
7681 studied. In any case, the appropriate type conversions
7682 should be put back in the tree that will get out of the
7683 constant folder. */
7684 STRIP_NOPS (arg0);
7687 if (CONSTANT_CLASS_P (arg0))
7689 tree tem = const_unop (code, type, arg0);
7690 if (tem)
7692 if (TREE_TYPE (tem) != type)
7693 tem = fold_convert_loc (loc, type, tem);
7694 return tem;
7699 tem = generic_simplify (loc, code, type, op0);
7700 if (tem)
7701 return tem;
7703 if (TREE_CODE_CLASS (code) == tcc_unary)
7705 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7706 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7707 fold_build1_loc (loc, code, type,
7708 fold_convert_loc (loc, TREE_TYPE (op0),
7709 TREE_OPERAND (arg0, 1))));
7710 else if (TREE_CODE (arg0) == COND_EXPR)
7712 tree arg01 = TREE_OPERAND (arg0, 1);
7713 tree arg02 = TREE_OPERAND (arg0, 2);
7714 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7715 arg01 = fold_build1_loc (loc, code, type,
7716 fold_convert_loc (loc,
7717 TREE_TYPE (op0), arg01));
7718 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7719 arg02 = fold_build1_loc (loc, code, type,
7720 fold_convert_loc (loc,
7721 TREE_TYPE (op0), arg02));
7722 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7723 arg01, arg02);
7725 /* If this was a conversion, and all we did was to move it
7726 inside the COND_EXPR, bring it back out. But leave it if
7727 it is a conversion from integer to integer and the
7728 result precision is no wider than a word since such a
7729 conversion is cheap and may be optimized away by combine,
7730 while it couldn't if it were outside the COND_EXPR. Then return
7731 so we don't get into an infinite recursion loop taking the
7732 conversion out and then back in. */
7734 if ((CONVERT_EXPR_CODE_P (code)
7735 || code == NON_LVALUE_EXPR)
7736 && TREE_CODE (tem) == COND_EXPR
7737 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7738 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7739 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7740 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7741 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7742 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7743 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7744 && (INTEGRAL_TYPE_P
7745 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7746 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7747 || flag_syntax_only))
7748 tem = build1_loc (loc, code, type,
7749 build3 (COND_EXPR,
7750 TREE_TYPE (TREE_OPERAND
7751 (TREE_OPERAND (tem, 1), 0)),
7752 TREE_OPERAND (tem, 0),
7753 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7754 TREE_OPERAND (TREE_OPERAND (tem, 2),
7755 0)));
7756 return tem;
7760 switch (code)
7762 case NON_LVALUE_EXPR:
7763 if (!maybe_lvalue_p (op0))
7764 return fold_convert_loc (loc, type, op0);
7765 return NULL_TREE;
7767 CASE_CONVERT:
7768 case FLOAT_EXPR:
7769 case FIX_TRUNC_EXPR:
7770 if (COMPARISON_CLASS_P (op0))
7772 /* If we have (type) (a CMP b) and type is an integral type, return
7773 new expression involving the new type. Canonicalize
7774 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7775 non-integral type.
7776 Do not fold the result as that would not simplify further; also
7777 folding again results in recursion. */
7778 if (TREE_CODE (type) == BOOLEAN_TYPE)
7779 return build2_loc (loc, TREE_CODE (op0), type,
7780 TREE_OPERAND (op0, 0),
7781 TREE_OPERAND (op0, 1));
7782 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7783 && TREE_CODE (type) != VECTOR_TYPE)
7784 return build3_loc (loc, COND_EXPR, type, op0,
7785 constant_boolean_node (true, type),
7786 constant_boolean_node (false, type));
7789 /* Handle (T *)&A.B.C for A being of type T and B and C
7790 living at offset zero. This occurs frequently in
7791 C++ upcasting and then accessing the base. */
7792 if (TREE_CODE (op0) == ADDR_EXPR
7793 && POINTER_TYPE_P (type)
7794 && handled_component_p (TREE_OPERAND (op0, 0)))
7796 HOST_WIDE_INT bitsize, bitpos;
7797 tree offset;
7798 machine_mode mode;
7799 int unsignedp, reversep, volatilep;
7800 tree base
7801 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7802 &offset, &mode, &unsignedp, &reversep,
7803 &volatilep);
7804 /* If the reference was to a (constant) zero offset, we can use
7805 the address of the base if it has the same base type
7806 as the result type and the pointer type is unqualified. */
7807 if (! offset && bitpos == 0
7808 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7809 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7810 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7811 return fold_convert_loc (loc, type,
7812 build_fold_addr_expr_loc (loc, base));
7815 if (TREE_CODE (op0) == MODIFY_EXPR
7816 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7817 /* Detect assigning a bitfield. */
7818 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7819 && DECL_BIT_FIELD
7820 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7822 /* Don't leave an assignment inside a conversion
7823 unless assigning a bitfield. */
7824 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7825 /* First do the assignment, then return converted constant. */
7826 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7827 TREE_NO_WARNING (tem) = 1;
7828 TREE_USED (tem) = 1;
7829 return tem;
7832 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7833 constants (if x has signed type, the sign bit cannot be set
7834 in c). This folds extension into the BIT_AND_EXPR.
7835 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7836 very likely don't have maximal range for their precision and this
7837 transformation effectively doesn't preserve non-maximal ranges. */
7838 if (TREE_CODE (type) == INTEGER_TYPE
7839 && TREE_CODE (op0) == BIT_AND_EXPR
7840 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7842 tree and_expr = op0;
7843 tree and0 = TREE_OPERAND (and_expr, 0);
7844 tree and1 = TREE_OPERAND (and_expr, 1);
7845 int change = 0;
7847 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7848 || (TYPE_PRECISION (type)
7849 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7850 change = 1;
7851 else if (TYPE_PRECISION (TREE_TYPE (and1))
7852 <= HOST_BITS_PER_WIDE_INT
7853 && tree_fits_uhwi_p (and1))
7855 unsigned HOST_WIDE_INT cst;
7857 cst = tree_to_uhwi (and1);
7858 cst &= HOST_WIDE_INT_M1U
7859 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7860 change = (cst == 0);
7861 if (change
7862 && !flag_syntax_only
7863 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
7864 == ZERO_EXTEND))
7866 tree uns = unsigned_type_for (TREE_TYPE (and0));
7867 and0 = fold_convert_loc (loc, uns, and0);
7868 and1 = fold_convert_loc (loc, uns, and1);
7871 if (change)
7873 tem = force_fit_type (type, wi::to_widest (and1), 0,
7874 TREE_OVERFLOW (and1));
7875 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7876 fold_convert_loc (loc, type, and0), tem);
7880 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7881 cast (T1)X will fold away. We assume that this happens when X itself
7882 is a cast. */
7883 if (POINTER_TYPE_P (type)
7884 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7885 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7887 tree arg00 = TREE_OPERAND (arg0, 0);
7888 tree arg01 = TREE_OPERAND (arg0, 1);
7890 return fold_build_pointer_plus_loc
7891 (loc, fold_convert_loc (loc, type, arg00), arg01);
7894 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7895 of the same precision, and X is an integer type not narrower than
7896 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7897 if (INTEGRAL_TYPE_P (type)
7898 && TREE_CODE (op0) == BIT_NOT_EXPR
7899 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7900 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7901 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7903 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7904 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7905 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7906 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7907 fold_convert_loc (loc, type, tem));
7910 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7911 type of X and Y (integer types only). */
7912 if (INTEGRAL_TYPE_P (type)
7913 && TREE_CODE (op0) == MULT_EXPR
7914 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7915 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7917 /* Be careful not to introduce new overflows. */
7918 tree mult_type;
7919 if (TYPE_OVERFLOW_WRAPS (type))
7920 mult_type = type;
7921 else
7922 mult_type = unsigned_type_for (type);
7924 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7926 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7927 fold_convert_loc (loc, mult_type,
7928 TREE_OPERAND (op0, 0)),
7929 fold_convert_loc (loc, mult_type,
7930 TREE_OPERAND (op0, 1)));
7931 return fold_convert_loc (loc, type, tem);
7935 return NULL_TREE;
7937 case VIEW_CONVERT_EXPR:
7938 if (TREE_CODE (op0) == MEM_REF)
7940 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
7941 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
7942 tem = fold_build2_loc (loc, MEM_REF, type,
7943 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7944 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7945 return tem;
7948 return NULL_TREE;
7950 case NEGATE_EXPR:
7951 tem = fold_negate_expr (loc, arg0);
7952 if (tem)
7953 return fold_convert_loc (loc, type, tem);
7954 return NULL_TREE;
7956 case ABS_EXPR:
7957 /* Convert fabs((double)float) into (double)fabsf(float). */
7958 if (TREE_CODE (arg0) == NOP_EXPR
7959 && TREE_CODE (type) == REAL_TYPE)
7961 tree targ0 = strip_float_extensions (arg0);
7962 if (targ0 != arg0)
7963 return fold_convert_loc (loc, type,
7964 fold_build1_loc (loc, ABS_EXPR,
7965 TREE_TYPE (targ0),
7966 targ0));
7968 return NULL_TREE;
7970 case BIT_NOT_EXPR:
7971 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7972 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7973 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7974 fold_convert_loc (loc, type,
7975 TREE_OPERAND (arg0, 0)))))
7976 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7977 fold_convert_loc (loc, type,
7978 TREE_OPERAND (arg0, 1)));
7979 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7980 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7981 fold_convert_loc (loc, type,
7982 TREE_OPERAND (arg0, 1)))))
7983 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7984 fold_convert_loc (loc, type,
7985 TREE_OPERAND (arg0, 0)), tem);
7987 return NULL_TREE;
7989 case TRUTH_NOT_EXPR:
7990 /* Note that the operand of this must be an int
7991 and its values must be 0 or 1.
7992 ("true" is a fixed value perhaps depending on the language,
7993 but we don't handle values other than 1 correctly yet.) */
7994 tem = fold_truth_not_expr (loc, arg0);
7995 if (!tem)
7996 return NULL_TREE;
7997 return fold_convert_loc (loc, type, tem);
7999 case INDIRECT_REF:
8000 /* Fold *&X to X if X is an lvalue. */
8001 if (TREE_CODE (op0) == ADDR_EXPR)
8003 tree op00 = TREE_OPERAND (op0, 0);
8004 if ((VAR_P (op00)
8005 || TREE_CODE (op00) == PARM_DECL
8006 || TREE_CODE (op00) == RESULT_DECL)
8007 && !TREE_READONLY (op00))
8008 return op00;
8010 return NULL_TREE;
8012 default:
8013 return NULL_TREE;
8014 } /* switch (code) */
8018 /* If the operation was a conversion do _not_ mark a resulting constant
8019 with TREE_OVERFLOW if the original constant was not. These conversions
8020 have implementation defined behavior and retaining the TREE_OVERFLOW
8021 flag here would confuse later passes such as VRP. */
8022 tree
8023 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8024 tree type, tree op0)
8026 tree res = fold_unary_loc (loc, code, type, op0);
8027 if (res
8028 && TREE_CODE (res) == INTEGER_CST
8029 && TREE_CODE (op0) == INTEGER_CST
8030 && CONVERT_EXPR_CODE_P (code))
8031 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8033 return res;
8036 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8037 operands OP0 and OP1. LOC is the location of the resulting expression.
8038 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8039 Return the folded expression if folding is successful. Otherwise,
8040 return NULL_TREE. */
8041 static tree
8042 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8043 tree arg0, tree arg1, tree op0, tree op1)
8045 tree tem;
8047 /* We only do these simplifications if we are optimizing. */
8048 if (!optimize)
8049 return NULL_TREE;
8051 /* Check for things like (A || B) && (A || C). We can convert this
8052 to A || (B && C). Note that either operator can be any of the four
8053 truth and/or operations and the transformation will still be
8054 valid. Also note that we only care about order for the
8055 ANDIF and ORIF operators. If B contains side effects, this
8056 might change the truth-value of A. */
8057 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8058 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8059 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8060 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8061 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8062 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8064 tree a00 = TREE_OPERAND (arg0, 0);
8065 tree a01 = TREE_OPERAND (arg0, 1);
8066 tree a10 = TREE_OPERAND (arg1, 0);
8067 tree a11 = TREE_OPERAND (arg1, 1);
8068 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8069 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8070 && (code == TRUTH_AND_EXPR
8071 || code == TRUTH_OR_EXPR));
8073 if (operand_equal_p (a00, a10, 0))
8074 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8075 fold_build2_loc (loc, code, type, a01, a11));
8076 else if (commutative && operand_equal_p (a00, a11, 0))
8077 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8078 fold_build2_loc (loc, code, type, a01, a10));
8079 else if (commutative && operand_equal_p (a01, a10, 0))
8080 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8081 fold_build2_loc (loc, code, type, a00, a11));
8083 /* This case is tricky because we must either have commutative
8084 operators or else A10 must not have side-effects. */
8086 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8087 && operand_equal_p (a01, a11, 0))
8088 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8089 fold_build2_loc (loc, code, type, a00, a10),
8090 a01);
8093 /* See if we can build a range comparison. */
8094 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8095 return tem;
8097 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8098 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8100 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8101 if (tem)
8102 return fold_build2_loc (loc, code, type, tem, arg1);
8105 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8106 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8108 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8109 if (tem)
8110 return fold_build2_loc (loc, code, type, arg0, tem);
8113 /* Check for the possibility of merging component references. If our
8114 lhs is another similar operation, try to merge its rhs with our
8115 rhs. Then try to merge our lhs and rhs. */
8116 if (TREE_CODE (arg0) == code
8117 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8118 TREE_OPERAND (arg0, 1), arg1)))
8119 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8121 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8122 return tem;
8124 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8125 && (code == TRUTH_AND_EXPR
8126 || code == TRUTH_ANDIF_EXPR
8127 || code == TRUTH_OR_EXPR
8128 || code == TRUTH_ORIF_EXPR))
8130 enum tree_code ncode, icode;
8132 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8133 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8134 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8136 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8137 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8138 We don't want to pack more than two leaves into a non-IF AND/OR
8139 expression.
8140 If the tree code of the left-hand operand isn't an AND/OR-IF code
8141 and isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8142 If the inner right-hand side of the left-hand operand has
8143 side effects, or isn't simple, then we can't add to it,
8144 as otherwise we might destroy the if-sequence. */
8145 if (TREE_CODE (arg0) == icode
8146 && simple_operand_p_2 (arg1)
8147 /* Needed for sequence points to handle traps and
8148 side effects. */
8149 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8151 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8152 arg1);
8153 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8154 tem);
8156 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8157 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8158 else if (TREE_CODE (arg1) == icode
8159 && simple_operand_p_2 (arg0)
8160 /* Needed for sequence points to handle traps and
8161 side effects. */
8162 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8164 tem = fold_build2_loc (loc, ncode, type,
8165 arg0, TREE_OPERAND (arg1, 0));
8166 return fold_build2_loc (loc, icode, type, tem,
8167 TREE_OPERAND (arg1, 1));
8169 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8170 into (A OR B).
8171 For sequence point consistency, we need to check for trapping
8172 and side effects. */
8173 else if (code == icode && simple_operand_p_2 (arg0)
8174 && simple_operand_p_2 (arg1))
8175 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8178 return NULL_TREE;
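/* Editor's note: a standalone check (not GCC code) of the distribution
   law used at the top of fold_truth_andor,
   (A || B) && (A || C) == A || (B && C), over all boolean inputs.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      for (int c = 0; c <= 1; c++)
        assert (((a || b) && (a || c)) == (a || (b && c)));
  return 0;
}
#endif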
8181 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8182 by changing CODE to reduce the magnitude of constants involved in
8183 ARG0 of the comparison.
8184 Returns a canonicalized comparison tree if a simplification was
8185 possible, otherwise returns NULL_TREE.
8186 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8187 valid if signed overflow is undefined. */
8189 static tree
8190 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8191 tree arg0, tree arg1,
8192 bool *strict_overflow_p)
8194 enum tree_code code0 = TREE_CODE (arg0);
8195 tree t, cst0 = NULL_TREE;
8196 int sgn0;
8198 /* Match A +- CST code arg1. We can change this only if overflow
8199 is undefined. */
8200 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8201 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8202 /* In principle pointers also have undefined overflow behavior,
8203 but that causes problems elsewhere. */
8204 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8205 && (code0 == MINUS_EXPR
8206 || code0 == PLUS_EXPR)
8207 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8208 return NULL_TREE;
8210 /* Identify the constant in arg0 and its sign. */
8211 cst0 = TREE_OPERAND (arg0, 1);
8212 sgn0 = tree_int_cst_sgn (cst0);
8214 /* Overflowed constants and zero will cause problems. */
8215 if (integer_zerop (cst0)
8216 || TREE_OVERFLOW (cst0))
8217 return NULL_TREE;
8219 /* See if we can reduce the magnitude of the constant in
8220 arg0 by changing the comparison code. */
8221 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8222 if (code == LT_EXPR
8223 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8224 code = LE_EXPR;
8225 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8226 else if (code == GT_EXPR
8227 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8228 code = GE_EXPR;
8229 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8230 else if (code == LE_EXPR
8231 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8232 code = LT_EXPR;
8233 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8234 else if (code == GE_EXPR
8235 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8236 code = GT_EXPR;
8237 else
8238 return NULL_TREE;
8239 *strict_overflow_p = true;
8241 /* Now build the constant reduced in magnitude. But not if that
8242 would produce one outside of its type's range. */
8243 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8244 && ((sgn0 == 1
8245 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8246 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8247 || (sgn0 == -1
8248 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8249 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8250 return NULL_TREE;
8252 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8253 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8254 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8255 t = fold_convert (TREE_TYPE (arg1), t);
8257 return fold_build2_loc (loc, code, type, t, arg1);
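/* Editor's note: a standalone check (not GCC code) of one of the
   rewrites above, A - C < B  <=>  A - (C - 1) <= B, on a small domain
   where no overflow can occur -- the undefined-overflow assumption is
   exactly what makes the rewrite valid in general.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int a = -8; a <= 8; a++)
    for (int b = -8; b <= 8; b++)
      for (int c = 1; c <= 4; c++)
        assert ((a - c < b) == (a - (c - 1) <= b));
  return 0;
}
#endif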
8260 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8261 overflow further. Try to decrease the magnitude of constants involved
8262 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8263 and put sole constants at the second argument position.
8264 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8266 static tree
8267 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8268 tree arg0, tree arg1)
8270 tree t;
8271 bool strict_overflow_p;
8272 const char * const warnmsg = G_("assuming signed overflow does not occur "
8273 "when reducing constant in comparison");
8275 /* Try canonicalization by simplifying arg0. */
8276 strict_overflow_p = false;
8277 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8278 &strict_overflow_p);
8279 if (t)
8281 if (strict_overflow_p)
8282 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8283 return t;
8286 /* Try canonicalization by simplifying arg1 using the swapped
8287 comparison. */
8288 code = swap_tree_comparison (code);
8289 strict_overflow_p = false;
8290 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8291 &strict_overflow_p);
8292 if (t && strict_overflow_p)
8293 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8294 return t;
8297 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8298 space. This is used to avoid issuing overflow warnings for
8299 expressions like &p->x which cannot wrap. */
8301 static bool
8302 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8304 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8305 return true;
8307 if (bitpos < 0)
8308 return true;
8310 wide_int wi_offset;
8311 int precision = TYPE_PRECISION (TREE_TYPE (base));
8312 if (offset == NULL_TREE)
8313 wi_offset = wi::zero (precision);
8314 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8315 return true;
8316 else
8317 wi_offset = offset;
8319 bool overflow;
8320 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8321 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8322 if (overflow)
8323 return true;
8325 if (!wi::fits_uhwi_p (total))
8326 return true;
8328 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8329 if (size <= 0)
8330 return true;
8332 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8333 array. */
8334 if (TREE_CODE (base) == ADDR_EXPR)
8336 HOST_WIDE_INT base_size;
8338 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8339 if (base_size > 0 && size < base_size)
8340 size = base_size;
8343 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8346 /* Return a positive integer when the symbol DECL is known to have
8347 a nonzero address, zero when it's known not to (e.g., it's a weak
8348 symbol), and a negative integer when the symbol is not yet in the
8349 symbol table and so whether or not its address is zero is unknown.
8350 For function-local objects, always return a positive integer. */
8351 static int
8352 maybe_nonzero_address (tree decl)
8354 if (DECL_P (decl) && decl_in_symtab_p (decl))
8355 if (struct symtab_node *symbol = symtab_node::get_create (decl))
8356 return symbol->nonzero_address ();
8358 /* Function-local objects are never NULL. */
8359 if (DECL_P (decl)
8360 && (DECL_CONTEXT (decl)
8361 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8362 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8363 return 1;
8365 return -1;
8368 /* Subroutine of fold_binary. This routine performs all of the
8369 transformations that are common to the equality/inequality
8370 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8371 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8372 fold_binary should call fold_binary instead of this function. Fold a comparison with
8373 tree code CODE and type TYPE with operands OP0 and OP1. Return
8374 the folded comparison or NULL_TREE. */
8376 static tree
8377 fold_comparison (location_t loc, enum tree_code code, tree type,
8378 tree op0, tree op1)
8380 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8381 tree arg0, arg1, tem;
8383 arg0 = op0;
8384 arg1 = op1;
8386 STRIP_SIGN_NOPS (arg0);
8387 STRIP_SIGN_NOPS (arg1);
8389 /* For comparisons of pointers we can decompose it to a compile time
8390 comparison of the base objects and the offsets into the object.
8391 This requires at least one operand being an ADDR_EXPR or a
8392 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8393 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8394 && (TREE_CODE (arg0) == ADDR_EXPR
8395 || TREE_CODE (arg1) == ADDR_EXPR
8396 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8397 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8399 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8400 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8401 machine_mode mode;
8402 int volatilep, reversep, unsignedp;
8403 bool indirect_base0 = false, indirect_base1 = false;
8405 /* Get base and offset for the access. Strip ADDR_EXPR for
8406 get_inner_reference, but put it back by stripping INDIRECT_REF
8407 off the base object if possible. indirect_baseN will be true
8408 if baseN is not an address but refers to the object itself. */
8409 base0 = arg0;
8410 if (TREE_CODE (arg0) == ADDR_EXPR)
8412 base0
8413 = get_inner_reference (TREE_OPERAND (arg0, 0),
8414 &bitsize, &bitpos0, &offset0, &mode,
8415 &unsignedp, &reversep, &volatilep);
8416 if (TREE_CODE (base0) == INDIRECT_REF)
8417 base0 = TREE_OPERAND (base0, 0);
8418 else
8419 indirect_base0 = true;
8421 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8423 base0 = TREE_OPERAND (arg0, 0);
8424 STRIP_SIGN_NOPS (base0);
8425 if (TREE_CODE (base0) == ADDR_EXPR)
8427 base0
8428 = get_inner_reference (TREE_OPERAND (base0, 0),
8429 &bitsize, &bitpos0, &offset0, &mode,
8430 &unsignedp, &reversep, &volatilep);
8431 if (TREE_CODE (base0) == INDIRECT_REF)
8432 base0 = TREE_OPERAND (base0, 0);
8433 else
8434 indirect_base0 = true;
8436 if (offset0 == NULL_TREE || integer_zerop (offset0))
8437 offset0 = TREE_OPERAND (arg0, 1);
8438 else
8439 offset0 = size_binop (PLUS_EXPR, offset0,
8440 TREE_OPERAND (arg0, 1));
8441 if (TREE_CODE (offset0) == INTEGER_CST)
8443 offset_int tem = wi::sext (wi::to_offset (offset0),
8444 TYPE_PRECISION (sizetype));
8445 tem <<= LOG2_BITS_PER_UNIT;
8446 tem += bitpos0;
8447 if (wi::fits_shwi_p (tem))
8449 bitpos0 = tem.to_shwi ();
8450 offset0 = NULL_TREE;
8455 base1 = arg1;
8456 if (TREE_CODE (arg1) == ADDR_EXPR)
8458 base1
8459 = get_inner_reference (TREE_OPERAND (arg1, 0),
8460 &bitsize, &bitpos1, &offset1, &mode,
8461 &unsignedp, &reversep, &volatilep);
8462 if (TREE_CODE (base1) == INDIRECT_REF)
8463 base1 = TREE_OPERAND (base1, 0);
8464 else
8465 indirect_base1 = true;
8467 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8469 base1 = TREE_OPERAND (arg1, 0);
8470 STRIP_SIGN_NOPS (base1);
8471 if (TREE_CODE (base1) == ADDR_EXPR)
8473 base1
8474 = get_inner_reference (TREE_OPERAND (base1, 0),
8475 &bitsize, &bitpos1, &offset1, &mode,
8476 &unsignedp, &reversep, &volatilep);
8477 if (TREE_CODE (base1) == INDIRECT_REF)
8478 base1 = TREE_OPERAND (base1, 0);
8479 else
8480 indirect_base1 = true;
8482 if (offset1 == NULL_TREE || integer_zerop (offset1))
8483 offset1 = TREE_OPERAND (arg1, 1);
8484 else
8485 offset1 = size_binop (PLUS_EXPR, offset1,
8486 TREE_OPERAND (arg1, 1));
8487 if (TREE_CODE (offset1) == INTEGER_CST)
8489 offset_int tem = wi::sext (wi::to_offset (offset1),
8490 TYPE_PRECISION (sizetype));
8491 tem <<= LOG2_BITS_PER_UNIT;
8492 tem += bitpos1;
8493 if (wi::fits_shwi_p (tem))
8495 bitpos1 = tem.to_shwi ();
8496 offset1 = NULL_TREE;
8501 /* If we have equivalent bases we might be able to simplify. */
8502 if (indirect_base0 == indirect_base1
8503 && operand_equal_p (base0, base1,
8504 indirect_base0 ? OEP_ADDRESS_OF : 0))
8506 /* We can fold this expression to a constant if the non-constant
8507 offset parts are equal. */
8508 if ((offset0 == offset1
8509 || (offset0 && offset1
8510 && operand_equal_p (offset0, offset1, 0)))
8511 && (equality_code
8512 || (indirect_base0
8513 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8514 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8517 if (!equality_code
8518 && bitpos0 != bitpos1
8519 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8520 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8521 fold_overflow_warning (("assuming pointer wraparound does not "
8522 "occur when comparing P +- C1 with "
8523 "P +- C2"),
8524 WARN_STRICT_OVERFLOW_CONDITIONAL);
8526 switch (code)
8528 case EQ_EXPR:
8529 return constant_boolean_node (bitpos0 == bitpos1, type);
8530 case NE_EXPR:
8531 return constant_boolean_node (bitpos0 != bitpos1, type);
8532 case LT_EXPR:
8533 return constant_boolean_node (bitpos0 < bitpos1, type);
8534 case LE_EXPR:
8535 return constant_boolean_node (bitpos0 <= bitpos1, type);
8536 case GE_EXPR:
8537 return constant_boolean_node (bitpos0 >= bitpos1, type);
8538 case GT_EXPR:
8539 return constant_boolean_node (bitpos0 > bitpos1, type);
8540 default:;
8543 /* We can simplify the comparison to a comparison of the variable
8544 offset parts if the constant offset parts are equal.
8545 Be careful to use signed sizetype here because otherwise we
8546 mess with array offsets in the wrong way. This is possible
8547 because pointer arithmetic is restricted to remain within an
8548 object and overflow on pointer differences is undefined as of
8549 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8550 else if (bitpos0 == bitpos1
8551 && (equality_code
8552 || (indirect_base0
8553 && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8554 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8556 /* By converting to signed sizetype we cover middle-end pointer
8557 arithmetic, which operates on unsigned pointer types of sizetype
8558 size, and ARRAY_REF offsets, which are properly sign- or
8559 zero-extended from their type in case it is narrower than
8560 sizetype. */
8561 if (offset0 == NULL_TREE)
8562 offset0 = build_int_cst (ssizetype, 0);
8563 else
8564 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8565 if (offset1 == NULL_TREE)
8566 offset1 = build_int_cst (ssizetype, 0);
8567 else
8568 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8570 if (!equality_code
8571 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8572 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8573 fold_overflow_warning (("assuming pointer wraparound does not "
8574 "occur when comparing P +- C1 with "
8575 "P +- C2"),
8576 WARN_STRICT_OVERFLOW_COMPARISON);
8578 return fold_build2_loc (loc, code, type, offset0, offset1);
8581 /* For equal offsets we can simplify to a comparison of the
8582 base addresses. */
8583 else if (bitpos0 == bitpos1
8584 && (indirect_base0
8585 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8586 && (indirect_base1
8587 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8588 && ((offset0 == offset1)
8589 || (offset0 && offset1
8590 && operand_equal_p (offset0, offset1, 0))))
8592 if (indirect_base0)
8593 base0 = build_fold_addr_expr_loc (loc, base0);
8594 if (indirect_base1)
8595 base1 = build_fold_addr_expr_loc (loc, base1);
8596 return fold_build2_loc (loc, code, type, base0, base1);
8598 /* Comparison between an ordinary (non-weak) symbol and a null
8599 pointer can be eliminated since such symbols must have a non-null
8600 address. In C, relational expressions between pointers
8601 to objects and null pointers are undefined. The results
8602 below follow the C++ rules with the additional property that
8603 every object pointer compares greater than a null pointer.
8604 */
8605 else if (((DECL_P (base0)
8606 && maybe_nonzero_address (base0) > 0
8607 /* Avoid folding references to struct members at offset 0 to
8608 prevent tests like '&ptr->firstmember == 0' from getting
8609 eliminated. When ptr is null, although the -> expression
8610 is strictly speaking invalid, GCC retains it as a matter
8611 of QoI. See PR c/44555. */
8612 && (offset0 == NULL_TREE && bitpos0 != 0))
8613 || CONSTANT_CLASS_P (base0))
8614 && indirect_base0
8615 /* The caller guarantees that when one of the arguments is
8616 constant (i.e., null in this case) it is second. */
8617 && integer_zerop (arg1))
8619 switch (code)
8621 case EQ_EXPR:
8622 case LE_EXPR:
8623 case LT_EXPR:
8624 return constant_boolean_node (false, type);
8625 case GE_EXPR:
8626 case GT_EXPR:
8627 case NE_EXPR:
8628 return constant_boolean_node (true, type);
8629 default:
8630 gcc_unreachable ();
8635 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8636 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8637 the resulting offset is smaller in absolute value than the
8638 original one and has the same sign. */
8639 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8640 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8641 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8642 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8643 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8644 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8645 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8646 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8648 tree const1 = TREE_OPERAND (arg0, 1);
8649 tree const2 = TREE_OPERAND (arg1, 1);
8650 tree variable1 = TREE_OPERAND (arg0, 0);
8651 tree variable2 = TREE_OPERAND (arg1, 0);
8652 tree cst;
8653 const char * const warnmsg = G_("assuming signed overflow does not "
8654 "occur when combining constants around "
8655 "a comparison");
8657 /* Put the constant on the side where it doesn't overflow and is
8658 of lower absolute value than before and of the same sign. */
8659 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8660 ? MINUS_EXPR : PLUS_EXPR,
8661 const2, const1);
8662 if (!TREE_OVERFLOW (cst)
8663 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8664 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8666 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8667 return fold_build2_loc (loc, code, type,
8668 variable1,
8669 fold_build2_loc (loc, TREE_CODE (arg1),
8670 TREE_TYPE (arg1),
8671 variable2, cst));
8674 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8675 ? MINUS_EXPR : PLUS_EXPR,
8676 const1, const2);
8677 if (!TREE_OVERFLOW (cst)
8678 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8679 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8681 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8682 return fold_build2_loc (loc, code, type,
8683 fold_build2_loc (loc, TREE_CODE (arg0),
8684 TREE_TYPE (arg0),
8685 variable1, cst),
8686 variable2);
8690 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8691 if (tem)
8692 return tem;
8694 /* If we are comparing an expression that just has comparisons
8695 of two integer values, arithmetic expressions of those comparisons,
8696 and constants, we can simplify it. There are only three cases
8697 to check: the two values can either be equal, the first can be
8698 greater, or the second can be greater. Fold the expression for
8699 those three values. Since each value must be 0 or 1, we have
8700 eight possibilities, each of which corresponds to the constant 0
8701 or 1 or one of the six possible comparisons.
8703 This handles common cases like (a > b) == 0 but also handles
8704 expressions like ((x > y) - (y > x)) > 0, which supposedly
8705 occur in macroized code. */
8707 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8709 tree cval1 = 0, cval2 = 0;
8710 int save_p = 0;
8712 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8713 /* Don't handle degenerate cases here; they should already
8714 have been handled anyway. */
8715 && cval1 != 0 && cval2 != 0
8716 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8717 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8718 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8719 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8720 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8721 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8722 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8724 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8725 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8727 /* We can't just pass T to eval_subst in case cval1 or cval2
8728 was the same as ARG1. */
8730 tree high_result
8731 = fold_build2_loc (loc, code, type,
8732 eval_subst (loc, arg0, cval1, maxval,
8733 cval2, minval),
8734 arg1);
8735 tree equal_result
8736 = fold_build2_loc (loc, code, type,
8737 eval_subst (loc, arg0, cval1, maxval,
8738 cval2, maxval),
8739 arg1);
8740 tree low_result
8741 = fold_build2_loc (loc, code, type,
8742 eval_subst (loc, arg0, cval1, minval,
8743 cval2, maxval),
8744 arg1);
8746 /* All three of these results should be 0 or 1. Confirm they are.
8747 Then use those values to select the proper code to use. */
8749 if (TREE_CODE (high_result) == INTEGER_CST
8750 && TREE_CODE (equal_result) == INTEGER_CST
8751 && TREE_CODE (low_result) == INTEGER_CST)
8753 /* Make a 3-bit mask with the high-order bit being the
8754 value for `>', the next for '=', and the low for '<'. */
8755 switch ((integer_onep (high_result) * 4)
8756 + (integer_onep (equal_result) * 2)
8757 + integer_onep (low_result))
8759 case 0:
8760 /* Always false. */
8761 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8762 case 1:
8763 code = LT_EXPR;
8764 break;
8765 case 2:
8766 code = EQ_EXPR;
8767 break;
8768 case 3:
8769 code = LE_EXPR;
8770 break;
8771 case 4:
8772 code = GT_EXPR;
8773 break;
8774 case 5:
8775 code = NE_EXPR;
8776 break;
8777 case 6:
8778 code = GE_EXPR;
8779 break;
8780 case 7:
8781 /* Always true. */
8782 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8785 if (save_p)
8787 tem = save_expr (build2 (code, type, cval1, cval2));
8788 protected_set_expr_location (tem, loc);
8789 return tem;
8791 return fold_build2_loc (loc, code, type, cval1, cval2);
8796 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8797 into a single range test. */
8798 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8799 && TREE_CODE (arg1) == INTEGER_CST
8800 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8801 && !integer_zerop (TREE_OPERAND (arg0, 1))
8802 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8803 && !TREE_OVERFLOW (arg1))
8805 tem = fold_div_compare (loc, code, type, arg0, arg1);
8806 if (tem != NULL_TREE)
8807 return tem;
8810 return NULL_TREE;
8814 /* Subroutine of fold_binary. Optimize complex multiplications of the
8815 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8816 argument EXPR represents the expression "z" of type TYPE. */
8818 static tree
8819 fold_mult_zconjz (location_t loc, tree type, tree expr)
8821 tree itype = TREE_TYPE (type);
8822 tree rpart, ipart, tem;
8824 if (TREE_CODE (expr) == COMPLEX_EXPR)
8826 rpart = TREE_OPERAND (expr, 0);
8827 ipart = TREE_OPERAND (expr, 1);
8829 else if (TREE_CODE (expr) == COMPLEX_CST)
8831 rpart = TREE_REALPART (expr);
8832 ipart = TREE_IMAGPART (expr);
8834 else
8836 expr = save_expr (expr);
8837 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8838 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8841 rpart = save_expr (rpart);
8842 ipart = save_expr (ipart);
8843 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8844 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8845 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8846 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8847 build_zero_cst (itype));
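/* Editor's note: a standalone check (not GCC code) of the identity
   being folded: z * conj(z) = re*re + im*im with a zero imaginary
   part.  Exact for these small operands.  */
#if 0
#include <complex.h>
#include <assert.h>

int
main (void)
{
  double _Complex z = 3.0 + 4.0 * I;
  double _Complex p = z * conj (z);
  assert (creal (p) == 25.0 && cimag (p) == 0.0);
  return 0;
}
#endif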
8851 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8852 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8854 static bool
8855 vec_cst_ctor_to_array (tree arg, tree *elts)
8857 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8859 if (TREE_CODE (arg) == VECTOR_CST)
8861 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8862 elts[i] = VECTOR_CST_ELT (arg, i);
8864 else if (TREE_CODE (arg) == CONSTRUCTOR)
8866 constructor_elt *elt;
8868 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8869 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8870 return false;
8871 else
8872 elts[i] = elt->value;
8874 else
8875 return false;
8876 for (; i < nelts; i++)
8877 elts[i]
8878 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8879 return true;
8882 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8883 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8884 NULL_TREE otherwise. */
8886 static tree
8887 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8889 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8890 tree *elts;
8891 bool need_ctor = false;
8893 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8894 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8895 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8896 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8897 return NULL_TREE;
8899 elts = XALLOCAVEC (tree, nelts * 3);
8900 if (!vec_cst_ctor_to_array (arg0, elts)
8901 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8902 return NULL_TREE;
8904 for (i = 0; i < nelts; i++)
8906 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8907 need_ctor = true;
8908 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8911 if (need_ctor)
8913 vec<constructor_elt, va_gc> *v;
8914 vec_alloc (v, nelts);
8915 for (i = 0; i < nelts; i++)
8916 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8917 return build_constructor (type, v);
8919 else
8920 return build_vector (type, &elts[2 * nelts]);
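/* Editor's note: a standalone sketch (not GCC code) of the selection
   rule above -- element I of the result is element SEL[I] of the
   2*NELTS-element concatenation of the two inputs.  NELTS is fixed at
   4 here for illustration; SEL entries are assumed in range.  */
#if 0
static void
vec_perm4 (const int *a, const int *b, const unsigned char *sel, int *out)
{
  int both[8];
  for (int i = 0; i < 4; i++)
    {
      both[i] = a[i];          /* first operand supplies elements 0..3 */
      both[i + 4] = b[i];      /* second operand supplies elements 4..7 */
    }
  for (int i = 0; i < 4; i++)
    out[i] = both[sel[i]];
}
#endif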
8923 /* Try to fold a pointer difference of type TYPE between two address
8924 expressions of array references AREF0 and AREF1 using location LOC. Return a
8925 simplified expression for the difference or NULL_TREE. */
8927 static tree
8928 fold_addr_of_array_ref_difference (location_t loc, tree type,
8929 tree aref0, tree aref1)
8931 tree base0 = TREE_OPERAND (aref0, 0);
8932 tree base1 = TREE_OPERAND (aref1, 0);
8933 tree base_offset = build_int_cst (type, 0);
8935 /* If the bases are array references as well, recurse. If the bases
8936 are pointer indirections compute the difference of the pointers.
8937 If the bases are equal, we are set. */
8938 if ((TREE_CODE (base0) == ARRAY_REF
8939 && TREE_CODE (base1) == ARRAY_REF
8940 && (base_offset
8941 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8942 || (INDIRECT_REF_P (base0)
8943 && INDIRECT_REF_P (base1)
8944 && (base_offset
8945 = fold_binary_loc (loc, MINUS_EXPR, type,
8946 fold_convert (type, TREE_OPERAND (base0, 0)),
8947 fold_convert (type,
8948 TREE_OPERAND (base1, 0)))))
8949 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8951 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8952 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8953 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8954 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
8955 return fold_build2_loc (loc, PLUS_EXPR, type,
8956 base_offset,
8957 fold_build2_loc (loc, MULT_EXPR, type,
8958 diff, esz));
8960 return NULL_TREE;
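/* Editor's note: a standalone check (not GCC code) of the recursion
   above -- at each array level the index difference is scaled by the
   element size and added to the difference of the bases.  */
#if 0
#include <assert.h>
#include <stddef.h>

int
main (void)
{
  int a[10][4];
  ptrdiff_t bytes = (char *) &a[7][1] - (char *) &a[2][3];
  assert (bytes == ((7 - 2) * 4 + (1 - 3)) * (ptrdiff_t) sizeof (int));
  return 0;
}
#endif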
8963 /* If the real or vector real constant CST of type TYPE has an exact
8964 inverse, return it, else return NULL_TREE. */
8966 tree
8967 exact_inverse (tree type, tree cst)
8969 REAL_VALUE_TYPE r;
8970 tree unit_type, *elts;
8971 machine_mode mode;
8972 unsigned vec_nelts, i;
8974 switch (TREE_CODE (cst))
8976 case REAL_CST:
8977 r = TREE_REAL_CST (cst);
8979 if (exact_real_inverse (TYPE_MODE (type), &r))
8980 return build_real (type, r);
8982 return NULL_TREE;
8984 case VECTOR_CST:
8985 vec_nelts = VECTOR_CST_NELTS (cst);
8986 elts = XALLOCAVEC (tree, vec_nelts);
8987 unit_type = TREE_TYPE (type);
8988 mode = TYPE_MODE (unit_type);
8990 for (i = 0; i < vec_nelts; i++)
8992 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8993 if (!exact_real_inverse (mode, &r))
8994 return NULL_TREE;
8995 elts[i] = build_real (unit_type, r);
8998 return build_vector (type, elts);
9000 default:
9001 return NULL_TREE;
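/* Editor's note: the essence of exact_real_inverse is that a binary
   float has an exactly representable reciprocal iff it is a power of
   two (range edge cases aside).  A standalone sketch (not GCC code):  */
#if 0
#include <math.h>
#include <assert.h>

static int
is_power_of_two (double d)
{
  int exp;
  double m = frexp (d, &exp);     /* d = m * 2^exp with 0.5 <= |m| < 1 */
  return m == 0.5 || m == -0.5;
}

int
main (void)
{
  assert (is_power_of_two (0.25));   /* 1/0.25 == 4.0 exactly */
  assert (!is_power_of_two (0.1));   /* 1/0.1 is not representable */
  return 0;
}
#endif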
9005 /* Mask out the tz least significant bits of X of type TYPE where
9006 tz is the number of trailing zeroes in Y. */
9007 static wide_int
9008 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9010 int tz = wi::ctz (y);
9011 if (tz > 0)
9012 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9013 return x;
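/* Editor's note: a standalone sketch (not GCC code) of mask_with_tz on
   plain 32-bit values: clear as many low bits of X as Y has trailing
   zeros.  __builtin_ctz requires a nonzero argument, hence the guard.  */
#if 0
#include <assert.h>

static unsigned
mask_with_tz_u32 (unsigned x, unsigned y)
{
  if (y == 0)
    return x;
  int tz = __builtin_ctz (y);            /* trailing zeros of y */
  if (tz == 0)
    return x;
  return x & ~((1u << tz) - 1);          /* drop the tz low bits of x */
}

int
main (void)
{
  assert (mask_with_tz_u32 (0xff, 0x8) == 0xf8);
  return 0;
}
#endif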
9016 /* Return true when T is an address and is known to be nonzero.
9017 For floating point we further ensure that T is not denormal.
9018 Similar logic is present in nonzero_address in rtlanal.c.
9020 If the return value is based on the assumption that signed overflow
9021 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9022 change *STRICT_OVERFLOW_P. */
9024 static bool
9025 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9027 tree type = TREE_TYPE (t);
9028 enum tree_code code;
9030 /* Doing something useful for floating point would need more work. */
9031 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9032 return false;
9034 code = TREE_CODE (t);
9035 switch (TREE_CODE_CLASS (code))
9037 case tcc_unary:
9038 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9039 strict_overflow_p);
9040 case tcc_binary:
9041 case tcc_comparison:
9042 return tree_binary_nonzero_warnv_p (code, type,
9043 TREE_OPERAND (t, 0),
9044 TREE_OPERAND (t, 1),
9045 strict_overflow_p);
9046 case tcc_constant:
9047 case tcc_declaration:
9048 case tcc_reference:
9049 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9051 default:
9052 break;
9055 switch (code)
9057 case TRUTH_NOT_EXPR:
9058 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9059 strict_overflow_p);
9061 case TRUTH_AND_EXPR:
9062 case TRUTH_OR_EXPR:
9063 case TRUTH_XOR_EXPR:
9064 return tree_binary_nonzero_warnv_p (code, type,
9065 TREE_OPERAND (t, 0),
9066 TREE_OPERAND (t, 1),
9067 strict_overflow_p);
9069 case COND_EXPR:
9070 case CONSTRUCTOR:
9071 case OBJ_TYPE_REF:
9072 case ASSERT_EXPR:
9073 case ADDR_EXPR:
9074 case WITH_SIZE_EXPR:
9075 case SSA_NAME:
9076 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9078 case COMPOUND_EXPR:
9079 case MODIFY_EXPR:
9080 case BIND_EXPR:
9081 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9082 strict_overflow_p);
9084 case SAVE_EXPR:
9085 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9086 strict_overflow_p);
9088 case CALL_EXPR:
9090 tree fndecl = get_callee_fndecl (t);
9091 if (!fndecl) return false;
9092 if (flag_delete_null_pointer_checks && !flag_check_new
9093 && DECL_IS_OPERATOR_NEW (fndecl)
9094 && !TREE_NOTHROW (fndecl))
9095 return true;
9096 if (flag_delete_null_pointer_checks
9097 && lookup_attribute ("returns_nonnull",
9098 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9099 return true;
9100 return alloca_call_p (t);
9103 default:
9104 break;
9106 return false;
9109 /* Return true when T is an address and is known to be nonzero.
9110 Handle warnings about undefined signed overflow. */
9112 bool
9113 tree_expr_nonzero_p (tree t)
9115 bool ret, strict_overflow_p;
9117 strict_overflow_p = false;
9118 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9119 if (strict_overflow_p)
9120 fold_overflow_warning (("assuming signed overflow does not occur when "
9121 "determining that expression is always "
9122 "non-zero"),
9123 WARN_STRICT_OVERFLOW_MISC);
9124 return ret;
9127 /* Return true if T is known not to be equal to an integer W. */
9129 bool
9130 expr_not_equal_to (tree t, const wide_int &w)
9132 wide_int min, max, nz;
9133 value_range_type rtype;
9134 switch (TREE_CODE (t))
9136 case INTEGER_CST:
9137 return wi::ne_p (t, w);
9139 case SSA_NAME:
9140 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9141 return false;
9142 rtype = get_range_info (t, &min, &max);
9143 if (rtype == VR_RANGE)
9145 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9146 return true;
9147 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9148 return true;
9150 else if (rtype == VR_ANTI_RANGE
9151 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9152 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9153 return true;
9154 /* If T has some known zero bits and W has any of those bits set,
9155 then T is known not to be equal to W. */
9156 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9157 TYPE_PRECISION (TREE_TYPE (t))), 0))
9158 return true;
9159 return false;
9161 default:
9162 return false;
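/* Two illustrative cases (hypothetical values): an SSA name with
   value range [10, 20] is known not to equal W == 30, because
   max < W; and a value whose nonzero bits are 0xff00 cannot equal
   W == 0x1234, because W has bits set (0x34) that are known to be
   zero in T.  */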
9166 /* Fold a binary expression of code CODE and type TYPE with operands
9167 OP0 and OP1. LOC is the location of the resulting expression.
9168 Return the folded expression if folding is successful. Otherwise,
9169 return NULL_TREE. */
9171 tree
9172 fold_binary_loc (location_t loc,
9173 enum tree_code code, tree type, tree op0, tree op1)
9175 enum tree_code_class kind = TREE_CODE_CLASS (code);
9176 tree arg0, arg1, tem;
9177 tree t1 = NULL_TREE;
9178 bool strict_overflow_p;
9179 unsigned int prec;
9181 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9182 && TREE_CODE_LENGTH (code) == 2
9183 && op0 != NULL_TREE
9184 && op1 != NULL_TREE);
9186 arg0 = op0;
9187 arg1 = op1;
9189 /* Strip any conversions that don't change the mode. This is
9190 safe for every expression, except for a comparison expression
9191 because its signedness is derived from its operands. So, in
9192 the latter case, only strip conversions that don't change the
9193 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9194 preserved.
9196 Note that this is done as an internal manipulation within the
9197 constant folder, in order to find the simplest representation
9198 of the arguments so that their form can be studied. In any
9199 case, the appropriate type conversions should be put back in
9200 the tree that will get out of the constant folder. */
9202 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9204 STRIP_SIGN_NOPS (arg0);
9205 STRIP_SIGN_NOPS (arg1);
9207 else
9209 STRIP_NOPS (arg0);
9210 STRIP_NOPS (arg1);
9213 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9214 constant but we can't do arithmetic on them. */
9215 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9217 tem = const_binop (code, type, arg0, arg1);
9218 if (tem != NULL_TREE)
9220 if (TREE_TYPE (tem) != type)
9221 tem = fold_convert_loc (loc, type, tem);
9222 return tem;
9226 /* If this is a commutative operation, and ARG0 is a constant, move it
9227 to ARG1 to reduce the number of tests below. */
9228 if (commutative_tree_code (code)
9229 && tree_swap_operands_p (arg0, arg1))
9230 return fold_build2_loc (loc, code, type, op1, op0);
9232 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9233 to ARG1 to reduce the number of tests below. */
9234 if (kind == tcc_comparison
9235 && tree_swap_operands_p (arg0, arg1))
9236 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9238 tem = generic_simplify (loc, code, type, op0, op1);
9239 if (tem)
9240 return tem;
9242 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9244 First check for cases where an arithmetic operation is applied to a
9245 compound, conditional, or comparison operation. Push the arithmetic
9246 operation inside the compound or conditional to see if any folding
9247 can then be done. Convert comparison to conditional for this purpose.
9248 This also optimizes non-constant cases that used to be done in
9249 expand_expr.
9251 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9252 one of the operands is a truth value and the other is a truth value or a
9253 BIT_AND_EXPR with the constant 1. In that case, the
9254 code below would make the expression more complex. Change it to a
9255 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9256 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
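/* For example (a sketch with scalar integer operands): (a < b) & (c < d)
   is rewritten as a TRUTH_AND_EXPR of the two comparisons, and
   (a < b) == (c < d) becomes the inversion of their TRUTH_XOR_EXPR.  */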
9258 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9259 || code == EQ_EXPR || code == NE_EXPR)
9260 && TREE_CODE (type) != VECTOR_TYPE
9261 && ((truth_value_p (TREE_CODE (arg0))
9262 && (truth_value_p (TREE_CODE (arg1))
9263 || (TREE_CODE (arg1) == BIT_AND_EXPR
9264 && integer_onep (TREE_OPERAND (arg1, 1)))))
9265 || (truth_value_p (TREE_CODE (arg1))
9266 && (truth_value_p (TREE_CODE (arg0))
9267 || (TREE_CODE (arg0) == BIT_AND_EXPR
9268 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9270 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9271 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9272 : TRUTH_XOR_EXPR,
9273 boolean_type_node,
9274 fold_convert_loc (loc, boolean_type_node, arg0),
9275 fold_convert_loc (loc, boolean_type_node, arg1));
9277 if (code == EQ_EXPR)
9278 tem = invert_truthvalue_loc (loc, tem);
9280 return fold_convert_loc (loc, type, tem);
9283 if (TREE_CODE_CLASS (code) == tcc_binary
9284 || TREE_CODE_CLASS (code) == tcc_comparison)
9286 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9288 tem = fold_build2_loc (loc, code, type,
9289 fold_convert_loc (loc, TREE_TYPE (op0),
9290 TREE_OPERAND (arg0, 1)), op1);
9291 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9292 tem);
9294 if (TREE_CODE (arg1) == COMPOUND_EXPR)
9296 tem = fold_build2_loc (loc, code, type, op0,
9297 fold_convert_loc (loc, TREE_TYPE (op1),
9298 TREE_OPERAND (arg1, 1)));
9299 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9300 tem);
9303 if (TREE_CODE (arg0) == COND_EXPR
9304 || TREE_CODE (arg0) == VEC_COND_EXPR
9305 || COMPARISON_CLASS_P (arg0))
9307 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9308 arg0, arg1,
9309 /*cond_first_p=*/1);
9310 if (tem != NULL_TREE)
9311 return tem;
9314 if (TREE_CODE (arg1) == COND_EXPR
9315 || TREE_CODE (arg1) == VEC_COND_EXPR
9316 || COMPARISON_CLASS_P (arg1))
9318 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9319 arg1, arg0,
9320 /*cond_first_p=*/0);
9321 if (tem != NULL_TREE)
9322 return tem;
9326 switch (code)
9328 case MEM_REF:
9329 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9330 if (TREE_CODE (arg0) == ADDR_EXPR
9331 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9333 tree iref = TREE_OPERAND (arg0, 0);
9334 return fold_build2 (MEM_REF, type,
9335 TREE_OPERAND (iref, 0),
9336 int_const_binop (PLUS_EXPR, arg1,
9337 TREE_OPERAND (iref, 1)));
9340 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9341 if (TREE_CODE (arg0) == ADDR_EXPR
9342 && handled_component_p (TREE_OPERAND (arg0, 0)))
9344 tree base;
9345 HOST_WIDE_INT coffset;
9346 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9347 &coffset);
9348 if (!base)
9349 return NULL_TREE;
9350 return fold_build2 (MEM_REF, type,
9351 build_fold_addr_expr (base),
9352 int_const_binop (PLUS_EXPR, arg1,
9353 size_int (coffset)));
9356 return NULL_TREE;
9358 case POINTER_PLUS_EXPR:
9359 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9360 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9361 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9362 return fold_convert_loc (loc, type,
9363 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9364 fold_convert_loc (loc, sizetype,
9365 arg1),
9366 fold_convert_loc (loc, sizetype,
9367 arg0)));
9369 return NULL_TREE;
9371 case PLUS_EXPR:
9372 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9374 /* X + (X / CST) * -CST is X % CST. */
9375 if (TREE_CODE (arg1) == MULT_EXPR
9376 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9377 && operand_equal_p (arg0,
9378 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9380 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9381 tree cst1 = TREE_OPERAND (arg1, 1);
9382 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9383 cst1, cst0);
9384 if (sum && integer_zerop (sum))
9385 return fold_convert_loc (loc, type,
9386 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9387 TREE_TYPE (arg0), arg0,
9388 cst0));
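/* Worked instance of the fold above (hypothetical values): with
   X == 17 and CST == 5, X + (X / 5) * -5 is 17 + 3 * -5 == 2, which
   equals 17 % 5; truncating division makes this hold for negative X
   as well.  */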
9392 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9393 one. Make sure the type is not saturating and has the signedness of
9394 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9395 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9396 if ((TREE_CODE (arg0) == MULT_EXPR
9397 || TREE_CODE (arg1) == MULT_EXPR)
9398 && !TYPE_SATURATING (type)
9399 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9400 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9401 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9403 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9404 if (tem)
9405 return tem;
9408 if (! FLOAT_TYPE_P (type))
9410 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9411 (plus (plus (mult) (mult)) (foo)) so that we can
9412 take advantage of the factoring cases below. */
9413 if (ANY_INTEGRAL_TYPE_P (type)
9414 && TYPE_OVERFLOW_WRAPS (type)
9415 && (((TREE_CODE (arg0) == PLUS_EXPR
9416 || TREE_CODE (arg0) == MINUS_EXPR)
9417 && TREE_CODE (arg1) == MULT_EXPR)
9418 || ((TREE_CODE (arg1) == PLUS_EXPR
9419 || TREE_CODE (arg1) == MINUS_EXPR)
9420 && TREE_CODE (arg0) == MULT_EXPR)))
9422 tree parg0, parg1, parg, marg;
9423 enum tree_code pcode;
9425 if (TREE_CODE (arg1) == MULT_EXPR)
9426 parg = arg0, marg = arg1;
9427 else
9428 parg = arg1, marg = arg0;
9429 pcode = TREE_CODE (parg);
9430 parg0 = TREE_OPERAND (parg, 0);
9431 parg1 = TREE_OPERAND (parg, 1);
9432 STRIP_NOPS (parg0);
9433 STRIP_NOPS (parg1);
9435 if (TREE_CODE (parg0) == MULT_EXPR
9436 && TREE_CODE (parg1) != MULT_EXPR)
9437 return fold_build2_loc (loc, pcode, type,
9438 fold_build2_loc (loc, PLUS_EXPR, type,
9439 fold_convert_loc (loc, type,
9440 parg0),
9441 fold_convert_loc (loc, type,
9442 marg)),
9443 fold_convert_loc (loc, type, parg1));
9444 if (TREE_CODE (parg0) != MULT_EXPR
9445 && TREE_CODE (parg1) == MULT_EXPR)
9446 return
9447 fold_build2_loc (loc, PLUS_EXPR, type,
9448 fold_convert_loc (loc, type, parg0),
9449 fold_build2_loc (loc, pcode, type,
9450 fold_convert_loc (loc, type, marg),
9451 fold_convert_loc (loc, type,
9452 parg1)));
9455 else
9457 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9458 to __complex__ ( x, y ). This is not the same for SNaNs or
9459 if signed zeros are involved. */
9460 if (!HONOR_SNANS (element_mode (arg0))
9461 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9462 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9464 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9465 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9466 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9467 bool arg0rz = false, arg0iz = false;
9468 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9469 || (arg0i && (arg0iz = real_zerop (arg0i))))
9471 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9472 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9473 if (arg0rz && arg1i && real_zerop (arg1i))
9475 tree rp = arg1r ? arg1r
9476 : build1 (REALPART_EXPR, rtype, arg1);
9477 tree ip = arg0i ? arg0i
9478 : build1 (IMAGPART_EXPR, rtype, arg0);
9479 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9481 else if (arg0iz && arg1r && real_zerop (arg1r))
9483 tree rp = arg0r ? arg0r
9484 : build1 (REALPART_EXPR, rtype, arg0);
9485 tree ip = arg1i ? arg1i
9486 : build1 (IMAGPART_EXPR, rtype, arg1);
9487 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9492 if (flag_unsafe_math_optimizations
9493 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9494 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9495 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9496 return tem;
9498 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9499 We associate floats only if the user has specified
9500 -fassociative-math. */
9501 if (flag_associative_math
9502 && TREE_CODE (arg1) == PLUS_EXPR
9503 && TREE_CODE (arg0) != MULT_EXPR)
9505 tree tree10 = TREE_OPERAND (arg1, 0);
9506 tree tree11 = TREE_OPERAND (arg1, 1);
9507 if (TREE_CODE (tree11) == MULT_EXPR
9508 && TREE_CODE (tree10) == MULT_EXPR)
9510 tree tree0;
9511 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9512 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9515 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9516 We associate floats only if the user has specified
9517 -fassociative-math. */
9518 if (flag_associative_math
9519 && TREE_CODE (arg0) == PLUS_EXPR
9520 && TREE_CODE (arg1) != MULT_EXPR)
9522 tree tree00 = TREE_OPERAND (arg0, 0);
9523 tree tree01 = TREE_OPERAND (arg0, 1);
9524 if (TREE_CODE (tree01) == MULT_EXPR
9525 && TREE_CODE (tree00) == MULT_EXPR)
9527 tree tree0;
9528 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9529 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9534 bit_rotate:
9535 /* If A is unsigned and C1 + C2 equals the precision of A, then
9536 (A << C1) + (A >> C2) is a rotate of A by C1 bits. */
9537 /* Likewise, if A is unsigned and Z is the precision of A, then
9538 (A << B) + (A >> (Z - B)) is a rotate of A by B bits. */
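/* For example (a sketch assuming 32-bit unsigned x), both
   (x << 3) + (x >> 29) and (x << n) + (x >> (32 - n)) are recognized
   below and folded to a left rotate of x.  */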
9540 enum tree_code code0, code1;
9541 tree rtype;
9542 code0 = TREE_CODE (arg0);
9543 code1 = TREE_CODE (arg1);
9544 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9545 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9546 && operand_equal_p (TREE_OPERAND (arg0, 0),
9547 TREE_OPERAND (arg1, 0), 0)
9548 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9549 TYPE_UNSIGNED (rtype))
9550 /* Only create rotates in complete modes. Other cases are not
9551 expanded properly. */
9552 && (element_precision (rtype)
9553 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9555 tree tree01, tree11;
9556 enum tree_code code01, code11;
9558 tree01 = TREE_OPERAND (arg0, 1);
9559 tree11 = TREE_OPERAND (arg1, 1);
9560 STRIP_NOPS (tree01);
9561 STRIP_NOPS (tree11);
9562 code01 = TREE_CODE (tree01);
9563 code11 = TREE_CODE (tree11);
9564 if (code01 == INTEGER_CST
9565 && code11 == INTEGER_CST
9566 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9567 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9569 tem = build2_loc (loc, LROTATE_EXPR,
9570 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9571 TREE_OPERAND (arg0, 0),
9572 code0 == LSHIFT_EXPR
9573 ? TREE_OPERAND (arg0, 1)
9574 : TREE_OPERAND (arg1, 1));
9575 return fold_convert_loc (loc, type, tem);
9577 else if (code11 == MINUS_EXPR)
9579 tree tree110, tree111;
9580 tree110 = TREE_OPERAND (tree11, 0);
9581 tree111 = TREE_OPERAND (tree11, 1);
9582 STRIP_NOPS (tree110);
9583 STRIP_NOPS (tree111);
9584 if (TREE_CODE (tree110) == INTEGER_CST
9585 && 0 == compare_tree_int (tree110,
9586 element_precision
9587 (TREE_TYPE (TREE_OPERAND
9588 (arg0, 0))))
9589 && operand_equal_p (tree01, tree111, 0))
9590 return
9591 fold_convert_loc (loc, type,
9592 build2 ((code0 == LSHIFT_EXPR
9593 ? LROTATE_EXPR
9594 : RROTATE_EXPR),
9595 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9596 TREE_OPERAND (arg0, 0),
9597 TREE_OPERAND (arg0, 1)));
9599 else if (code01 == MINUS_EXPR)
9601 tree tree010, tree011;
9602 tree010 = TREE_OPERAND (tree01, 0);
9603 tree011 = TREE_OPERAND (tree01, 1);
9604 STRIP_NOPS (tree010);
9605 STRIP_NOPS (tree011);
9606 if (TREE_CODE (tree010) == INTEGER_CST
9607 && 0 == compare_tree_int (tree010,
9608 element_precision
9609 (TREE_TYPE (TREE_OPERAND
9610 (arg0, 0))))
9611 && operand_equal_p (tree11, tree011, 0))
9612 return fold_convert_loc
9613 (loc, type,
9614 build2 ((code0 != LSHIFT_EXPR
9615 ? LROTATE_EXPR
9616 : RROTATE_EXPR),
9617 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9618 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9623 associate:
9624 /* In most languages, we can't associate operations on floats through
9625 parentheses. Rather than remember where the parentheses were, we
9626 don't associate floats at all, unless the user has specified
9627 -fassociative-math.
9628 And, we need to make sure type is not saturating. */
9630 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9631 && !TYPE_SATURATING (type))
9633 tree var0, con0, lit0, minus_lit0;
9634 tree var1, con1, lit1, minus_lit1;
9635 tree atype = type;
9636 bool ok = true;
9638 /* Split both trees into variables, constants, and literals. Then
9639 associate each group together, the constants with literals,
9640 then the result with variables. This increases the chances of
9641 literals being recombined later and of generating relocatable
9642 expressions for the sum of a constant and literal. */
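/* For instance (a sketch with unsigned, wrapping x and y):
   (x + 4) + (y + 5) splits into variables x, y and literals 4, 5,
   which the code below reassociates into (x + y) + 9.  */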
9643 var0 = split_tree (loc, arg0, type, code,
9644 &con0, &lit0, &minus_lit0, 0);
9645 var1 = split_tree (loc, arg1, type, code,
9646 &con1, &lit1, &minus_lit1, code == MINUS_EXPR);
9648 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9649 if (code == MINUS_EXPR)
9650 code = PLUS_EXPR;
9652 /* With undefined overflow prefer doing association in a type
9653 which wraps on overflow, if that is one of the operand types. */
9654 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9655 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9657 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9658 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9659 atype = TREE_TYPE (arg0);
9660 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9661 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9662 atype = TREE_TYPE (arg1);
9663 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9666 /* With undefined overflow we can only associate constants with one
9667 variable, and constants whose association doesn't overflow. */
9668 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9669 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9671 if (var0 && var1)
9673 tree tmp0 = var0;
9674 tree tmp1 = var1;
9675 bool one_neg = false;
9677 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9679 tmp0 = TREE_OPERAND (tmp0, 0);
9680 one_neg = !one_neg;
9682 if (CONVERT_EXPR_P (tmp0)
9683 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9684 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9685 <= TYPE_PRECISION (atype)))
9686 tmp0 = TREE_OPERAND (tmp0, 0);
9687 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9689 tmp1 = TREE_OPERAND (tmp1, 0);
9690 one_neg = !one_neg;
9692 if (CONVERT_EXPR_P (tmp1)
9693 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9694 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9695 <= TYPE_PRECISION (atype)))
9696 tmp1 = TREE_OPERAND (tmp1, 0);
9697 /* The only case we can still associate with two variables
9698 is if they cancel out. */
9699 if (!one_neg
9700 || !operand_equal_p (tmp0, tmp1, 0))
9701 ok = false;
9705 /* Only do something if we found more than two objects. Otherwise,
9706 nothing has changed and we risk infinite recursion. */
9707 if (ok
9708 && (2 < ((var0 != 0) + (var1 != 0)
9709 + (con0 != 0) + (con1 != 0)
9710 + (lit0 != 0) + (lit1 != 0)
9711 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9713 var0 = associate_trees (loc, var0, var1, code, atype);
9714 con0 = associate_trees (loc, con0, con1, code, atype);
9715 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9716 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9717 code, atype);
9719 /* Preserve the MINUS_EXPR if the negative part of the literal is
9720 greater than the positive part. Otherwise, the multiplicative
9721 folding code (i.e. extract_muldiv) may be fooled when
9722 unsigned constants are subtracted, like in the following
9723 example: ((X*2 + 4) - 8U)/2. */
9724 if (minus_lit0 && lit0)
9726 if (TREE_CODE (lit0) == INTEGER_CST
9727 && TREE_CODE (minus_lit0) == INTEGER_CST
9728 && tree_int_cst_lt (lit0, minus_lit0))
9730 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9731 MINUS_EXPR, atype);
9732 lit0 = 0;
9734 else
9736 lit0 = associate_trees (loc, lit0, minus_lit0,
9737 MINUS_EXPR, atype);
9738 minus_lit0 = 0;
9742 /* Don't introduce overflows through reassociation. */
9743 if ((lit0 && TREE_OVERFLOW_P (lit0))
9744 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
9745 return NULL_TREE;
9747 if (minus_lit0)
9749 if (con0 == 0)
9750 return
9751 fold_convert_loc (loc, type,
9752 associate_trees (loc, var0, minus_lit0,
9753 MINUS_EXPR, atype));
9754 else
9756 con0 = associate_trees (loc, con0, minus_lit0,
9757 MINUS_EXPR, atype);
9758 return
9759 fold_convert_loc (loc, type,
9760 associate_trees (loc, var0, con0,
9761 PLUS_EXPR, atype));
9765 con0 = associate_trees (loc, con0, lit0, code, atype);
9766 return
9767 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9768 code, atype));
9772 return NULL_TREE;
9774 case MINUS_EXPR:
9775 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9776 if (TREE_CODE (arg0) == NEGATE_EXPR
9777 && negate_expr_p (op1))
9778 return fold_build2_loc (loc, MINUS_EXPR, type,
9779 negate_expr (op1),
9780 fold_convert_loc (loc, type,
9781 TREE_OPERAND (arg0, 0)));
9783 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9784 __complex__ ( x, -y ). This is not the same for SNaNs or if
9785 signed zeros are involved. */
9786 if (!HONOR_SNANS (element_mode (arg0))
9787 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9788 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9790 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9791 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9792 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9793 bool arg0rz = false, arg0iz = false;
9794 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9795 || (arg0i && (arg0iz = real_zerop (arg0i))))
9797 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9798 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9799 if (arg0rz && arg1i && real_zerop (arg1i))
9801 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9802 arg1r ? arg1r
9803 : build1 (REALPART_EXPR, rtype, arg1));
9804 tree ip = arg0i ? arg0i
9805 : build1 (IMAGPART_EXPR, rtype, arg0);
9806 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9808 else if (arg0iz && arg1r && real_zerop (arg1r))
9810 tree rp = arg0r ? arg0r
9811 : build1 (REALPART_EXPR, rtype, arg0);
9812 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9813 arg1i ? arg1i
9814 : build1 (IMAGPART_EXPR, rtype, arg1));
9815 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9820 /* A - B -> A + (-B) if B is easily negatable. */
9821 if (negate_expr_p (op1)
9822 && ! TYPE_OVERFLOW_SANITIZED (type)
9823 && ((FLOAT_TYPE_P (type)
9824 /* Avoid this transformation if B is a positive REAL_CST. */
9825 && (TREE_CODE (op1) != REAL_CST
9826 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9827 || INTEGRAL_TYPE_P (type)))
9828 return fold_build2_loc (loc, PLUS_EXPR, type,
9829 fold_convert_loc (loc, type, arg0),
9830 negate_expr (op1));
9832 /* Fold &a[i] - &a[j] to i-j. */
9833 if (TREE_CODE (arg0) == ADDR_EXPR
9834 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9835 && TREE_CODE (arg1) == ADDR_EXPR
9836 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9838 tree tem = fold_addr_of_array_ref_difference (loc, type,
9839 TREE_OPERAND (arg0, 0),
9840 TREE_OPERAND (arg1, 0));
9841 if (tem)
9842 return tem;
9845 if (FLOAT_TYPE_P (type)
9846 && flag_unsafe_math_optimizations
9847 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9848 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9849 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9850 return tem;
9852 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9853 one. Make sure the type is not saturating and has the signedness of
9854 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9855 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9856 if ((TREE_CODE (arg0) == MULT_EXPR
9857 || TREE_CODE (arg1) == MULT_EXPR)
9858 && !TYPE_SATURATING (type)
9859 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9860 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9861 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9863 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9864 if (tem)
9865 return tem;
9868 goto associate;
9870 case MULT_EXPR:
9871 if (! FLOAT_TYPE_P (type))
9873 /* Transform x * -C into -x * C if x is easily negatable. */
9874 if (TREE_CODE (op1) == INTEGER_CST
9875 && tree_int_cst_sgn (op1) == -1
9876 && negate_expr_p (op0)
9877 && negate_expr_p (op1)
9878 && (tem = negate_expr (op1)) != op1
9879 && ! TREE_OVERFLOW (tem))
9880 return fold_build2_loc (loc, MULT_EXPR, type,
9881 fold_convert_loc (loc, type,
9882 negate_expr (op0)), tem);
9884 strict_overflow_p = false;
9885 if (TREE_CODE (arg1) == INTEGER_CST
9886 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9887 &strict_overflow_p)))
9889 if (strict_overflow_p)
9890 fold_overflow_warning (("assuming signed overflow does not "
9891 "occur when simplifying "
9892 "multiplication"),
9893 WARN_STRICT_OVERFLOW_MISC);
9894 return fold_convert_loc (loc, type, tem);
9897 /* Optimize z * conj(z) for integer complex numbers. */
9898 if (TREE_CODE (arg0) == CONJ_EXPR
9899 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9900 return fold_mult_zconjz (loc, type, arg1);
9901 if (TREE_CODE (arg1) == CONJ_EXPR
9902 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9903 return fold_mult_zconjz (loc, type, arg0);
9905 else
9907 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9908 This is not the same for NaNs or if signed zeros are
9909 involved. */
9910 if (!HONOR_NANS (arg0)
9911 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9912 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9913 && TREE_CODE (arg1) == COMPLEX_CST
9914 && real_zerop (TREE_REALPART (arg1)))
9916 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9917 if (real_onep (TREE_IMAGPART (arg1)))
9918 return
9919 fold_build2_loc (loc, COMPLEX_EXPR, type,
9920 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9921 rtype, arg0)),
9922 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9923 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9924 return
9925 fold_build2_loc (loc, COMPLEX_EXPR, type,
9926 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9927 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9928 rtype, arg0)));
9931 /* Optimize z * conj(z) for floating point complex numbers.
9932 Guarded by flag_unsafe_math_optimizations as non-finite
9933 imaginary components don't produce scalar results. */
9934 if (flag_unsafe_math_optimizations
9935 && TREE_CODE (arg0) == CONJ_EXPR
9936 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9937 return fold_mult_zconjz (loc, type, arg1);
9938 if (flag_unsafe_math_optimizations
9939 && TREE_CODE (arg1) == CONJ_EXPR
9940 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9941 return fold_mult_zconjz (loc, type, arg0);
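/* In both the integer case and the unsafe-math floating point case
   above, the fold computes, for z == a + b*I (illustrative notation),
   z * conj(z) == a*a + b*b with imaginary part 0, which is the form
   fold_mult_zconjz builds.  */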
9943 goto associate;
9945 case BIT_IOR_EXPR:
9946 /* Canonicalize (X & C1) | C2. */
9947 if (TREE_CODE (arg0) == BIT_AND_EXPR
9948 && TREE_CODE (arg1) == INTEGER_CST
9949 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9951 int width = TYPE_PRECISION (type), w;
9952 wide_int c1 = TREE_OPERAND (arg0, 1);
9953 wide_int c2 = arg1;
9955 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9956 if ((c1 & c2) == c1)
9957 return omit_one_operand_loc (loc, type, arg1,
9958 TREE_OPERAND (arg0, 0));
9960 wide_int msk = wi::mask (width, false,
9961 TYPE_PRECISION (TREE_TYPE (arg1)));
9963 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9964 if (msk.and_not (c1 | c2) == 0)
9966 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9967 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9970 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9971 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9972 mode which allows further optimizations. */
9973 c1 &= msk;
9974 c2 &= msk;
9975 wide_int c3 = c1.and_not (c2);
9976 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9978 wide_int mask = wi::mask (w, false,
9979 TYPE_PRECISION (type));
9980 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
9982 c3 = mask;
9983 break;
9987 if (c3 != c1)
9989 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9990 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
9991 wide_int_to_tree (type, c3));
9992 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
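/* Worked instance (hypothetical constants): (X & 0x3c) | 0x0c has
   C1 & ~C2 == 0x30, and 0x3c | 0x0c is not a mode mask, so the result
   is canonicalized to (X & 0x30) | 0x0c.  */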
9996 /* See if this can be simplified into a rotate first. If that
9997 is unsuccessful continue in the association code. */
9998 goto bit_rotate;
10000 case BIT_XOR_EXPR:
10001 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10002 if (TREE_CODE (arg0) == BIT_AND_EXPR
10003 && INTEGRAL_TYPE_P (type)
10004 && integer_onep (TREE_OPERAND (arg0, 1))
10005 && integer_onep (arg1))
10006 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10007 build_zero_cst (TREE_TYPE (arg0)));
10009 /* See if this can be simplified into a rotate first. If that
10010 is unsuccessful continue in the association code. */
10011 goto bit_rotate;
10013 case BIT_AND_EXPR:
10014 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10015 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10016 && INTEGRAL_TYPE_P (type)
10017 && integer_onep (TREE_OPERAND (arg0, 1))
10018 && integer_onep (arg1))
10020 tree tem2;
10021 tem = TREE_OPERAND (arg0, 0);
10022 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10023 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10024 tem, tem2);
10025 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10026 build_zero_cst (TREE_TYPE (tem)));
10028 /* Fold ~X & 1 as (X & 1) == 0. */
10029 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10030 && INTEGRAL_TYPE_P (type)
10031 && integer_onep (arg1))
10033 tree tem2;
10034 tem = TREE_OPERAND (arg0, 0);
10035 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10036 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10037 tem, tem2);
10038 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10039 build_zero_cst (TREE_TYPE (tem)));
10041 /* Fold !X & 1 as X == 0. */
10042 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10043 && integer_onep (arg1))
10045 tem = TREE_OPERAND (arg0, 0);
10046 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10047 build_zero_cst (TREE_TYPE (tem)));
10050 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10051 multiple of 1 << CST. */
10052 if (TREE_CODE (arg1) == INTEGER_CST)
10054 wide_int cst1 = arg1;
10055 wide_int ncst1 = -cst1;
10056 if ((cst1 & ncst1) == ncst1
10057 && multiple_of_p (type, arg0,
10058 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10059 return fold_convert_loc (loc, type, arg0);
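/* Example (hypothetical constants): (X * 12) & -4 folds to X * 12,
   since -4 has the required form -(1 << 2) and 12 is a constant
   multiple of 1 << 2.  */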
10062 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10063 bits from CST2. */
10064 if (TREE_CODE (arg1) == INTEGER_CST
10065 && TREE_CODE (arg0) == MULT_EXPR
10066 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10068 wide_int warg1 = arg1;
10069 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10071 if (masked == 0)
10072 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10073 arg0, arg1);
10074 else if (masked != warg1)
10076 /* Avoid the transform if arg1 is a mask of some
10077 mode which allows further optimizations. */
10078 int pop = wi::popcount (warg1);
10079 if (!(pop >= BITS_PER_UNIT
10080 && pow2p_hwi (pop)
10081 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10082 return fold_build2_loc (loc, code, type, op0,
10083 wide_int_to_tree (type, masked));
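/* Two instances of the fold above (hypothetical constants):
   (X * 8) & 5 becomes 0, since the product has three trailing zero
   bits and 5 & ~7 == 0; and (X * 4) & 7 drops the known-zero bits to
   give (X * 4) & 4.  */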
10087 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10088 ((A & N) + B) & M -> (A + B) & M
10089 Similarly if (N & M) == 0,
10090 ((A | N) + B) & M -> (A + B) & M
10091 and for - instead of + (or unary - instead of +)
10092 and/or ^ instead of |.
10093 If B is constant and (B & M) == 0, fold into A & M. */
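/* Concrete cases (a sketch with M == 7 == (1 << 3) - 1):
   ((A & 0xf) + B) & 7 -> (A + B) & 7, since 0xf & 7 == 7; and
   ((A | 8) + B) & 7 -> (A + B) & 7, since 8 & 7 == 0.  */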
10094 if (TREE_CODE (arg1) == INTEGER_CST)
10096 wide_int cst1 = arg1;
10097 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10098 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10099 && (TREE_CODE (arg0) == PLUS_EXPR
10100 || TREE_CODE (arg0) == MINUS_EXPR
10101 || TREE_CODE (arg0) == NEGATE_EXPR)
10102 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10103 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10105 tree pmop[2];
10106 int which = 0;
10107 wide_int cst0;
10109 /* Now we know that arg0 is (C + D) or (C - D) or
10110 -C and arg1 (M) is == (1LL << cst) - 1.
10111 Store C into PMOP[0] and D into PMOP[1]. */
10112 pmop[0] = TREE_OPERAND (arg0, 0);
10113 pmop[1] = NULL;
10114 if (TREE_CODE (arg0) != NEGATE_EXPR)
10116 pmop[1] = TREE_OPERAND (arg0, 1);
10117 which = 1;
10120 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10121 which = -1;
10123 for (; which >= 0; which--)
10124 switch (TREE_CODE (pmop[which]))
10126 case BIT_AND_EXPR:
10127 case BIT_IOR_EXPR:
10128 case BIT_XOR_EXPR:
10129 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10130 != INTEGER_CST)
10131 break;
10132 cst0 = TREE_OPERAND (pmop[which], 1);
10133 cst0 &= cst1;
10134 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10136 if (cst0 != cst1)
10137 break;
10139 else if (cst0 != 0)
10140 break;
10141 /* If C or D is of the form (A & N) where
10142 (N & M) == M, or of the form (A | N) or
10143 (A ^ N) where (N & M) == 0, replace it with A. */
10144 pmop[which] = TREE_OPERAND (pmop[which], 0);
10145 break;
10146 case INTEGER_CST:
10147 /* If C or D is a N where (N & M) == 0, it can be
10148 omitted (assumed 0). */
10149 if ((TREE_CODE (arg0) == PLUS_EXPR
10150 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10151 && (cst1 & pmop[which]) == 0)
10152 pmop[which] = NULL;
10153 break;
10154 default:
10155 break;
10158 /* Only build anything new if we optimized one or both arguments
10159 above. */
10160 if (pmop[0] != TREE_OPERAND (arg0, 0)
10161 || (TREE_CODE (arg0) != NEGATE_EXPR
10162 && pmop[1] != TREE_OPERAND (arg0, 1)))
10164 tree utype = TREE_TYPE (arg0);
10165 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10167 /* Perform the operations in a type that has defined
10168 overflow behavior. */
10169 utype = unsigned_type_for (TREE_TYPE (arg0));
10170 if (pmop[0] != NULL)
10171 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10172 if (pmop[1] != NULL)
10173 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10176 if (TREE_CODE (arg0) == NEGATE_EXPR)
10177 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10178 else if (TREE_CODE (arg0) == PLUS_EXPR)
10180 if (pmop[0] != NULL && pmop[1] != NULL)
10181 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10182 pmop[0], pmop[1]);
10183 else if (pmop[0] != NULL)
10184 tem = pmop[0];
10185 else if (pmop[1] != NULL)
10186 tem = pmop[1];
10187 else
10188 return build_int_cst (type, 0);
10190 else if (pmop[0] == NULL)
10191 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10192 else
10193 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10194 pmop[0], pmop[1]);
10195 /* TEM is now the new binary +, - or unary - replacement. */
10196 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10197 fold_convert_loc (loc, utype, arg1));
10198 return fold_convert_loc (loc, type, tem);
10203 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10204 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10205 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10207 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10209 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10210 if (mask == -1)
10211 return
10212 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10215 goto associate;
10217 case RDIV_EXPR:
10218 /* Don't touch a floating-point divide by zero unless the mode
10219 of the constant can represent infinity. */
10220 if (TREE_CODE (arg1) == REAL_CST
10221 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10222 && real_zerop (arg1))
10223 return NULL_TREE;
10225 /* (-A) / (-B) -> A / B */
10226 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10227 return fold_build2_loc (loc, RDIV_EXPR, type,
10228 TREE_OPERAND (arg0, 0),
10229 negate_expr (arg1));
10230 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10231 return fold_build2_loc (loc, RDIV_EXPR, type,
10232 negate_expr (arg0),
10233 TREE_OPERAND (arg1, 0));
10234 return NULL_TREE;
10236 case TRUNC_DIV_EXPR:
10237 /* Fall through */
10239 case FLOOR_DIV_EXPR:
10240 /* Simplify A / (B << N) where A and B are positive and B is
10241 a power of 2, to A >> (N + log2(B)). */
10242 strict_overflow_p = false;
10243 if (TREE_CODE (arg1) == LSHIFT_EXPR
10244 && (TYPE_UNSIGNED (type)
10245 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10247 tree sval = TREE_OPERAND (arg1, 0);
10248 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10250 tree sh_cnt = TREE_OPERAND (arg1, 1);
10251 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10252 wi::exact_log2 (sval));
10254 if (strict_overflow_p)
10255 fold_overflow_warning (("assuming signed overflow does not "
10256 "occur when simplifying A / (B << N)"),
10257 WARN_STRICT_OVERFLOW_MISC);
10259 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10260 sh_cnt, pow2);
10261 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10262 fold_convert_loc (loc, type, arg0), sh_cnt);
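/* For instance (a sketch with unsigned A and hypothetical n):
   A / (2 << n) becomes A >> (n + 1), folding log2 of the power-of-two
   base into the shift count.  */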
10266 /* Fall through */
10268 case ROUND_DIV_EXPR:
10269 case CEIL_DIV_EXPR:
10270 case EXACT_DIV_EXPR:
10271 if (integer_zerop (arg1))
10272 return NULL_TREE;
10274 /* Convert -A / -B to A / B when the type is signed and overflow is
10275 undefined. */
10276 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10277 && TREE_CODE (op0) == NEGATE_EXPR
10278 && negate_expr_p (op1))
10280 if (INTEGRAL_TYPE_P (type))
10281 fold_overflow_warning (("assuming signed overflow does not occur "
10282 "when distributing negation across "
10283 "division"),
10284 WARN_STRICT_OVERFLOW_MISC);
10285 return fold_build2_loc (loc, code, type,
10286 fold_convert_loc (loc, type,
10287 TREE_OPERAND (arg0, 0)),
10288 negate_expr (op1));
10290 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10291 && TREE_CODE (arg1) == NEGATE_EXPR
10292 && negate_expr_p (op0))
10294 if (INTEGRAL_TYPE_P (type))
10295 fold_overflow_warning (("assuming signed overflow does not occur "
10296 "when distributing negation across "
10297 "division"),
10298 WARN_STRICT_OVERFLOW_MISC);
10299 return fold_build2_loc (loc, code, type,
10300 negate_expr (op0),
10301 fold_convert_loc (loc, type,
10302 TREE_OPERAND (arg1, 0)));
10305 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10306 operation, EXACT_DIV_EXPR.
10308 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10309 At one time others generated faster code; it's not clear whether they
10310 still do after the last round of changes to the DIV code in expmed.c. */
10311 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10312 && multiple_of_p (type, arg0, arg1))
10313 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10314 fold_convert (type, arg0),
10315 fold_convert (type, arg1));
10317 strict_overflow_p = false;
10318 if (TREE_CODE (arg1) == INTEGER_CST
10319 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10320 &strict_overflow_p)))
10322 if (strict_overflow_p)
10323 fold_overflow_warning (("assuming signed overflow does not occur "
10324 "when simplifying division"),
10325 WARN_STRICT_OVERFLOW_MISC);
10326 return fold_convert_loc (loc, type, tem);
10329 return NULL_TREE;
10331 case CEIL_MOD_EXPR:
10332 case FLOOR_MOD_EXPR:
10333 case ROUND_MOD_EXPR:
10334 case TRUNC_MOD_EXPR:
10335 strict_overflow_p = false;
10336 if (TREE_CODE (arg1) == INTEGER_CST
10337 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10338 &strict_overflow_p)))
10340 if (strict_overflow_p)
10341 fold_overflow_warning (("assuming signed overflow does not occur "
10342 "when simplifying modulus"),
10343 WARN_STRICT_OVERFLOW_MISC);
10344 return fold_convert_loc (loc, type, tem);
10347 return NULL_TREE;
10349 case LROTATE_EXPR:
10350 case RROTATE_EXPR:
10351 case RSHIFT_EXPR:
10352 case LSHIFT_EXPR:
10353 /* Since a negative shift count is not well-defined,
10354 don't try to compute it in the compiler. */
10355 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10356 return NULL_TREE;
10358 prec = element_precision (type);
10360 /* If we have a rotate of a bit operation with the rotate count and
10361 the second operand of the bit operation both constant,
10362 permute the two operations. */
10363 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10364 && (TREE_CODE (arg0) == BIT_AND_EXPR
10365 || TREE_CODE (arg0) == BIT_IOR_EXPR
10366 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10367 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10369 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10370 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10371 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10372 fold_build2_loc (loc, code, type,
10373 arg00, arg1),
10374 fold_build2_loc (loc, code, type,
10375 arg01, arg1));
10378 /* Two consecutive rotates adding up to some integer
10379 multiple of the precision of the type can be ignored. */
10380 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10381 && TREE_CODE (arg0) == RROTATE_EXPR
10382 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10383 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10384 prec) == 0)
10385 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10387 return NULL_TREE;
10389 case MIN_EXPR:
10390 case MAX_EXPR:
10391 goto associate;
10393 case TRUTH_ANDIF_EXPR:
10394 /* Note that the operands of this must be ints
10395 and their values must be 0 or 1.
10396 ("true" is a fixed value perhaps depending on the language.) */
10397 /* If first arg is constant zero, return it. */
10398 if (integer_zerop (arg0))
10399 return fold_convert_loc (loc, type, arg0);
10400 /* FALLTHRU */
10401 case TRUTH_AND_EXPR:
10402 /* If either arg is constant true, drop it. */
10403 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10404 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10405 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10406 /* Preserve sequence points. */
10407 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10408 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10409 /* If second arg is constant zero, result is zero, but first arg
10410 must be evaluated. */
10411 if (integer_zerop (arg1))
10412 return omit_one_operand_loc (loc, type, arg1, arg0);
10413 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10414 case will be handled here. */
10415 if (integer_zerop (arg0))
10416 return omit_one_operand_loc (loc, type, arg0, arg1);
10418 /* !X && X is always false. */
10419 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10420 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10421 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10422 /* X && !X is always false. */
10423 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10424 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10425 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10427 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10428 means A >= Y && A != MAX, but in this case we know that
10429 A < X <= MAX. */
10431 if (!TREE_SIDE_EFFECTS (arg0)
10432 && !TREE_SIDE_EFFECTS (arg1))
10434 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10435 if (tem && !operand_equal_p (tem, arg0, 0))
10436 return fold_build2_loc (loc, code, type, tem, arg1);
10438 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10439 if (tem && !operand_equal_p (tem, arg1, 0))
10440 return fold_build2_loc (loc, code, type, arg0, tem);
10443 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10444 != NULL_TREE)
10445 return tem;
10447 return NULL_TREE;
10449 case TRUTH_ORIF_EXPR:
10450 /* Note that the operands of this must be ints
10451 and their values must be 0 or 1.
10452 ("true" is a fixed value perhaps depending on the language.) */
10453 /* If first arg is constant true, return it. */
10454 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10455 return fold_convert_loc (loc, type, arg0);
10456 /* FALLTHRU */
10457 case TRUTH_OR_EXPR:
10458 /* If either arg is constant zero, drop it. */
10459 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10460 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10461 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10462 /* Preserve sequence points. */
10463 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10464 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10465 /* If second arg is constant true, result is true, but we must
10466 evaluate first arg. */
10467 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10468 return omit_one_operand_loc (loc, type, arg1, arg0);
10469 /* Likewise for first arg, but note this only occurs here for
10470 TRUTH_OR_EXPR. */
10471 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10472 return omit_one_operand_loc (loc, type, arg0, arg1);
10474 /* !X || X is always true. */
10475 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10476 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10477 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10478 /* X || !X is always true. */
10479 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10480 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10481 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10483 /* (X && !Y) || (!X && Y) is X ^ Y */
10484 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10485 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10487 tree a0, a1, l0, l1, n0, n1;
10489 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10490 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10492 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10493 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10495 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10496 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10498 if ((operand_equal_p (n0, a0, 0)
10499 && operand_equal_p (n1, a1, 0))
10500 || (operand_equal_p (n0, a1, 0)
10501 && operand_equal_p (n1, a0, 0)))
10502 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10505 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10506 != NULL_TREE)
10507 return tem;
10509 return NULL_TREE;
10511 case TRUTH_XOR_EXPR:
10512 /* If the second arg is constant zero, drop it. */
10513 if (integer_zerop (arg1))
10514 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10515 /* If the second arg is constant true, this is a logical inversion. */
10516 if (integer_onep (arg1))
10518 tem = invert_truthvalue_loc (loc, arg0);
10519 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10521 /* Identical arguments cancel to zero. */
10522 if (operand_equal_p (arg0, arg1, 0))
10523 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10525 /* !X ^ X is always true. */
10526 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10527 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10528 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10530 /* X ^ !X is always true. */
10531 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10532 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10533 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10535 return NULL_TREE;
10537 case EQ_EXPR:
10538 case NE_EXPR:
10539 STRIP_NOPS (arg0);
10540 STRIP_NOPS (arg1);
10542 tem = fold_comparison (loc, code, type, op0, op1);
10543 if (tem != NULL_TREE)
10544 return tem;
10546 /* bool_var != 1 becomes !bool_var. */
10547 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10548 && code == NE_EXPR)
10549 return fold_convert_loc (loc, type,
10550 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10551 TREE_TYPE (arg0), arg0));
10553 /* bool_var == 0 becomes !bool_var. */
10554 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10555 && code == EQ_EXPR)
10556 return fold_convert_loc (loc, type,
10557 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10558 TREE_TYPE (arg0), arg0));
10560 /* !exp != 0 becomes !exp */
10561 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10562 && code == NE_EXPR)
10563 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10565 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10566 if ((TREE_CODE (arg0) == PLUS_EXPR
10567 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10568 || TREE_CODE (arg0) == MINUS_EXPR)
10569 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10570 0)),
10571 arg1, 0)
10572 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10573 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10575 tree val = TREE_OPERAND (arg0, 1);
10576 val = fold_build2_loc (loc, code, type, val,
10577 build_int_cst (TREE_TYPE (val), 0));
10578 return omit_two_operands_loc (loc, type, val,
10579 TREE_OPERAND (arg0, 0), arg1);
10582 /* Transform comparisons of the form X CMP X +- Y to Y CMP 0. */
10583 if ((TREE_CODE (arg1) == PLUS_EXPR
10584 || TREE_CODE (arg1) == POINTER_PLUS_EXPR
10585 || TREE_CODE (arg1) == MINUS_EXPR)
10586 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10587 0)),
10588 arg0, 0)
10589 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10590 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10592 tree val = TREE_OPERAND (arg1, 1);
10593 val = fold_build2_loc (loc, code, type, val,
10594 build_int_cst (TREE_TYPE (val), 0));
10595 return omit_two_operands_loc (loc, type, val,
10596 TREE_OPERAND (arg1, 0), arg0);
10599 /* If this is an EQ or NE comparison with zero and ARG0 is
10600 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10601 two operations, but the latter can be done in one less insn
10602 on machines that have only two-operand insns or on which a
10603 constant cannot be the first operand. */
10604 if (TREE_CODE (arg0) == BIT_AND_EXPR
10605 && integer_zerop (arg1))
10607 tree arg00 = TREE_OPERAND (arg0, 0);
10608 tree arg01 = TREE_OPERAND (arg0, 1);
10609 if (TREE_CODE (arg00) == LSHIFT_EXPR
10610 && integer_onep (TREE_OPERAND (arg00, 0)))
10612 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10613 arg01, TREE_OPERAND (arg00, 1));
10614 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10615 build_int_cst (TREE_TYPE (arg0), 1));
10616 return fold_build2_loc (loc, code, type,
10617 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10618 arg1);
10620 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10621 && integer_onep (TREE_OPERAND (arg01, 0)))
10623 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10624 arg00, TREE_OPERAND (arg01, 1));
10625 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10626 build_int_cst (TREE_TYPE (arg0), 1));
10627 return fold_build2_loc (loc, code, type,
10628 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10629 arg1);
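/* E.g. (hypothetical operands): ((1 << n) & x) != 0 is rewritten as
   ((x >> n) & 1) != 0, which avoids a constant first operand.  */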
10633 /* If this is an NE or EQ comparison of zero against the result of a
10634 signed MOD operation whose second operand is a power of 2, make
10635 the MOD operation unsigned since it is simpler and equivalent. */
10636 if (integer_zerop (arg1)
10637 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10638 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10639 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10640 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10641 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10642 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10644 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10645 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10646 fold_convert_loc (loc, newtype,
10647 TREE_OPERAND (arg0, 0)),
10648 fold_convert_loc (loc, newtype,
10649 TREE_OPERAND (arg0, 1)));
10651 return fold_build2_loc (loc, code, type, newmod,
10652 fold_convert_loc (loc, newtype, arg1));
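/* E.g. (a sketch with signed 32-bit x): x % 8 == 0 becomes
   (unsigned) x % 8u == 0; the two agree for negative x as well,
   because 2^32 is itself a multiple of 8.  */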
10655 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10656 C1 is a valid shift constant, and C2 is a power of two, i.e.
10657 a single bit. */
10658 if (TREE_CODE (arg0) == BIT_AND_EXPR
10659 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10660 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10661 == INTEGER_CST
10662 && integer_pow2p (TREE_OPERAND (arg0, 1))
10663 && integer_zerop (arg1))
10665 tree itype = TREE_TYPE (arg0);
10666 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10667 prec = TYPE_PRECISION (itype);
10669 /* Check for a valid shift count. */
10670 if (wi::ltu_p (arg001, prec))
10672 tree arg01 = TREE_OPERAND (arg0, 1);
10673 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10674 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10675 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10676 can be rewritten as (X & (C2 << C1)) != 0. */
10677 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10679 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10680 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10681 return fold_build2_loc (loc, code, type, tem,
10682 fold_convert_loc (loc, itype, arg1));
10684 /* Otherwise, for signed (arithmetic) shifts,
10685 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10686 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10687 else if (!TYPE_UNSIGNED (itype))
10688 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10689 arg000, build_int_cst (itype, 0));
10690 /* Otherwise, for unsigned (logical) shifts,
10691 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10692 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10693 else
10694 return omit_one_operand_loc (loc, type,
10695 code == EQ_EXPR ? integer_one_node
10696 : integer_zero_node,
10697 arg000);
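/* Instances of the three branches above (a sketch with 32-bit x):
   ((x >> 4) & 2) != 0 -> (x & 0x20) != 0, since 2 << 4 does not
   overflow; for signed x, ((x >> 31) & 2) != 0 -> x < 0; for unsigned
   x the same test is rewritten as constant false.  */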
10701 /* If this is a comparison of a field, we may be able to simplify it. */
10702 if ((TREE_CODE (arg0) == COMPONENT_REF
10703 || TREE_CODE (arg0) == BIT_FIELD_REF)
10704 /* Handle the constant case even without -O
10705 to make sure the warnings are given. */
10706 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10708 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10709 if (t1)
10710 return t1;
10713 /* Optimize comparisons of strlen vs zero to a compare of the
10714 first character of the string vs zero. To wit,
10715 strlen(ptr) == 0 => *ptr == 0
10716 strlen(ptr) != 0 => *ptr != 0
10717 Other cases should reduce to one of these two (or a constant)
10718 due to the return value of strlen being unsigned. */
10719 if (TREE_CODE (arg0) == CALL_EXPR
10720 && integer_zerop (arg1))
10722 tree fndecl = get_callee_fndecl (arg0);
10724 if (fndecl
10725 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10726 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10727 && call_expr_nargs (arg0) == 1
10728 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10730 tree iref = build_fold_indirect_ref_loc (loc,
10731 CALL_EXPR_ARG (arg0, 0));
10732 return fold_build2_loc (loc, code, type, iref,
10733 build_int_cst (TREE_TYPE (iref), 0));
10737 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10738 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10739 if (TREE_CODE (arg0) == RSHIFT_EXPR
10740 && integer_zerop (arg1)
10741 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10743 tree arg00 = TREE_OPERAND (arg0, 0);
10744 tree arg01 = TREE_OPERAND (arg0, 1);
10745 tree itype = TREE_TYPE (arg00);
10746 if (wi::eq_p (arg01, element_precision (itype) - 1))
10748 if (TYPE_UNSIGNED (itype))
10750 itype = signed_type_for (itype);
10751 arg00 = fold_convert_loc (loc, itype, arg00);
10753 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10754 type, arg00, build_zero_cst (itype));
10758 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10759 (X & C) == 0 when C is a single bit. */
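/* For example, (~X & 4) == 0 becomes (X & 4) != 0: the single
   bit is clear in ~X exactly when it is set in X. */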
10760 if (TREE_CODE (arg0) == BIT_AND_EXPR
10761 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10762 && integer_zerop (arg1)
10763 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10765 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10766 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10767 TREE_OPERAND (arg0, 1));
10768 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10769 type, tem,
10770 fold_convert_loc (loc, TREE_TYPE (arg0),
10771 arg1));
10774 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10775 constant C is a power of two, i.e. a single bit. */
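/* For example, ((X & 8) ^ 8) == 0 becomes (X & 8) != 0, since
   the XOR with C clears the bit exactly when X has it set. */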
10776 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10777 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10778 && integer_zerop (arg1)
10779 && integer_pow2p (TREE_OPERAND (arg0, 1))
10780 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10781 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10783 tree arg00 = TREE_OPERAND (arg0, 0);
10784 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10785 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10788 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10789 when C is a power of two, i.e. a single bit. */
10790 if (TREE_CODE (arg0) == BIT_AND_EXPR
10791 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10792 && integer_zerop (arg1)
10793 && integer_pow2p (TREE_OPERAND (arg0, 1))
10794 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10795 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10797 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10798 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10799 arg000, TREE_OPERAND (arg0, 1));
10800 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10801 tem, build_int_cst (TREE_TYPE (tem), 0));
10804 if (integer_zerop (arg1)
10805 && tree_expr_nonzero_p (arg0))
10807 tree res = constant_boolean_node (code==NE_EXPR, type);
10808 return omit_one_operand_loc (loc, type, res, arg0);
10811 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
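/* For example, (X & 7) == (Y & 7) becomes ((X ^ Y) & 7) == 0:
   the masked values agree iff X and Y agree on the masked bits. */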
10812 if (TREE_CODE (arg0) == BIT_AND_EXPR
10813 && TREE_CODE (arg1) == BIT_AND_EXPR)
10815 tree arg00 = TREE_OPERAND (arg0, 0);
10816 tree arg01 = TREE_OPERAND (arg0, 1);
10817 tree arg10 = TREE_OPERAND (arg1, 0);
10818 tree arg11 = TREE_OPERAND (arg1, 1);
10819 tree itype = TREE_TYPE (arg0);
10821 if (operand_equal_p (arg01, arg11, 0))
10823 tem = fold_convert_loc (loc, itype, arg10);
10824 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10825 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10826 return fold_build2_loc (loc, code, type, tem,
10827 build_zero_cst (itype));
10829 if (operand_equal_p (arg01, arg10, 0))
10831 tem = fold_convert_loc (loc, itype, arg11);
10832 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10833 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10834 return fold_build2_loc (loc, code, type, tem,
10835 build_zero_cst (itype));
10837 if (operand_equal_p (arg00, arg11, 0))
10839 tem = fold_convert_loc (loc, itype, arg10);
10840 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10841 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10842 return fold_build2_loc (loc, code, type, tem,
10843 build_zero_cst (itype));
10845 if (operand_equal_p (arg00, arg10, 0))
10847 tem = fold_convert_loc (loc, itype, arg11);
10848 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10849 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10850 return fold_build2_loc (loc, code, type, tem,
10851 build_zero_cst (itype));
10855 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10856 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10858 tree arg00 = TREE_OPERAND (arg0, 0);
10859 tree arg01 = TREE_OPERAND (arg0, 1);
10860 tree arg10 = TREE_OPERAND (arg1, 0);
10861 tree arg11 = TREE_OPERAND (arg1, 1);
10862 tree itype = TREE_TYPE (arg0);
10864 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10865 operand_equal_p guarantees no side-effects so we don't need
10866 to use omit_one_operand on Z. */
10867 if (operand_equal_p (arg01, arg11, 0))
10868 return fold_build2_loc (loc, code, type, arg00,
10869 fold_convert_loc (loc, TREE_TYPE (arg00),
10870 arg10));
10871 if (operand_equal_p (arg01, arg10, 0))
10872 return fold_build2_loc (loc, code, type, arg00,
10873 fold_convert_loc (loc, TREE_TYPE (arg00),
10874 arg11));
10875 if (operand_equal_p (arg00, arg11, 0))
10876 return fold_build2_loc (loc, code, type, arg01,
10877 fold_convert_loc (loc, TREE_TYPE (arg01),
10878 arg10));
10879 if (operand_equal_p (arg00, arg10, 0))
10880 return fold_build2_loc (loc, code, type, arg01,
10881 fold_convert_loc (loc, TREE_TYPE (arg01),
10882 arg11));
10884 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
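/* For example, (X ^ 5) == (Y ^ 3) becomes (X ^ 6) == Y, since
   5 ^ 3 == 6 and an XOR by a constant can be moved to one side. */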
10885 if (TREE_CODE (arg01) == INTEGER_CST
10886 && TREE_CODE (arg11) == INTEGER_CST)
10888 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10889 fold_convert_loc (loc, itype, arg11));
10890 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10891 return fold_build2_loc (loc, code, type, tem,
10892 fold_convert_loc (loc, itype, arg10));
10896 /* Attempt to simplify equality/inequality comparisons of complex
10897 values. Only lower the comparison if the result is known or
10898 can be simplified to a single scalar comparison. */
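/* For example, x + 1i == y + 2i folds to false: the imaginary
   parts 1 and 2 compare unequal as constants, so only the real
   operands need to be preserved for their side effects. */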
10899 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10900 || TREE_CODE (arg0) == COMPLEX_CST)
10901 && (TREE_CODE (arg1) == COMPLEX_EXPR
10902 || TREE_CODE (arg1) == COMPLEX_CST))
10904 tree real0, imag0, real1, imag1;
10905 tree rcond, icond;
10907 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10909 real0 = TREE_OPERAND (arg0, 0);
10910 imag0 = TREE_OPERAND (arg0, 1);
10912 else
10914 real0 = TREE_REALPART (arg0);
10915 imag0 = TREE_IMAGPART (arg0);
10918 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10920 real1 = TREE_OPERAND (arg1, 0);
10921 imag1 = TREE_OPERAND (arg1, 1);
10923 else
10925 real1 = TREE_REALPART (arg1);
10926 imag1 = TREE_IMAGPART (arg1);
10929 rcond = fold_binary_loc (loc, code, type, real0, real1);
10930 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10932 if (integer_zerop (rcond))
10934 if (code == EQ_EXPR)
10935 return omit_two_operands_loc (loc, type, boolean_false_node,
10936 imag0, imag1);
10937 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10939 else
10941 if (code == NE_EXPR)
10942 return omit_two_operands_loc (loc, type, boolean_true_node,
10943 imag0, imag1);
10944 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10948 icond = fold_binary_loc (loc, code, type, imag0, imag1);
10949 if (icond && TREE_CODE (icond) == INTEGER_CST)
10951 if (integer_zerop (icond))
10953 if (code == EQ_EXPR)
10954 return omit_two_operands_loc (loc, type, boolean_false_node,
10955 real0, real1);
10956 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10958 else
10960 if (code == NE_EXPR)
10961 return omit_two_operands_loc (loc, type, boolean_true_node,
10962 real0, real1);
10963 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
10968 return NULL_TREE;
10970 case LT_EXPR:
10971 case GT_EXPR:
10972 case LE_EXPR:
10973 case GE_EXPR:
10974 tem = fold_comparison (loc, code, type, op0, op1);
10975 if (tem != NULL_TREE)
10976 return tem;
10978 /* Transform comparisons of the form X +- C CMP X. */
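/* For example, with signed X and undefined overflow, X + 1 > X
   folds to true (after emitting a -Wstrict-overflow warning),
   while X - 1 > X folds to false. */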
10979 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10980 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10981 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10982 && !HONOR_SNANS (arg0))
10983 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10984 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
10986 tree arg01 = TREE_OPERAND (arg0, 1);
10987 enum tree_code code0 = TREE_CODE (arg0);
10988 int is_positive;
10990 if (TREE_CODE (arg01) == REAL_CST)
10991 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10992 else
10993 is_positive = tree_int_cst_sgn (arg01);
10995 /* (X - c) > X becomes false. */
10996 if (code == GT_EXPR
10997 && ((code0 == MINUS_EXPR && is_positive >= 0)
10998 || (code0 == PLUS_EXPR && is_positive <= 0)))
11000 if (TREE_CODE (arg01) == INTEGER_CST
11001 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11002 fold_overflow_warning (("assuming signed overflow does not "
11003 "occur when assuming that (X - c) > X "
11004 "is always false"),
11005 WARN_STRICT_OVERFLOW_ALL);
11006 return constant_boolean_node (0, type);
11009 /* Likewise (X + c) < X becomes false. */
11010 if (code == LT_EXPR
11011 && ((code0 == PLUS_EXPR && is_positive >= 0)
11012 || (code0 == MINUS_EXPR && is_positive <= 0)))
11014 if (TREE_CODE (arg01) == INTEGER_CST
11015 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11016 fold_overflow_warning (("assuming signed overflow does not "
11017 "occur when assuming that "
11018 "(X + c) < X is always false"),
11019 WARN_STRICT_OVERFLOW_ALL);
11020 return constant_boolean_node (0, type);
11023 /* Convert (X - c) <= X to true. */
11024 if (!HONOR_NANS (arg1)
11025 && code == LE_EXPR
11026 && ((code0 == MINUS_EXPR && is_positive >= 0)
11027 || (code0 == PLUS_EXPR && is_positive <= 0)))
11029 if (TREE_CODE (arg01) == INTEGER_CST
11030 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11031 fold_overflow_warning (("assuming signed overflow does not "
11032 "occur when assuming that "
11033 "(X - c) <= X is always true"),
11034 WARN_STRICT_OVERFLOW_ALL);
11035 return constant_boolean_node (1, type);
11038 /* Convert (X + c) >= X to true. */
11039 if (!HONOR_NANS (arg1)
11040 && code == GE_EXPR
11041 && ((code0 == PLUS_EXPR && is_positive >= 0)
11042 || (code0 == MINUS_EXPR && is_positive <= 0)))
11044 if (TREE_CODE (arg01) == INTEGER_CST
11045 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11046 fold_overflow_warning (("assuming signed overflow does not "
11047 "occur when assuming that "
11048 "(X + c) >= X is always true"),
11049 WARN_STRICT_OVERFLOW_ALL);
11050 return constant_boolean_node (1, type);
11053 if (TREE_CODE (arg01) == INTEGER_CST)
11055 /* Convert X + c > X and X - c < X to true for integers. */
11056 if (code == GT_EXPR
11057 && ((code0 == PLUS_EXPR && is_positive > 0)
11058 || (code0 == MINUS_EXPR && is_positive < 0)))
11060 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11061 fold_overflow_warning (("assuming signed overflow does "
11062 "not occur when assuming that "
11063 "(X + c) > X is always true"),
11064 WARN_STRICT_OVERFLOW_ALL);
11065 return constant_boolean_node (1, type);
11068 if (code == LT_EXPR
11069 && ((code0 == MINUS_EXPR && is_positive > 0)
11070 || (code0 == PLUS_EXPR && is_positive < 0)))
11072 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11073 fold_overflow_warning (("assuming signed overflow does "
11074 "not occur when assuming that "
11075 "(X - c) < X is always true"),
11076 WARN_STRICT_OVERFLOW_ALL);
11077 return constant_boolean_node (1, type);
11080 /* Convert X + c <= X and X - c >= X to false for integers. */
11081 if (code == LE_EXPR
11082 && ((code0 == PLUS_EXPR && is_positive > 0)
11083 || (code0 == MINUS_EXPR && is_positive < 0)))
11085 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11086 fold_overflow_warning (("assuming signed overflow does "
11087 "not occur when assuming that "
11088 "(X + c) <= X is always false"),
11089 WARN_STRICT_OVERFLOW_ALL);
11090 return constant_boolean_node (0, type);
11093 if (code == GE_EXPR
11094 && ((code0 == MINUS_EXPR && is_positive > 0)
11095 || (code0 == PLUS_EXPR && is_positive < 0)))
11097 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11098 fold_overflow_warning (("assuming signed overflow does "
11099 "not occur when assuming that "
11100 "(X - c) >= X is always false"),
11101 WARN_STRICT_OVERFLOW_ALL);
11102 return constant_boolean_node (0, type);
11107 /* If we are comparing an ABS_EXPR with a constant, we can
11108 convert all the cases into explicit comparisons, but they may
11109 well not be faster than doing the ABS and one comparison.
11110 But ABS (X) <= C is a range comparison, which becomes a subtraction
11111 and a comparison, and is probably faster. */
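/* For example, ABS (X) <= 5 becomes X >= -5 && X <= 5, provided
   negating the bound does not overflow. */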
11112 if (code == LE_EXPR
11113 && TREE_CODE (arg1) == INTEGER_CST
11114 && TREE_CODE (arg0) == ABS_EXPR
11115 && ! TREE_SIDE_EFFECTS (arg0)
11116 && (0 != (tem = negate_expr (arg1)))
11117 && TREE_CODE (tem) == INTEGER_CST
11118 && !TREE_OVERFLOW (tem))
11119 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11120 build2 (GE_EXPR, type,
11121 TREE_OPERAND (arg0, 0), tem),
11122 build2 (LE_EXPR, type,
11123 TREE_OPERAND (arg0, 0), arg1));
11125 /* Convert ABS_EXPR<x> >= 0 to true. */
11126 strict_overflow_p = false;
11127 if (code == GE_EXPR
11128 && (integer_zerop (arg1)
11129 || (! HONOR_NANS (arg0)
11130 && real_zerop (arg1)))
11131 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11133 if (strict_overflow_p)
11134 fold_overflow_warning (("assuming signed overflow does not occur "
11135 "when simplifying comparison of "
11136 "absolute value and zero"),
11137 WARN_STRICT_OVERFLOW_CONDITIONAL);
11138 return omit_one_operand_loc (loc, type,
11139 constant_boolean_node (true, type),
11140 arg0);
11143 /* Convert ABS_EXPR<x> < 0 to false. */
11144 strict_overflow_p = false;
11145 if (code == LT_EXPR
11146 && (integer_zerop (arg1) || real_zerop (arg1))
11147 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11149 if (strict_overflow_p)
11150 fold_overflow_warning (("assuming signed overflow does not occur "
11151 "when simplifying comparison of "
11152 "absolute value and zero"),
11153 WARN_STRICT_OVERFLOW_CONDITIONAL);
11154 return omit_one_operand_loc (loc, type,
11155 constant_boolean_node (false, type),
11156 arg0);
11159 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11160 and similarly for >= into !=. */
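/* For example, with unsigned X, X < (1U << Y) becomes
   (X >> Y) == 0, and X >= (1U << Y) becomes (X >> Y) != 0. */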
11161 if ((code == LT_EXPR || code == GE_EXPR)
11162 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11163 && TREE_CODE (arg1) == LSHIFT_EXPR
11164 && integer_onep (TREE_OPERAND (arg1, 0)))
11165 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11166 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11167 TREE_OPERAND (arg1, 1)),
11168 build_zero_cst (TREE_TYPE (arg0)));
11170 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
11171 otherwise Y might be >= # of bits in X's type and thus e.g.
11172 (unsigned char) (1 << Y) for Y == 15 might be 0.
11173 If the cast is widening, then 1 << Y should have unsigned type,
11174 otherwise if Y is the number of bits in the signed shift type minus 1,
11175 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11176 == 31 might be 0xffffffff80000000. */
11177 if ((code == LT_EXPR || code == GE_EXPR)
11178 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11179 && CONVERT_EXPR_P (arg1)
11180 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11181 && (element_precision (TREE_TYPE (arg1))
11182 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11183 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11184 || (element_precision (TREE_TYPE (arg1))
11185 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11186 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11188 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11189 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11190 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11191 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11192 build_zero_cst (TREE_TYPE (arg0)));
11195 return NULL_TREE;
11197 case UNORDERED_EXPR:
11198 case ORDERED_EXPR:
11199 case UNLT_EXPR:
11200 case UNLE_EXPR:
11201 case UNGT_EXPR:
11202 case UNGE_EXPR:
11203 case UNEQ_EXPR:
11204 case LTGT_EXPR:
11205 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11207 tree targ0 = strip_float_extensions (arg0);
11208 tree targ1 = strip_float_extensions (arg1);
11209 tree newtype = TREE_TYPE (targ0);
11211 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11212 newtype = TREE_TYPE (targ1);
11214 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11215 return fold_build2_loc (loc, code, type,
11216 fold_convert_loc (loc, newtype, targ0),
11217 fold_convert_loc (loc, newtype, targ1));
11220 return NULL_TREE;
11222 case COMPOUND_EXPR:
11223 /* When pedantic, a compound expression can be neither an lvalue
11224 nor an integer constant expression. */
11225 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11226 return NULL_TREE;
11227 /* Don't let (0, 0) be a null pointer constant. */
11228 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11229 : fold_convert_loc (loc, type, arg1);
11230 return pedantic_non_lvalue_loc (loc, tem);
11232 case ASSERT_EXPR:
11233 /* An ASSERT_EXPR should never be passed to fold_binary. */
11234 gcc_unreachable ();
11236 default:
11237 return NULL_TREE;
11238 } /* switch (code) */
11241 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11242 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11243 of GOTO_EXPR. */
11245 static tree
11246 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11248 switch (TREE_CODE (*tp))
11250 case LABEL_EXPR:
11251 return *tp;
11253 case GOTO_EXPR:
11254 *walk_subtrees = 0;
11256 /* fall through */
11258 default:
11259 return NULL_TREE;
11263 /* Return whether the sub-tree ST contains a label which is accessible from
11264 outside the sub-tree. */
11266 static bool
11267 contains_label_p (tree st)
11269 return
11270 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
11273 /* Fold a ternary expression of code CODE and type TYPE with operands
11274 OP0, OP1, and OP2. Return the folded expression if folding is
11275 successful. Otherwise, return NULL_TREE. */
11277 tree
11278 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11279 tree op0, tree op1, tree op2)
11281 tree tem;
11282 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11283 enum tree_code_class kind = TREE_CODE_CLASS (code);
11285 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11286 && TREE_CODE_LENGTH (code) == 3);
11288 /* If this is a commutative operation, and OP0 is a constant, move it
11289 to OP1 to reduce the number of tests below. */
11290 if (commutative_ternary_tree_code (code)
11291 && tree_swap_operands_p (op0, op1))
11292 return fold_build3_loc (loc, code, type, op1, op0, op2);
11294 tem = generic_simplify (loc, code, type, op0, op1, op2);
11295 if (tem)
11296 return tem;
11298 /* Strip any conversions that don't change the mode. This is safe
11299 for every expression, except for a comparison expression because
11300 its signedness is derived from its operands. So, in the latter
11301 case, only strip conversions that don't change the signedness.
11303 Note that this is done as an internal manipulation within the
11304 constant folder, in order to find the simplest representation of
11305 the arguments so that their form can be studied. In any case,
11306 the appropriate type conversions should be put back in the tree
11307 that will get out of the constant folder. */
11308 if (op0)
11310 arg0 = op0;
11311 STRIP_NOPS (arg0);
11314 if (op1)
11316 arg1 = op1;
11317 STRIP_NOPS (arg1);
11320 if (op2)
11322 arg2 = op2;
11323 STRIP_NOPS (arg2);
11326 switch (code)
11328 case COMPONENT_REF:
11329 if (TREE_CODE (arg0) == CONSTRUCTOR
11330 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11332 unsigned HOST_WIDE_INT idx;
11333 tree field, value;
11334 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11335 if (field == arg1)
11336 return value;
11338 return NULL_TREE;
11340 case COND_EXPR:
11341 case VEC_COND_EXPR:
11342 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11343 so all simple results must be passed through pedantic_non_lvalue. */
11344 if (TREE_CODE (arg0) == INTEGER_CST)
11346 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11347 tem = integer_zerop (arg0) ? op2 : op1;
11348 /* Only optimize constant conditions when the selected branch
11349 has the same type as the COND_EXPR. This avoids optimizing
11350 away "c ? x : throw", where the throw has a void type.
11351 Avoid throwing away an operand that contains a label. */
11352 if ((!TREE_SIDE_EFFECTS (unused_op)
11353 || !contains_label_p (unused_op))
11354 && (! VOID_TYPE_P (TREE_TYPE (tem))
11355 || VOID_TYPE_P (type)))
11356 return pedantic_non_lvalue_loc (loc, tem);
11357 return NULL_TREE;
11359 else if (TREE_CODE (arg0) == VECTOR_CST)
11361 if ((TREE_CODE (arg1) == VECTOR_CST
11362 || TREE_CODE (arg1) == CONSTRUCTOR)
11363 && (TREE_CODE (arg2) == VECTOR_CST
11364 || TREE_CODE (arg2) == CONSTRUCTOR))
11366 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11367 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11368 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11369 for (i = 0; i < nelts; i++)
11371 tree val = VECTOR_CST_ELT (arg0, i);
11372 if (integer_all_onesp (val))
11373 sel[i] = i;
11374 else if (integer_zerop (val))
11375 sel[i] = nelts + i;
11376 else /* Currently unreachable. */
11377 return NULL_TREE;
11379 tree t = fold_vec_perm (type, arg1, arg2, sel);
11380 if (t != NULL_TREE)
11381 return t;
11385 /* If we have A op B ? A : C, we may be able to convert this to a
11386 simpler expression, depending on the operation and the values
11387 of B and C. Signed zeros prevent all of these transformations,
11388 for reasons given above each one.
11390 Also try swapping the arguments and inverting the conditional. */
11391 if (COMPARISON_CLASS_P (arg0)
11392 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11393 arg1, TREE_OPERAND (arg0, 1))
11394 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11396 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11397 if (tem)
11398 return tem;
11401 if (COMPARISON_CLASS_P (arg0)
11402 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11403 op2,
11404 TREE_OPERAND (arg0, 1))
11405 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11407 location_t loc0 = expr_location_or (arg0, loc);
11408 tem = fold_invert_truthvalue (loc0, arg0);
11409 if (tem && COMPARISON_CLASS_P (tem))
11411 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11412 if (tem)
11413 return tem;
11417 /* If the second operand is simpler than the third, swap them
11418 since that produces better jump optimization results. */
11419 if (truth_value_p (TREE_CODE (arg0))
11420 && tree_swap_operands_p (op1, op2))
11422 location_t loc0 = expr_location_or (arg0, loc);
11423 /* See if this can be inverted. If it can't, possibly because
11424 it was a floating-point inequality comparison, don't do
11425 anything. */
11426 tem = fold_invert_truthvalue (loc0, arg0);
11427 if (tem)
11428 return fold_build3_loc (loc, code, type, tem, op2, op1);
11431 /* Convert A ? 1 : 0 to simply A. */
11432 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11433 : (integer_onep (op1)
11434 && !VECTOR_TYPE_P (type)))
11435 && integer_zerop (op2)
11436 /* If we try to convert OP0 to our type, the
11437 call to fold will try to move the conversion inside
11438 a COND, which will recurse. In that case, the COND_EXPR
11439 is probably the best choice, so leave it alone. */
11440 && type == TREE_TYPE (arg0))
11441 return pedantic_non_lvalue_loc (loc, arg0);
11443 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11444 over COND_EXPR in cases such as floating point comparisons. */
11445 if (integer_zerop (op1)
11446 && code == COND_EXPR
11447 && integer_onep (op2)
11448 && !VECTOR_TYPE_P (type)
11449 && truth_value_p (TREE_CODE (arg0)))
11450 return pedantic_non_lvalue_loc (loc,
11451 fold_convert_loc (loc, type,
11452 invert_truthvalue_loc (loc,
11453 arg0)));
11455 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
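/* For example, with a 32-bit int X and the sign-bit mask
   0x80000000, X < 0 ? 0x80000000 : 0 becomes X & 0x80000000. */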
11456 if (TREE_CODE (arg0) == LT_EXPR
11457 && integer_zerop (TREE_OPERAND (arg0, 1))
11458 && integer_zerop (op2)
11459 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11461 /* sign_bit_p looks through both zero and sign extensions,
11462 but for this optimization only sign extensions are
11463 usable. */
11464 tree tem2 = TREE_OPERAND (arg0, 0);
11465 while (tem != tem2)
11467 if (TREE_CODE (tem2) != NOP_EXPR
11468 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11470 tem = NULL_TREE;
11471 break;
11473 tem2 = TREE_OPERAND (tem2, 0);
11475 /* sign_bit_p only checks ARG1 bits within A's precision.
11476 If <sign bit of A> has wider type than A, bits outside
11477 of A's precision in <sign bit of A> need to be checked.
11478 If they are all 0, this optimization needs to be done
11479 in unsigned A's type; if they are all 1, in signed A's
11480 type; otherwise this can't be done. */
11481 if (tem
11482 && TYPE_PRECISION (TREE_TYPE (tem))
11483 < TYPE_PRECISION (TREE_TYPE (arg1))
11484 && TYPE_PRECISION (TREE_TYPE (tem))
11485 < TYPE_PRECISION (type))
11487 int inner_width, outer_width;
11488 tree tem_type;
11490 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11491 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11492 if (outer_width > TYPE_PRECISION (type))
11493 outer_width = TYPE_PRECISION (type);
11495 wide_int mask = wi::shifted_mask
11496 (inner_width, outer_width - inner_width, false,
11497 TYPE_PRECISION (TREE_TYPE (arg1)));
11499 wide_int common = mask & arg1;
11500 if (common == mask)
11502 tem_type = signed_type_for (TREE_TYPE (tem));
11503 tem = fold_convert_loc (loc, tem_type, tem);
11505 else if (common == 0)
11507 tem_type = unsigned_type_for (TREE_TYPE (tem));
11508 tem = fold_convert_loc (loc, tem_type, tem);
11510 else
11511 tem = NULL;
11514 if (tem)
11515 return
11516 fold_convert_loc (loc, type,
11517 fold_build2_loc (loc, BIT_AND_EXPR,
11518 TREE_TYPE (tem), tem,
11519 fold_convert_loc (loc,
11520 TREE_TYPE (tem),
11521 arg1)));
11524 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11525 already handled above. */
11526 if (TREE_CODE (arg0) == BIT_AND_EXPR
11527 && integer_onep (TREE_OPERAND (arg0, 1))
11528 && integer_zerop (op2)
11529 && integer_pow2p (arg1))
11531 tree tem = TREE_OPERAND (arg0, 0);
11532 STRIP_NOPS (tem);
11533 if (TREE_CODE (tem) == RSHIFT_EXPR
11534 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11535 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11536 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11537 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11538 fold_convert_loc (loc, type,
11539 TREE_OPERAND (tem, 0)),
11540 op1);
11543 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11544 is probably obsolete because the first operand should be a
11545 truth value (that's why we have the two cases above), but let's
11546 leave it in until we can confirm this for all front-ends. */
11547 if (integer_zerop (op2)
11548 && TREE_CODE (arg0) == NE_EXPR
11549 && integer_zerop (TREE_OPERAND (arg0, 1))
11550 && integer_pow2p (arg1)
11551 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11552 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11553 arg1, OEP_ONLY_CONST))
11554 return pedantic_non_lvalue_loc (loc,
11555 fold_convert_loc (loc, type,
11556 TREE_OPERAND (arg0, 0)));
11558 /* Disable the transformations below for vectors, since
11559 fold_binary_op_with_conditional_arg may undo them immediately,
11560 yielding an infinite loop. */
11561 if (code == VEC_COND_EXPR)
11562 return NULL_TREE;
11564 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11565 if (integer_zerop (op2)
11566 && truth_value_p (TREE_CODE (arg0))
11567 && truth_value_p (TREE_CODE (arg1))
11568 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11569 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11570 : TRUTH_ANDIF_EXPR,
11571 type, fold_convert_loc (loc, type, arg0), op1);
11573 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11574 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11575 && truth_value_p (TREE_CODE (arg0))
11576 && truth_value_p (TREE_CODE (arg1))
11577 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11579 location_t loc0 = expr_location_or (arg0, loc);
11580 /* Only perform transformation if ARG0 is easily inverted. */
11581 tem = fold_invert_truthvalue (loc0, arg0);
11582 if (tem)
11583 return fold_build2_loc (loc, code == VEC_COND_EXPR
11584 ? BIT_IOR_EXPR
11585 : TRUTH_ORIF_EXPR,
11586 type, fold_convert_loc (loc, type, tem),
11587 op1);
11590 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11591 if (integer_zerop (arg1)
11592 && truth_value_p (TREE_CODE (arg0))
11593 && truth_value_p (TREE_CODE (op2))
11594 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11596 location_t loc0 = expr_location_or (arg0, loc);
11597 /* Only perform transformation if ARG0 is easily inverted. */
11598 tem = fold_invert_truthvalue (loc0, arg0);
11599 if (tem)
11600 return fold_build2_loc (loc, code == VEC_COND_EXPR
11601 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11602 type, fold_convert_loc (loc, type, tem),
11603 op2);
11606 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11607 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11608 && truth_value_p (TREE_CODE (arg0))
11609 && truth_value_p (TREE_CODE (op2))
11610 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11611 return fold_build2_loc (loc, code == VEC_COND_EXPR
11612 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11613 type, fold_convert_loc (loc, type, arg0), op2);
11615 return NULL_TREE;
11617 case CALL_EXPR:
11618 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11619 of fold_ternary on them. */
11620 gcc_unreachable ();
11622 case BIT_FIELD_REF:
11623 if (TREE_CODE (arg0) == VECTOR_CST
11624 && (type == TREE_TYPE (TREE_TYPE (arg0))
11625 || (TREE_CODE (type) == VECTOR_TYPE
11626 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11628 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11629 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11630 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11631 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11633 if (n != 0
11634 && (idx % width) == 0
11635 && (n % width) == 0
11636 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11638 idx = idx / width;
11639 n = n / width;
11641 if (TREE_CODE (arg0) == VECTOR_CST)
11643 if (n == 1)
11644 return VECTOR_CST_ELT (arg0, idx);
11646 tree *vals = XALLOCAVEC (tree, n);
11647 for (unsigned i = 0; i < n; ++i)
11648 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11649 return build_vector (type, vals);
11654 /* On constants we can use native encode/interpret to constant
11655 fold (nearly) all BIT_FIELD_REFs. */
11656 if (CONSTANT_CLASS_P (arg0)
11657 && can_native_interpret_type_p (type)
11658 && BITS_PER_UNIT == 8)
11660 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11661 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11662 /* Limit us to a reasonable amount of work. To relax the
11663 other limitations we need bit-shifting of the buffer
11664 and rounding up the size. */
11665 if (bitpos % BITS_PER_UNIT == 0
11666 && bitsize % BITS_PER_UNIT == 0
11667 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11669 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11670 unsigned HOST_WIDE_INT len
11671 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11672 bitpos / BITS_PER_UNIT);
11673 if (len > 0
11674 && len * BITS_PER_UNIT >= bitsize)
11676 tree v = native_interpret_expr (type, b,
11677 bitsize / BITS_PER_UNIT);
11678 if (v)
11679 return v;
11684 return NULL_TREE;
11686 case FMA_EXPR:
11687 /* For integers we can decompose the FMA if possible. */
11688 if (TREE_CODE (arg0) == INTEGER_CST
11689 && TREE_CODE (arg1) == INTEGER_CST)
11690 return fold_build2_loc (loc, PLUS_EXPR, type,
11691 const_binop (MULT_EXPR, arg0, arg1), arg2);
11692 if (integer_zerop (arg2))
11693 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11695 return fold_fma (loc, type, arg0, arg1, arg2);
11697 case VEC_PERM_EXPR:
11698 if (TREE_CODE (arg2) == VECTOR_CST)
11700 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11701 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11702 unsigned char *sel2 = sel + nelts;
11703 bool need_mask_canon = false;
11704 bool need_mask_canon2 = false;
11705 bool all_in_vec0 = true;
11706 bool all_in_vec1 = true;
11707 bool maybe_identity = true;
11708 bool single_arg = (op0 == op1);
11709 bool changed = false;
11711 mask2 = 2 * nelts - 1;
11712 mask = single_arg ? (nelts - 1) : mask2;
11713 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11714 for (i = 0; i < nelts; i++)
11716 tree val = VECTOR_CST_ELT (arg2, i);
11717 if (TREE_CODE (val) != INTEGER_CST)
11718 return NULL_TREE;
11720 /* Make sure that the perm value is in an acceptable
11721 range. */
11722 wide_int t = val;
11723 need_mask_canon |= wi::gtu_p (t, mask);
11724 need_mask_canon2 |= wi::gtu_p (t, mask2);
11725 sel[i] = t.to_uhwi () & mask;
11726 sel2[i] = t.to_uhwi () & mask2;
11728 if (sel[i] < nelts)
11729 all_in_vec1 = false;
11730 else
11731 all_in_vec0 = false;
11733 if ((sel[i] & (nelts-1)) != i)
11734 maybe_identity = false;
11737 if (maybe_identity)
11739 if (all_in_vec0)
11740 return op0;
11741 if (all_in_vec1)
11742 return op1;
11745 if (all_in_vec0)
11746 op1 = op0;
11747 else if (all_in_vec1)
11749 op0 = op1;
11750 for (i = 0; i < nelts; i++)
11751 sel[i] -= nelts;
11752 need_mask_canon = true;
11755 if ((TREE_CODE (op0) == VECTOR_CST
11756 || TREE_CODE (op0) == CONSTRUCTOR)
11757 && (TREE_CODE (op1) == VECTOR_CST
11758 || TREE_CODE (op1) == CONSTRUCTOR))
11760 tree t = fold_vec_perm (type, op0, op1, sel);
11761 if (t != NULL_TREE)
11762 return t;
11765 if (op0 == op1 && !single_arg)
11766 changed = true;
11768 /* Some targets are deficient and fail to expand a single
11769 argument permutation while still allowing an equivalent
11770 2-argument version. */
11771 if (need_mask_canon && arg2 == op2
11772 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11773 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11775 need_mask_canon = need_mask_canon2;
11776 sel = sel2;
11779 if (need_mask_canon && arg2 == op2)
11781 tree *tsel = XALLOCAVEC (tree, nelts);
11782 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11783 for (i = 0; i < nelts; i++)
11784 tsel[i] = build_int_cst (eltype, sel[i]);
11785 op2 = build_vector (TREE_TYPE (arg2), tsel);
11786 changed = true;
11789 if (changed)
11790 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11792 return NULL_TREE;
11794 case BIT_INSERT_EXPR:
11795 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
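/* For example, inserting the 8-bit value 0xaa at bit position 8
   of the 32-bit constant 0xdeadbeef: mask out bits 8..15 to get
   0xdead00ef, then OR in 0xaa << 8, giving 0xdeadaaef. */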
11796 if (TREE_CODE (arg0) == INTEGER_CST
11797 && TREE_CODE (arg1) == INTEGER_CST)
11799 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11800 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11801 wide_int tem = wi::bit_and (arg0,
11802 wi::shifted_mask (bitpos, bitsize, true,
11803 TYPE_PRECISION (type)));
11804 wide_int tem2
11805 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11806 bitsize), bitpos);
11807 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
11809 else if (TREE_CODE (arg0) == VECTOR_CST
11810 && CONSTANT_CLASS_P (arg1)
11811 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11812 TREE_TYPE (arg1)))
11814 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11815 unsigned HOST_WIDE_INT elsize
11816 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11817 if (bitpos % elsize == 0)
11819 unsigned k = bitpos / elsize;
11820 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11821 return arg0;
11822 else
11824 tree *elts = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
11825 memcpy (elts, VECTOR_CST_ELTS (arg0),
11826 sizeof (tree) * TYPE_VECTOR_SUBPARTS (type));
11827 elts[k] = arg1;
11828 return build_vector (type, elts);
11832 return NULL_TREE;
11834 default:
11835 return NULL_TREE;
11836 } /* switch (code) */
11839 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11840 of an array (or vector). */
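/* For example, for a constructor { [0] = a, [2 ... 5] = b }, an
   ACCESS_INDEX of 4 falls in the RANGE_EXPR and returns b, while
   an ACCESS_INDEX of 1 matches no element and yields NULL_TREE. */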
11842 tree
11843 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11845 tree index_type = NULL_TREE;
11846 offset_int low_bound = 0;
11848 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11850 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11851 if (domain_type && TYPE_MIN_VALUE (domain_type))
11853 /* Static constructors for variably sized objects make no sense. */
11854 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11855 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11856 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11860 if (index_type)
11861 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11862 TYPE_SIGN (index_type));
11864 offset_int index = low_bound - 1;
11865 if (index_type)
11866 index = wi::ext (index, TYPE_PRECISION (index_type),
11867 TYPE_SIGN (index_type));
11869 offset_int max_index;
11870 unsigned HOST_WIDE_INT cnt;
11871 tree cfield, cval;
11873 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11875 /* Array constructor might explicitly set index, or specify a range,
11876 or leave index NULL meaning that it is the next index after the
11877 previous one. */
11878 if (cfield)
11880 if (TREE_CODE (cfield) == INTEGER_CST)
11881 max_index = index = wi::to_offset (cfield);
11882 else
11884 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11885 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11886 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11889 else
11891 index += 1;
11892 if (index_type)
11893 index = wi::ext (index, TYPE_PRECISION (index_type),
11894 TYPE_SIGN (index_type));
11895 max_index = index;
11898 /* Do we have a match? */
11899 if (wi::cmpu (access_index, index) >= 0
11900 && wi::cmpu (access_index, max_index) <= 0)
11901 return cval;
11903 return NULL_TREE;
11906 /* Perform constant folding and related simplification of EXPR.
11907 The related simplifications include x*1 => x, x*0 => 0, etc.,
11908 and application of the associative law.
11909 NOP_EXPR conversions may be removed freely (as long as we
11910 are careful not to change the type of the overall expression).
11911 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11912 but we can constant-fold them if they have constant operands. */
11914 #ifdef ENABLE_FOLD_CHECKING
11915 # define fold(x) fold_1 (x)
11916 static tree fold_1 (tree);
11917 static
11918 #endif
11919 tree
11920 fold (tree expr)
11922 const tree t = expr;
11923 enum tree_code code = TREE_CODE (t);
11924 enum tree_code_class kind = TREE_CODE_CLASS (code);
11925 tree tem;
11926 location_t loc = EXPR_LOCATION (expr);
11928 /* Return right away if a constant. */
11929 if (kind == tcc_constant)
11930 return t;
11932 /* CALL_EXPR-like objects with variable numbers of operands are
11933 treated specially. */
11934 if (kind == tcc_vl_exp)
11936 if (code == CALL_EXPR)
11938 tem = fold_call_expr (loc, expr, false);
11939 return tem ? tem : expr;
11941 return expr;
11944 if (IS_EXPR_CODE_CLASS (kind))
11946 tree type = TREE_TYPE (t);
11947 tree op0, op1, op2;
11949 switch (TREE_CODE_LENGTH (code))
11951 case 1:
11952 op0 = TREE_OPERAND (t, 0);
11953 tem = fold_unary_loc (loc, code, type, op0);
11954 return tem ? tem : expr;
11955 case 2:
11956 op0 = TREE_OPERAND (t, 0);
11957 op1 = TREE_OPERAND (t, 1);
11958 tem = fold_binary_loc (loc, code, type, op0, op1);
11959 return tem ? tem : expr;
11960 case 3:
11961 op0 = TREE_OPERAND (t, 0);
11962 op1 = TREE_OPERAND (t, 1);
11963 op2 = TREE_OPERAND (t, 2);
11964 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11965 return tem ? tem : expr;
11966 default:
11967 break;
11971 switch (code)
11973 case ARRAY_REF:
11975 tree op0 = TREE_OPERAND (t, 0);
11976 tree op1 = TREE_OPERAND (t, 1);
11978 if (TREE_CODE (op1) == INTEGER_CST
11979 && TREE_CODE (op0) == CONSTRUCTOR
11980 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11982 tree val = get_array_ctor_element_at_index (op0,
11983 wi::to_offset (op1));
11984 if (val)
11985 return val;
11988 return t;
11991 /* Return a VECTOR_CST if possible. */
11992 case CONSTRUCTOR:
11994 tree type = TREE_TYPE (t);
11995 if (TREE_CODE (type) != VECTOR_TYPE)
11996 return t;
11998 unsigned i;
11999 tree val;
12000 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12001 if (! CONSTANT_CLASS_P (val))
12002 return t;
12004 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12007 case CONST_DECL:
12008 return fold (DECL_INITIAL (t));
12010 default:
12011 return t;
12012 } /* switch (code) */
12015 #ifdef ENABLE_FOLD_CHECKING
12016 #undef fold
12018 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12019 hash_table<nofree_ptr_hash<const tree_node> > *);
12020 static void fold_check_failed (const_tree, const_tree);
12021 void print_fold_checksum (const_tree);
12023 /* When --enable-checking=fold, compute a digest of expr before
12024 and after the actual fold call to check that fold did not
12025 accidentally change the original expr. */
12027 tree
12028 fold (tree expr)
12030 tree ret;
12031 struct md5_ctx ctx;
12032 unsigned char checksum_before[16], checksum_after[16];
12033 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12035 md5_init_ctx (&ctx);
12036 fold_checksum_tree (expr, &ctx, &ht);
12037 md5_finish_ctx (&ctx, checksum_before);
12038 ht.empty ();
12040 ret = fold_1 (expr);
12042 md5_init_ctx (&ctx);
12043 fold_checksum_tree (expr, &ctx, &ht);
12044 md5_finish_ctx (&ctx, checksum_after);
12046 if (memcmp (checksum_before, checksum_after, 16))
12047 fold_check_failed (expr, ret);
12049 return ret;
12052 void
12053 print_fold_checksum (const_tree expr)
12055 struct md5_ctx ctx;
12056 unsigned char checksum[16], cnt;
12057 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12059 md5_init_ctx (&ctx);
12060 fold_checksum_tree (expr, &ctx, &ht);
12061 md5_finish_ctx (&ctx, checksum);
12062 for (cnt = 0; cnt < 16; ++cnt)
12063 fprintf (stderr, "%02x", checksum[cnt]);
12064 putc ('\n', stderr);
12067 static void
12068 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12070 internal_error ("fold check: original tree changed by fold");
12073 static void
12074 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12075 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12077 const tree_node **slot;
12078 enum tree_code code;
12079 union tree_node buf;
12080 int i, len;
12082 recursive_label:
12083 if (expr == NULL)
12084 return;
12085 slot = ht->find_slot (expr, INSERT);
12086 if (*slot != NULL)
12087 return;
12088 *slot = expr;
12089 code = TREE_CODE (expr);
12090 if (TREE_CODE_CLASS (code) == tcc_declaration
12091 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12093 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12094 memcpy ((char *) &buf, expr, tree_size (expr));
12095 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12096 buf.decl_with_vis.symtab_node = NULL;
12097 expr = (tree) &buf;
12099 else if (TREE_CODE_CLASS (code) == tcc_type
12100 && (TYPE_POINTER_TO (expr)
12101 || TYPE_REFERENCE_TO (expr)
12102 || TYPE_CACHED_VALUES_P (expr)
12103 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12104 || TYPE_NEXT_VARIANT (expr)
12105 || TYPE_ALIAS_SET_KNOWN_P (expr)))
12107 /* Allow these fields to be modified. */
12108 tree tmp;
12109 memcpy ((char *) &buf, expr, tree_size (expr));
12110 expr = tmp = (tree) &buf;
12111 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12112 TYPE_POINTER_TO (tmp) = NULL;
12113 TYPE_REFERENCE_TO (tmp) = NULL;
12114 TYPE_NEXT_VARIANT (tmp) = NULL;
12115 TYPE_ALIAS_SET (tmp) = -1;
12116 if (TYPE_CACHED_VALUES_P (tmp))
12118 TYPE_CACHED_VALUES_P (tmp) = 0;
12119 TYPE_CACHED_VALUES (tmp) = NULL;
12122 md5_process_bytes (expr, tree_size (expr), ctx);
12123 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12124 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12125 if (TREE_CODE_CLASS (code) != tcc_type
12126 && TREE_CODE_CLASS (code) != tcc_declaration
12127 && code != TREE_LIST
12128 && code != SSA_NAME
12129 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12130 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12131 switch (TREE_CODE_CLASS (code))
12133 case tcc_constant:
12134 switch (code)
12136 case STRING_CST:
12137 md5_process_bytes (TREE_STRING_POINTER (expr),
12138 TREE_STRING_LENGTH (expr), ctx);
12139 break;
12140 case COMPLEX_CST:
12141 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12142 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12143 break;
12144 case VECTOR_CST:
12145 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12146 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12147 break;
12148 default:
12149 break;
12151 break;
12152 case tcc_exceptional:
12153 switch (code)
12155 case TREE_LIST:
12156 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12157 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12158 expr = TREE_CHAIN (expr);
12159 goto recursive_label;
12160 break;
12161 case TREE_VEC:
12162 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12163 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12164 break;
12165 default:
12166 break;
12168 break;
12169 case tcc_expression:
12170 case tcc_reference:
12171 case tcc_comparison:
12172 case tcc_unary:
12173 case tcc_binary:
12174 case tcc_statement:
12175 case tcc_vl_exp:
12176 len = TREE_OPERAND_LENGTH (expr);
12177 for (i = 0; i < len; ++i)
12178 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12179 break;
12180 case tcc_declaration:
12181 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12182 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12183 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12185 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12186 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12187 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12188 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12189 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12192 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12194 if (TREE_CODE (expr) == FUNCTION_DECL)
12196 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12197 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12199 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12201 break;
12202 case tcc_type:
12203 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12204 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12205 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12206 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12207 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12208 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12209 if (INTEGRAL_TYPE_P (expr)
12210 || SCALAR_FLOAT_TYPE_P (expr))
12212 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12213 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12215 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12216 if (TREE_CODE (expr) == RECORD_TYPE
12217 || TREE_CODE (expr) == UNION_TYPE
12218 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12219 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12220 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12221 break;
12222 default:
12223 break;
12227 /* Helper function for outputting the checksum of a tree T. When
12228 debugging with gdb, you can "define mynext" to be "next" followed
12229 by "call debug_fold_checksum (op0)", then just trace down till the
12230 outputs differ. */
12232 DEBUG_FUNCTION void
12233 debug_fold_checksum (const_tree t)
12235 int i;
12236 unsigned char checksum[16];
12237 struct md5_ctx ctx;
12238 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12240 md5_init_ctx (&ctx);
12241 fold_checksum_tree (t, &ctx, &ht);
12242 md5_finish_ctx (&ctx, checksum);
12243 ht.empty ();
12245 for (i = 0; i < 16; i++)
12246 fprintf (stderr, "%d ", checksum[i]);
12248 fprintf (stderr, "\n");
12251 #endif
12253 /* Fold a unary tree expression with code CODE of type TYPE with an
12254 operand OP0. LOC is the location of the resulting expression.
12255 Return a folded expression if successful. Otherwise, return a tree
12256 expression with code CODE of type TYPE with an operand OP0. */
12258 tree
12259 fold_build1_stat_loc (location_t loc,
12260 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12262 tree tem;
12263 #ifdef ENABLE_FOLD_CHECKING
12264 unsigned char checksum_before[16], checksum_after[16];
12265 struct md5_ctx ctx;
12266 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12268 md5_init_ctx (&ctx);
12269 fold_checksum_tree (op0, &ctx, &ht);
12270 md5_finish_ctx (&ctx, checksum_before);
12271 ht.empty ();
12272 #endif
12274 tem = fold_unary_loc (loc, code, type, op0);
12275 if (!tem)
12276 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12278 #ifdef ENABLE_FOLD_CHECKING
12279 md5_init_ctx (&ctx);
12280 fold_checksum_tree (op0, &ctx, &ht);
12281 md5_finish_ctx (&ctx, checksum_after);
12283 if (memcmp (checksum_before, checksum_after, 16))
12284 fold_check_failed (op0, tem);
12285 #endif
12286 return tem;
12289 /* Fold a binary tree expression with code CODE of type TYPE with
12290 operands OP0 and OP1. LOC is the location of the resulting
12291 expression. Return a folded expression if successful. Otherwise,
12292 return a tree expression with code CODE of type TYPE with operands
12293 OP0 and OP1. */
12295 tree
12296 fold_build2_stat_loc (location_t loc,
12297 enum tree_code code, tree type, tree op0, tree op1
12298 MEM_STAT_DECL)
12300 tree tem;
12301 #ifdef ENABLE_FOLD_CHECKING
12302 unsigned char checksum_before_op0[16],
12303 checksum_before_op1[16],
12304 checksum_after_op0[16],
12305 checksum_after_op1[16];
12306 struct md5_ctx ctx;
12307 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12309 md5_init_ctx (&ctx);
12310 fold_checksum_tree (op0, &ctx, &ht);
12311 md5_finish_ctx (&ctx, checksum_before_op0);
12312 ht.empty ();
12314 md5_init_ctx (&ctx);
12315 fold_checksum_tree (op1, &ctx, &ht);
12316 md5_finish_ctx (&ctx, checksum_before_op1);
12317 ht.empty ();
12318 #endif
12320 tem = fold_binary_loc (loc, code, type, op0, op1);
12321 if (!tem)
12322 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12324 #ifdef ENABLE_FOLD_CHECKING
12325 md5_init_ctx (&ctx);
12326 fold_checksum_tree (op0, &ctx, &ht);
12327 md5_finish_ctx (&ctx, checksum_after_op0);
12328 ht.empty ();
12330 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12331 fold_check_failed (op0, tem);
12333 md5_init_ctx (&ctx);
12334 fold_checksum_tree (op1, &ctx, &ht);
12335 md5_finish_ctx (&ctx, checksum_after_op1);
12337 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12338 fold_check_failed (op1, tem);
12339 #endif
12340 return tem;
12343 /* Fold a ternary tree expression with code CODE of type TYPE with
12344 operands OP0, OP1, and OP2. Return a folded expression if
12345 successful. Otherwise, return a tree expression with code CODE of
12346 type TYPE with operands OP0, OP1, and OP2. */
12348 tree
12349 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12350 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12352 tree tem;
12353 #ifdef ENABLE_FOLD_CHECKING
12354 unsigned char checksum_before_op0[16],
12355 checksum_before_op1[16],
12356 checksum_before_op2[16],
12357 checksum_after_op0[16],
12358 checksum_after_op1[16],
12359 checksum_after_op2[16];
12360 struct md5_ctx ctx;
12361 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12363 md5_init_ctx (&ctx);
12364 fold_checksum_tree (op0, &ctx, &ht);
12365 md5_finish_ctx (&ctx, checksum_before_op0);
12366 ht.empty ();
12368 md5_init_ctx (&ctx);
12369 fold_checksum_tree (op1, &ctx, &ht);
12370 md5_finish_ctx (&ctx, checksum_before_op1);
12371 ht.empty ();
12373 md5_init_ctx (&ctx);
12374 fold_checksum_tree (op2, &ctx, &ht);
12375 md5_finish_ctx (&ctx, checksum_before_op2);
12376 ht.empty ();
12377 #endif
12379 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12380 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12381 if (!tem)
12382 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12384 #ifdef ENABLE_FOLD_CHECKING
12385 md5_init_ctx (&ctx);
12386 fold_checksum_tree (op0, &ctx, &ht);
12387 md5_finish_ctx (&ctx, checksum_after_op0);
12388 ht.empty ();
12390 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12391 fold_check_failed (op0, tem);
12393 md5_init_ctx (&ctx);
12394 fold_checksum_tree (op1, &ctx, &ht);
12395 md5_finish_ctx (&ctx, checksum_after_op1);
12396 ht.empty ();
12398 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12399 fold_check_failed (op1, tem);
12401 md5_init_ctx (&ctx);
12402 fold_checksum_tree (op2, &ctx, &ht);
12403 md5_finish_ctx (&ctx, checksum_after_op2);
12405 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12406 fold_check_failed (op2, tem);
12407 #endif
12408 return tem;
12411 /* Fold a CALL_EXPR expression of type TYPE with operand FN, NARGS
12412 arguments in ARGARRAY, and a null static chain.
12413 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12414 of type TYPE from the given operands as constructed by build_call_array. */
12416 tree
12417 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12418 int nargs, tree *argarray)
12420 tree tem;
12421 #ifdef ENABLE_FOLD_CHECKING
12422 unsigned char checksum_before_fn[16],
12423 checksum_before_arglist[16],
12424 checksum_after_fn[16],
12425 checksum_after_arglist[16];
12426 struct md5_ctx ctx;
12427 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12428 int i;
12430 md5_init_ctx (&ctx);
12431 fold_checksum_tree (fn, &ctx, &ht);
12432 md5_finish_ctx (&ctx, checksum_before_fn);
12433 ht.empty ();
12435 md5_init_ctx (&ctx);
12436 for (i = 0; i < nargs; i++)
12437 fold_checksum_tree (argarray[i], &ctx, &ht);
12438 md5_finish_ctx (&ctx, checksum_before_arglist);
12439 ht.empty ();
12440 #endif
12442 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12443 if (!tem)
12444 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12446 #ifdef ENABLE_FOLD_CHECKING
12447 md5_init_ctx (&ctx);
12448 fold_checksum_tree (fn, &ctx, &ht);
12449 md5_finish_ctx (&ctx, checksum_after_fn);
12450 ht.empty ();
12452 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12453 fold_check_failed (fn, tem);
12455 md5_init_ctx (&ctx);
12456 for (i = 0; i < nargs; i++)
12457 fold_checksum_tree (argarray[i], &ctx, &ht);
12458 md5_finish_ctx (&ctx, checksum_after_arglist);
12460 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12461 fold_check_failed (NULL_TREE, tem);
12462 #endif
12463 return tem;
12466 /* Perform constant folding and related simplification of initializer
12467 expression EXPR. These behave identically to "fold_buildN" but ignore
12468 potential run-time traps and exceptions that fold must preserve. */
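/* For example, fold_build2_initializer_loc can fold 1.0 / 3.0 in a
   static initializer even under -frounding-math or -ftrapping-math,
   because the macros below temporarily clear those flags. */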
12470 #define START_FOLD_INIT \
12471 int saved_signaling_nans = flag_signaling_nans;\
12472 int saved_trapping_math = flag_trapping_math;\
12473 int saved_rounding_math = flag_rounding_math;\
12474 int saved_trapv = flag_trapv;\
12475 int saved_folding_initializer = folding_initializer;\
12476 flag_signaling_nans = 0;\
12477 flag_trapping_math = 0;\
12478 flag_rounding_math = 0;\
12479 flag_trapv = 0;\
12480 folding_initializer = 1;
12482 #define END_FOLD_INIT \
12483 flag_signaling_nans = saved_signaling_nans;\
12484 flag_trapping_math = saved_trapping_math;\
12485 flag_rounding_math = saved_rounding_math;\
12486 flag_trapv = saved_trapv;\
12487 folding_initializer = saved_folding_initializer;
12489 tree
12490 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12491 tree type, tree op)
12493 tree result;
12494 START_FOLD_INIT;
12496 result = fold_build1_loc (loc, code, type, op);
12498 END_FOLD_INIT;
12499 return result;
12502 tree
12503 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12504 tree type, tree op0, tree op1)
12506 tree result;
12507 START_FOLD_INIT;
12509 result = fold_build2_loc (loc, code, type, op0, op1);
12511 END_FOLD_INIT;
12512 return result;
12515 tree
12516 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12517 int nargs, tree *argarray)
12519 tree result;
12520 START_FOLD_INIT;
12522 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12524 END_FOLD_INIT;
12525 return result;
12528 #undef START_FOLD_INIT
12529 #undef END_FOLD_INIT
12531 /* Determine if the first argument is a multiple of the second argument.
12532 Return 0 if it is not, or if we cannot easily determine it to be.
12534 An example of the sort of thing we care about (at this point; this routine
12535 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12536 fold cases do now) is discovering that
12538 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12540 is a multiple of
12542 SAVE_EXPR (J * 8)
12544 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12546 This code also handles discovering that
12548 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12550 is a multiple of 8 so we don't have to worry about dealing with a
12551 possible remainder.
12553 Note that we *look* inside a SAVE_EXPR only to determine how it was
12554 calculated; it is not safe for fold to do much of anything else with the
12555 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12556 at run time. For example, the latter example above *cannot* be implemented
12557 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12558 evaluation time of the original SAVE_EXPR is not necessarily the same at
12559 the time the new expression is evaluated. The only optimization of this
12560 sort that would be valid is changing
12562 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12564 divided by 8 to
12566 SAVE_EXPR (I) * SAVE_EXPR (J)
12568 (where the same SAVE_EXPR (J) is used in the original and the
12569 transformed version). */
12571 int
12572 multiple_of_p (tree type, const_tree top, const_tree bottom)
12574 gimple *stmt;
12575 tree t1, op1, op2;
12577 if (operand_equal_p (top, bottom, 0))
12578 return 1;
12580 if (TREE_CODE (type) != INTEGER_TYPE)
12581 return 0;
12583 switch (TREE_CODE (top))
12585 case BIT_AND_EXPR:
12586 /* Bitwise and provides a power of two multiple. If the mask is
12587 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12588 if (!integer_pow2p (bottom))
12589 return 0;
12590 /* FALLTHRU */
12592 case MULT_EXPR:
12593 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12594 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12596 case MINUS_EXPR:
12597 /* It is impossible to prove precisely whether op0 - op1 is a multiple
12598 of bottom, so be conservative here and check whether both op0 and
12599 op1 are multiples of bottom.  Note we check the second operand first
12600 since it's usually simpler.  */
12601 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12602 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12604 case PLUS_EXPR:
12605 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12606 as op0 - 3 if the expression has unsigned type. For example,
12607 (X / 3) + 0xfffffffd is a multiple of 3, but 0xfffffffd is not.  */
12608 op1 = TREE_OPERAND (top, 1);
12609 if (TYPE_UNSIGNED (type)
12610 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
12611 op1 = fold_build1 (NEGATE_EXPR, type, op1);
12612 return (multiple_of_p (type, op1, bottom)
12613 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12615 case LSHIFT_EXPR:
12616 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12618 op1 = TREE_OPERAND (top, 1);
12619 /* const_binop may not detect overflow correctly,
12620 so check for it explicitly here. */
12621 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12622 && 0 != (t1 = fold_convert (type,
12623 const_binop (LSHIFT_EXPR,
12624 size_one_node,
12625 op1)))
12626 && !TREE_OVERFLOW (t1))
12627 return multiple_of_p (type, t1, bottom);
12629 return 0;
12631 case NOP_EXPR:
12632 /* Can't handle conversions from non-integral or wider integral type. */
12633 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12634 || (TYPE_PRECISION (type)
12635 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12636 return 0;
12638 /* fall through */
12640 case SAVE_EXPR:
12641 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12643 case COND_EXPR:
12644 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12645 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12647 case INTEGER_CST:
12648 if (TREE_CODE (bottom) != INTEGER_CST
12649 || integer_zerop (bottom)
12650 || (TYPE_UNSIGNED (type)
12651 && (tree_int_cst_sgn (top) < 0
12652 || tree_int_cst_sgn (bottom) < 0)))
12653 return 0;
12654 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12655 SIGNED);
12657 case SSA_NAME:
12658 if (TREE_CODE (bottom) == INTEGER_CST
12659 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
12660 && gimple_code (stmt) == GIMPLE_ASSIGN)
12662 enum tree_code code = gimple_assign_rhs_code (stmt);
12664 /* Check for special cases to see if top is defined as a multiple
12665 of bottom:
12667 top = (X & ~(bottom - 1)) ; bottom is a power of 2
12669 or
12671 Y = X % bottom
12672 top = X - Y.  */
12673 if (code == BIT_AND_EXPR
12674 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12675 && TREE_CODE (op2) == INTEGER_CST
12676 && integer_pow2p (bottom)
12677 && wi::multiple_of_p (wi::to_widest (op2),
12678 wi::to_widest (bottom), UNSIGNED))
12679 return 1;
12681 op1 = gimple_assign_rhs1 (stmt);
12682 if (code == MINUS_EXPR
12683 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12684 && TREE_CODE (op2) == SSA_NAME
12685 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
12686 && gimple_code (stmt) == GIMPLE_ASSIGN
12687 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
12688 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
12689 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
12690 return 1;
12693 /* fall through */
12695 default:
12696 return 0;
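/* Illustrative sketch (hypothetical, not part of the original source):
   a typical query against multiple_of_p.  X * 8 is recognized as a
   multiple of 4 through the MULT_EXPR case, because the INTEGER_CST
   operand 8 is itself a multiple of 4.  X is assumed to have integer
   type.  */
static int
multiple_of_example (tree x)
{
  tree type = TREE_TYPE (x);
  tree top = fold_build2 (MULT_EXPR, type, x, build_int_cst (type, 8));
  tree bottom = build_int_cst (type, 4);
  return multiple_of_p (type, top, bottom);	/* 1 */
}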
12700 #define tree_expr_nonnegative_warnv_p(X, Y) \
12701 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12703 #define RECURSE(X) \
12704 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
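/* Illustrative note (not part of the original source): the two
   definitions above force every recursive call in the functions below
   to go through RECURSE, which threads DEPTH + 1 so that the recursion
   stays bounded.  Writing tree_expr_nonnegative_warnv_p (op0, ...)
   directly would expand the poisoned function-like macro and produce a
   compile-time error; RECURSE's expansion parenthesizes the name,
   (tree_expr_nonnegative_warnv_p) (...), which bypasses the macro and
   calls the real function.  */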
12706 /* Return true if a value with code CODE and type TYPE is known to be non-negative.  */
12708 static bool
12709 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12711 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12712 && truth_value_p (code))
12713 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12714 have a signed:1 type (where the values are -1 and 0).  */
12715 return true;
12716 return false;
12719 /* Return true if (CODE OP0) is known to be non-negative. If the return
12720 value is based on the assumption that signed overflow is undefined,
12721 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12722 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12724 bool
12725 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12726 bool *strict_overflow_p, int depth)
12728 if (TYPE_UNSIGNED (type))
12729 return true;
12731 switch (code)
12733 case ABS_EXPR:
12734 /* We can't return 1 if flag_wrapv is set because
12735 ABS_EXPR<INT_MIN> = INT_MIN. */
12736 if (!ANY_INTEGRAL_TYPE_P (type))
12737 return true;
12738 if (TYPE_OVERFLOW_UNDEFINED (type))
12740 *strict_overflow_p = true;
12741 return true;
12743 break;
12745 case NON_LVALUE_EXPR:
12746 case FLOAT_EXPR:
12747 case FIX_TRUNC_EXPR:
12748 return RECURSE (op0);
12750 CASE_CONVERT:
12752 tree inner_type = TREE_TYPE (op0);
12753 tree outer_type = type;
12755 if (TREE_CODE (outer_type) == REAL_TYPE)
12757 if (TREE_CODE (inner_type) == REAL_TYPE)
12758 return RECURSE (op0);
12759 if (INTEGRAL_TYPE_P (inner_type))
12761 if (TYPE_UNSIGNED (inner_type))
12762 return true;
12763 return RECURSE (op0);
12766 else if (INTEGRAL_TYPE_P (outer_type))
12768 if (TREE_CODE (inner_type) == REAL_TYPE)
12769 return RECURSE (op0);
12770 if (INTEGRAL_TYPE_P (inner_type))
12771 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12772 && TYPE_UNSIGNED (inner_type);
12775 break;
12777 default:
12778 return tree_simple_nonnegative_warnv_p (code, type);
12781 /* We don't know the sign of `t', so be conservative and return false.  */
12782 return false;
12785 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12786 value is based on the assumption that signed overflow is undefined,
12787 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12788 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12790 bool
12791 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12792 tree op1, bool *strict_overflow_p,
12793 int depth)
12795 if (TYPE_UNSIGNED (type))
12796 return true;
12798 switch (code)
12800 case POINTER_PLUS_EXPR:
12801 case PLUS_EXPR:
12802 if (FLOAT_TYPE_P (type))
12803 return RECURSE (op0) && RECURSE (op1);
12805 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12806 both unsigned and at least 2 bits shorter than the result. */
12807 if (TREE_CODE (type) == INTEGER_TYPE
12808 && TREE_CODE (op0) == NOP_EXPR
12809 && TREE_CODE (op1) == NOP_EXPR)
12811 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12812 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12813 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12814 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12816 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12817 TYPE_PRECISION (inner2)) + 1;
12818 return prec < TYPE_PRECISION (type);
12821 break;
12823 case MULT_EXPR:
12824 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12826 /* x * x is always non-negative for floating point x
12827 or when signed overflow is undefined.  */
12828 if (operand_equal_p (op0, op1, 0)
12829 || (RECURSE (op0) && RECURSE (op1)))
12831 if (ANY_INTEGRAL_TYPE_P (type)
12832 && TYPE_OVERFLOW_UNDEFINED (type))
12833 *strict_overflow_p = true;
12834 return true;
12838 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are both
12839 unsigned and the sum of their precisions is less than the result's.  */
12840 if (TREE_CODE (type) == INTEGER_TYPE
12841 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12842 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12844 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12845 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12846 : TREE_TYPE (op0);
12847 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12848 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12849 : TREE_TYPE (op1);
12851 bool unsigned0 = TYPE_UNSIGNED (inner0);
12852 bool unsigned1 = TYPE_UNSIGNED (inner1);
12854 if (TREE_CODE (op0) == INTEGER_CST)
12855 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12857 if (TREE_CODE (op1) == INTEGER_CST)
12858 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12860 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12861 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12863 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12864 ? tree_int_cst_min_precision (op0, UNSIGNED)
12865 : TYPE_PRECISION (inner0);
12867 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12868 ? tree_int_cst_min_precision (op1, UNSIGNED)
12869 : TYPE_PRECISION (inner1);
12871 return precision0 + precision1 < TYPE_PRECISION (type);
12874 return false;
12876 case BIT_AND_EXPR:
12877 case MAX_EXPR:
12878 return RECURSE (op0) || RECURSE (op1);
12880 case BIT_IOR_EXPR:
12881 case BIT_XOR_EXPR:
12882 case MIN_EXPR:
12883 case RDIV_EXPR:
12884 case TRUNC_DIV_EXPR:
12885 case CEIL_DIV_EXPR:
12886 case FLOOR_DIV_EXPR:
12887 case ROUND_DIV_EXPR:
12888 return RECURSE (op0) && RECURSE (op1);
12890 case TRUNC_MOD_EXPR:
12891 return RECURSE (op0);
12893 case FLOOR_MOD_EXPR:
12894 return RECURSE (op1);
12896 case CEIL_MOD_EXPR:
12897 case ROUND_MOD_EXPR:
12898 default:
12899 return tree_simple_nonnegative_warnv_p (code, type);
12902 /* We don't know the sign of `t', so be conservative and return false.  */
12903 return false;
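/* Illustrative note (hypothetical numbers, not part of the original
   source): the zero_extend rules above in concrete terms.  With 8-bit
   unsigned x and y widened to 32-bit int, x + y <= 255 + 255 = 510 and
   x * y <= 255 * 255 = 65025, both far below INT_MAX, so the PLUS_EXPR
   rule (MAX (8, 8) + 1 < 32) and the MULT_EXPR rule (8 + 8 < 32) both
   prove the result non-negative.  */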
12906 /* Return true if T is known to be non-negative. If the return
12907 value is based on the assumption that signed overflow is undefined,
12908 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12909 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12911 bool
12912 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12914 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12915 return true;
12917 switch (TREE_CODE (t))
12919 case INTEGER_CST:
12920 return tree_int_cst_sgn (t) >= 0;
12922 case REAL_CST:
12923 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12925 case FIXED_CST:
12926 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12928 case COND_EXPR:
12929 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12931 case SSA_NAME:
12932 /* Limit the depth of recursion to avoid quadratic behavior.
12933 This is expected to catch almost all occurrences in practice.
12934 If this code misses important cases that unbounded recursion
12935 would not, passes that need this information could be revised
12936 to provide it through dataflow propagation. */
12937 return (!name_registered_for_update_p (t)
12938 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12939 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12940 strict_overflow_p, depth));
12942 default:
12943 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12947 /* Return true if T is known to be non-negative. If the return
12948 value is based on the assumption that signed overflow is undefined,
12949 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12950 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12952 bool
12953 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
12954 bool *strict_overflow_p, int depth)
12956 switch (fn)
12958 CASE_CFN_ACOS:
12959 CASE_CFN_ACOSH:
12960 CASE_CFN_CABS:
12961 CASE_CFN_COSH:
12962 CASE_CFN_ERFC:
12963 CASE_CFN_EXP:
12964 CASE_CFN_EXP10:
12965 CASE_CFN_EXP2:
12966 CASE_CFN_FABS:
12967 CASE_CFN_FDIM:
12968 CASE_CFN_HYPOT:
12969 CASE_CFN_POW10:
12970 CASE_CFN_FFS:
12971 CASE_CFN_PARITY:
12972 CASE_CFN_POPCOUNT:
12973 CASE_CFN_CLZ:
12974 CASE_CFN_CLRSB:
12975 case CFN_BUILT_IN_BSWAP32:
12976 case CFN_BUILT_IN_BSWAP64:
12977 /* Always true. */
12978 return true;
12980 CASE_CFN_SQRT:
12981 /* sqrt(-0.0) is -0.0. */
12982 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12983 return true;
12984 return RECURSE (arg0);
12986 CASE_CFN_ASINH:
12987 CASE_CFN_ATAN:
12988 CASE_CFN_ATANH:
12989 CASE_CFN_CBRT:
12990 CASE_CFN_CEIL:
12991 CASE_CFN_ERF:
12992 CASE_CFN_EXPM1:
12993 CASE_CFN_FLOOR:
12994 CASE_CFN_FMOD:
12995 CASE_CFN_FREXP:
12996 CASE_CFN_ICEIL:
12997 CASE_CFN_IFLOOR:
12998 CASE_CFN_IRINT:
12999 CASE_CFN_IROUND:
13000 CASE_CFN_LCEIL:
13001 CASE_CFN_LDEXP:
13002 CASE_CFN_LFLOOR:
13003 CASE_CFN_LLCEIL:
13004 CASE_CFN_LLFLOOR:
13005 CASE_CFN_LLRINT:
13006 CASE_CFN_LLROUND:
13007 CASE_CFN_LRINT:
13008 CASE_CFN_LROUND:
13009 CASE_CFN_MODF:
13010 CASE_CFN_NEARBYINT:
13011 CASE_CFN_RINT:
13012 CASE_CFN_ROUND:
13013 CASE_CFN_SCALB:
13014 CASE_CFN_SCALBLN:
13015 CASE_CFN_SCALBN:
13016 CASE_CFN_SIGNBIT:
13017 CASE_CFN_SIGNIFICAND:
13018 CASE_CFN_SINH:
13019 CASE_CFN_TANH:
13020 CASE_CFN_TRUNC:
13021 /* True if the 1st argument is nonnegative. */
13022 return RECURSE (arg0);
13024 CASE_CFN_FMAX:
13025 /* True if the 1st OR 2nd arguments are nonnegative. */
13026 return RECURSE (arg0) || RECURSE (arg1);
13028 CASE_CFN_FMIN:
13029 /* True if the 1st AND 2nd arguments are nonnegative. */
13030 return RECURSE (arg0) && RECURSE (arg1);
13032 CASE_CFN_COPYSIGN:
13033 /* True if the 2nd argument is nonnegative. */
13034 return RECURSE (arg1);
13036 CASE_CFN_POWI:
13037 /* True if the 1st argument is nonnegative or the second
13038 argument is an even integer. */
13039 if (TREE_CODE (arg1) == INTEGER_CST
13040 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13041 return true;
13042 return RECURSE (arg0);
13044 CASE_CFN_POW:
13045 /* True if the 1st argument is nonnegative or the second
13046 argument is an even integer valued real. */
13047 if (TREE_CODE (arg1) == REAL_CST)
13049 REAL_VALUE_TYPE c;
13050 HOST_WIDE_INT n;
13052 c = TREE_REAL_CST (arg1);
13053 n = real_to_integer (&c);
13054 if ((n & 1) == 0)
13056 REAL_VALUE_TYPE cint;
13057 real_from_integer (&cint, VOIDmode, n, SIGNED);
13058 if (real_identical (&c, &cint))
13059 return true;
13062 return RECURSE (arg0);
13064 default:
13065 break;
13067 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
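/* Illustrative note (not part of the original source): under the
   CASE_CFN_POW rule above, pow (x, 2.0) is known non-negative for any
   x because 2.0 is an even integer-valued real, while pow (x, 3.0) and
   pow (x, 2.5) are known non-negative only when x is.  */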
13070 /* Return true if T is known to be non-negative. If the return
13071 value is based on the assumption that signed overflow is undefined,
13072 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13073 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13075 static bool
13076 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13078 enum tree_code code = TREE_CODE (t);
13079 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13080 return true;
13082 switch (code)
13084 case TARGET_EXPR:
13086 tree temp = TARGET_EXPR_SLOT (t);
13087 t = TARGET_EXPR_INITIAL (t);
13089 /* If the initializer is non-void, then it's a normal expression
13090 that will be assigned to the slot. */
13091 if (!VOID_TYPE_P (t))
13092 return RECURSE (t);
13094 /* Otherwise, the initializer sets the slot in some way. One common
13095 way is an assignment statement at the end of the initializer. */
13096 while (1)
13098 if (TREE_CODE (t) == BIND_EXPR)
13099 t = expr_last (BIND_EXPR_BODY (t));
13100 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13101 || TREE_CODE (t) == TRY_CATCH_EXPR)
13102 t = expr_last (TREE_OPERAND (t, 0));
13103 else if (TREE_CODE (t) == STATEMENT_LIST)
13104 t = expr_last (t);
13105 else
13106 break;
13108 if (TREE_CODE (t) == MODIFY_EXPR
13109 && TREE_OPERAND (t, 0) == temp)
13110 return RECURSE (TREE_OPERAND (t, 1));
13112 return false;
13115 case CALL_EXPR:
13117 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13118 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13120 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13121 get_call_combined_fn (t),
13122 arg0,
13123 arg1,
13124 strict_overflow_p, depth);
13126 case COMPOUND_EXPR:
13127 case MODIFY_EXPR:
13128 return RECURSE (TREE_OPERAND (t, 1));
13130 case BIND_EXPR:
13131 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13133 case SAVE_EXPR:
13134 return RECURSE (TREE_OPERAND (t, 0));
13136 default:
13137 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13141 #undef RECURSE
13142 #undef tree_expr_nonnegative_warnv_p
13144 /* Return true if T is known to be non-negative. If the return
13145 value is based on the assumption that signed overflow is undefined,
13146 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13147 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13149 bool
13150 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13152 enum tree_code code;
13153 if (t == error_mark_node)
13154 return false;
13156 code = TREE_CODE (t);
13157 switch (TREE_CODE_CLASS (code))
13159 case tcc_binary:
13160 case tcc_comparison:
13161 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13162 TREE_TYPE (t),
13163 TREE_OPERAND (t, 0),
13164 TREE_OPERAND (t, 1),
13165 strict_overflow_p, depth);
13167 case tcc_unary:
13168 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13169 TREE_TYPE (t),
13170 TREE_OPERAND (t, 0),
13171 strict_overflow_p, depth);
13173 case tcc_constant:
13174 case tcc_declaration:
13175 case tcc_reference:
13176 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13178 default:
13179 break;
13182 switch (code)
13184 case TRUTH_AND_EXPR:
13185 case TRUTH_OR_EXPR:
13186 case TRUTH_XOR_EXPR:
13187 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13188 TREE_TYPE (t),
13189 TREE_OPERAND (t, 0),
13190 TREE_OPERAND (t, 1),
13191 strict_overflow_p, depth);
13192 case TRUTH_NOT_EXPR:
13193 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13194 TREE_TYPE (t),
13195 TREE_OPERAND (t, 0),
13196 strict_overflow_p, depth);
13198 case COND_EXPR:
13199 case CONSTRUCTOR:
13200 case OBJ_TYPE_REF:
13201 case ASSERT_EXPR:
13202 case ADDR_EXPR:
13203 case WITH_SIZE_EXPR:
13204 case SSA_NAME:
13205 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13207 default:
13208 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13212 /* Return true if `t' is known to be non-negative. Handle warnings
13213 about undefined signed overflow. */
13215 bool
13216 tree_expr_nonnegative_p (tree t)
13218 bool ret, strict_overflow_p;
13220 strict_overflow_p = false;
13221 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13222 if (strict_overflow_p)
13223 fold_overflow_warning (("assuming signed overflow does not occur when "
13224 "determining that expression is always "
13225 "non-negative"),
13226 WARN_STRICT_OVERFLOW_MISC);
13227 return ret;
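/* Illustrative sketch (hypothetical, not part of the original source):
   a typical client of tree_expr_nonnegative_p.  Note that ABS_EXPR is
   provably non-negative only when signed overflow is undefined, since
   ABS_EXPR<INT_MIN> == INT_MIN under -fwrapv; that is why the ABS_EXPR
   case earlier sets *STRICT_OVERFLOW_P and why this query may emit a
   -Wstrict-overflow warning.  */
static bool
abs_known_nonnegative_p (tree x)
{
  tree a = fold_build1 (ABS_EXPR, TREE_TYPE (x), x);
  return tree_expr_nonnegative_p (a);
}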
13231 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13232 For floating point we further ensure that T is not denormal.
13233 Similar logic is present in nonzero_address in rtlanal.h.
13235 If the return value is based on the assumption that signed overflow
13236 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13237 change *STRICT_OVERFLOW_P. */
13239 bool
13240 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13241 bool *strict_overflow_p)
13243 switch (code)
13245 case ABS_EXPR:
13246 return tree_expr_nonzero_warnv_p (op0,
13247 strict_overflow_p);
13249 case NOP_EXPR:
13251 tree inner_type = TREE_TYPE (op0);
13252 tree outer_type = type;
13254 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13255 && tree_expr_nonzero_warnv_p (op0,
13256 strict_overflow_p));
13258 break;
13260 case NON_LVALUE_EXPR:
13261 return tree_expr_nonzero_warnv_p (op0,
13262 strict_overflow_p);
13264 default:
13265 break;
13268 return false;
13271 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13272 For floating point we further ensure that T is not denormal.
13273 Similar logic is present in nonzero_address in rtlanal.h.
13275 If the return value is based on the assumption that signed overflow
13276 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13277 change *STRICT_OVERFLOW_P. */
13279 bool
13280 tree_binary_nonzero_warnv_p (enum tree_code code,
13281 tree type,
13282 tree op0,
13283 tree op1, bool *strict_overflow_p)
13285 bool sub_strict_overflow_p;
13286 switch (code)
13288 case POINTER_PLUS_EXPR:
13289 case PLUS_EXPR:
13290 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13292 /* In the presence of negative values it is hard
13293 to say anything.  */
13294 sub_strict_overflow_p = false;
13295 if (!tree_expr_nonnegative_warnv_p (op0,
13296 &sub_strict_overflow_p)
13297 || !tree_expr_nonnegative_warnv_p (op1,
13298 &sub_strict_overflow_p))
13299 return false;
13300 /* One of the operands must be positive and the other non-negative.  */
13301 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13302 overflows, on a twos-complement machine the sum of two
13303 nonnegative numbers can never be zero. */
13304 return (tree_expr_nonzero_warnv_p (op0,
13305 strict_overflow_p)
13306 || tree_expr_nonzero_warnv_p (op1,
13307 strict_overflow_p));
13309 break;
13311 case MULT_EXPR:
13312 if (TYPE_OVERFLOW_UNDEFINED (type))
13314 if (tree_expr_nonzero_warnv_p (op0,
13315 strict_overflow_p)
13316 && tree_expr_nonzero_warnv_p (op1,
13317 strict_overflow_p))
13319 *strict_overflow_p = true;
13320 return true;
13323 break;
13325 case MIN_EXPR:
13326 sub_strict_overflow_p = false;
13327 if (tree_expr_nonzero_warnv_p (op0,
13328 &sub_strict_overflow_p)
13329 && tree_expr_nonzero_warnv_p (op1,
13330 &sub_strict_overflow_p))
13332 if (sub_strict_overflow_p)
13333 *strict_overflow_p = true;
13335 break;
13337 case MAX_EXPR:
13338 sub_strict_overflow_p = false;
13339 if (tree_expr_nonzero_warnv_p (op0,
13340 &sub_strict_overflow_p))
13342 if (sub_strict_overflow_p)
13343 *strict_overflow_p = true;
13345 /* When both operands are nonzero, then MAX must be too. */
13346 if (tree_expr_nonzero_warnv_p (op1,
13347 strict_overflow_p))
13348 return true;
13350 /* MAX where operand 0 is positive is positive. */
13351 return tree_expr_nonnegative_warnv_p (op0,
13352 strict_overflow_p);
13354 /* MAX where operand 1 is positive is positive. */
13355 else if (tree_expr_nonzero_warnv_p (op1,
13356 &sub_strict_overflow_p)
13357 && tree_expr_nonnegative_warnv_p (op1,
13358 &sub_strict_overflow_p))
13360 if (sub_strict_overflow_p)
13361 *strict_overflow_p = true;
13362 return true;
13364 break;
13366 case BIT_IOR_EXPR:
13367 return (tree_expr_nonzero_warnv_p (op1,
13368 strict_overflow_p)
13369 || tree_expr_nonzero_warnv_p (op0,
13370 strict_overflow_p));
13372 default:
13373 break;
13376 return false;
13379 /* Return true when T is an address and is known to be nonzero.
13380 For floating point we further ensure that T is not denormal.
13381 Similar logic is present in nonzero_address in rtlanal.h.
13383 If the return value is based on the assumption that signed overflow
13384 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13385 change *STRICT_OVERFLOW_P. */
13387 bool
13388 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13390 bool sub_strict_overflow_p;
13391 switch (TREE_CODE (t))
13393 case INTEGER_CST:
13394 return !integer_zerop (t);
13396 case ADDR_EXPR:
13398 tree base = TREE_OPERAND (t, 0);
13400 if (!DECL_P (base))
13401 base = get_base_address (base);
13403 if (base && TREE_CODE (base) == TARGET_EXPR)
13404 base = TARGET_EXPR_SLOT (base);
13406 if (!base)
13407 return false;
13409 /* For objects in the symbol table, check if we know they are non-zero.
13410 Don't do anything for variables and functions before the symtab is
13411 built; it is quite possible that they will be declared weak later.  */
13412 int nonzero_addr = maybe_nonzero_address (base);
13413 if (nonzero_addr >= 0)
13414 return nonzero_addr;
13416 /* Constants are never weak. */
13417 if (CONSTANT_CLASS_P (base))
13418 return true;
13420 return false;
13423 case COND_EXPR:
13424 sub_strict_overflow_p = false;
13425 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13426 &sub_strict_overflow_p)
13427 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13428 &sub_strict_overflow_p))
13430 if (sub_strict_overflow_p)
13431 *strict_overflow_p = true;
13432 return true;
13434 break;
13436 case SSA_NAME:
13437 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13438 break;
13439 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
13441 default:
13442 break;
13444 return false;
13447 #define integer_valued_real_p(X) \
13448 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13450 #define RECURSE(X) \
13451 ((integer_valued_real_p) (X, depth + 1))
13453 /* Return true if the floating point result of (CODE OP0) has an
13454 integer value. We also allow +Inf, -Inf and NaN to be considered
13455 integer values. Return false for signaling NaN.
13457 DEPTH is the current nesting depth of the query. */
13459 bool
13460 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13462 switch (code)
13464 case FLOAT_EXPR:
13465 return true;
13467 case ABS_EXPR:
13468 return RECURSE (op0);
13470 CASE_CONVERT:
13472 tree type = TREE_TYPE (op0);
13473 if (TREE_CODE (type) == INTEGER_TYPE)
13474 return true;
13475 if (TREE_CODE (type) == REAL_TYPE)
13476 return RECURSE (op0);
13477 break;
13480 default:
13481 break;
13483 return false;
13486 /* Return true if the floating point result of (CODE OP0 OP1) has an
13487 integer value. We also allow +Inf, -Inf and NaN to be considered
13488 integer values. Return false for signaling NaN.
13490 DEPTH is the current nesting depth of the query. */
13492 bool
13493 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13495 switch (code)
13497 case PLUS_EXPR:
13498 case MINUS_EXPR:
13499 case MULT_EXPR:
13500 case MIN_EXPR:
13501 case MAX_EXPR:
13502 return RECURSE (op0) && RECURSE (op1);
13504 default:
13505 break;
13507 return false;
13510 /* Return true if the floating point result of calling FNDECL with arguments
13511 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13512 considered integer values. Return false for signaling NaN. If FNDECL
13513 takes fewer than 2 arguments, the remaining ARGn are null.
13515 DEPTH is the current nesting depth of the query. */
13517 bool
13518 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13520 switch (fn)
13522 CASE_CFN_CEIL:
13523 CASE_CFN_FLOOR:
13524 CASE_CFN_NEARBYINT:
13525 CASE_CFN_RINT:
13526 CASE_CFN_ROUND:
13527 CASE_CFN_TRUNC:
13528 return true;
13530 CASE_CFN_FMIN:
13531 CASE_CFN_FMAX:
13532 return RECURSE (arg0) && RECURSE (arg1);
13534 default:
13535 break;
13537 return false;
13540 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13541 has an integer value. We also allow +Inf, -Inf and NaN to be
13542 considered integer values. Return false for signaling NaN.
13544 DEPTH is the current nesting depth of the query. */
13546 bool
13547 integer_valued_real_single_p (tree t, int depth)
13549 switch (TREE_CODE (t))
13551 case REAL_CST:
13552 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13554 case COND_EXPR:
13555 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13557 case SSA_NAME:
13558 /* Limit the depth of recursion to avoid quadratic behavior.
13559 This is expected to catch almost all occurrences in practice.
13560 If this code misses important cases that unbounded recursion
13561 would not, passes that need this information could be revised
13562 to provide it through dataflow propagation. */
13563 return (!name_registered_for_update_p (t)
13564 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13565 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13566 depth));
13568 default:
13569 break;
13571 return false;
13574 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13575 has an integer value. We also allow +Inf, -Inf and NaN to be
13576 considered integer values. Return false for signaling NaN.
13578 DEPTH is the current nesting depth of the query. */
13580 static bool
13581 integer_valued_real_invalid_p (tree t, int depth)
13583 switch (TREE_CODE (t))
13585 case COMPOUND_EXPR:
13586 case MODIFY_EXPR:
13587 case BIND_EXPR:
13588 return RECURSE (TREE_OPERAND (t, 1));
13590 case SAVE_EXPR:
13591 return RECURSE (TREE_OPERAND (t, 0));
13593 default:
13594 break;
13596 return false;
13599 #undef RECURSE
13600 #undef integer_valued_real_p
13602 /* Return true if the floating point expression T has an integer value.
13603 We also allow +Inf, -Inf and NaN to be considered integer values.
13604 Return false for signaling NaN.
13606 DEPTH is the current nesting depth of the query. */
13608 bool
13609 integer_valued_real_p (tree t, int depth)
13611 if (t == error_mark_node)
13612 return false;
13614 tree_code code = TREE_CODE (t);
13615 switch (TREE_CODE_CLASS (code))
13617 case tcc_binary:
13618 case tcc_comparison:
13619 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13620 TREE_OPERAND (t, 1), depth);
13622 case tcc_unary:
13623 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13625 case tcc_constant:
13626 case tcc_declaration:
13627 case tcc_reference:
13628 return integer_valued_real_single_p (t, depth);
13630 default:
13631 break;
13634 switch (code)
13636 case COND_EXPR:
13637 case SSA_NAME:
13638 return integer_valued_real_single_p (t, depth);
13640 case CALL_EXPR:
13642 tree arg0 = (call_expr_nargs (t) > 0
13643 ? CALL_EXPR_ARG (t, 0)
13644 : NULL_TREE);
13645 tree arg1 = (call_expr_nargs (t) > 1
13646 ? CALL_EXPR_ARG (t, 1)
13647 : NULL_TREE);
13648 return integer_valued_real_call_p (get_call_combined_fn (t),
13649 arg0, arg1, depth);
13652 default:
13653 return integer_valued_real_invalid_p (t, depth);
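/* Illustrative sketch (hypothetical, not part of the original source):
   trunc (x) + 1.0 is integer-valued.  The CALL_EXPR case accepts
   CASE_CFN_TRUNC, and the PLUS_EXPR case requires both operands to be
   integer-valued, which 1.0 is via the REAL_CST case.  X is assumed to
   have a scalar floating type.  */
static bool
example_integer_valued_p (tree x)
{
  tree fn = builtin_decl_explicit (BUILT_IN_TRUNC);
  tree call = build_call_expr (fn, 1, x);
  tree sum = fold_build2 (PLUS_EXPR, TREE_TYPE (x), call,
			  build_real (TREE_TYPE (x), dconst1));
  return integer_valued_real_p (sum, 0);	/* true */
}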
13657 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13658 attempt to fold the expression to a constant without modifying TYPE,
13659 OP0 or OP1.
13661 If the expression could be simplified to a constant, then return
13662 the constant. If the expression would not be simplified to a
13663 constant, then return NULL_TREE. */
13665 tree
13666 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13668 tree tem = fold_binary (code, type, op0, op1);
13669 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13672 /* Given the components of a unary expression CODE, TYPE and OP0,
13673 attempt to fold the expression to a constant without modifying
13674 TYPE or OP0.
13676 If the expression could be simplified to a constant, then return
13677 the constant. If the expression would not be simplified to a
13678 constant, then return NULL_TREE. */
13680 tree
13681 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13683 tree tem = fold_unary (code, type, op0);
13684 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13687 /* If EXP represents referencing an element in a constant string
13688 (either via pointer arithmetic or array indexing), return the
13689 tree representing the value accessed, otherwise return NULL. */
13691 tree
13692 fold_read_from_constant_string (tree exp)
13694 if ((TREE_CODE (exp) == INDIRECT_REF
13695 || TREE_CODE (exp) == ARRAY_REF)
13696 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13698 tree exp1 = TREE_OPERAND (exp, 0);
13699 tree index;
13700 tree string;
13701 location_t loc = EXPR_LOCATION (exp);
13703 if (TREE_CODE (exp) == INDIRECT_REF)
13704 string = string_constant (exp1, &index);
13705 else
13707 tree low_bound = array_ref_low_bound (exp);
13708 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13710 /* Optimize the special case of a zero lower bound.
13712 We convert the low_bound to sizetype to avoid some problems
13713 with constant folding.  (E.g. suppose the lower bound is 1,
13714 and its mode is QI.  Without the conversion, (ARRAY
13715 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13716 +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
13717 if (! integer_zerop (low_bound))
13718 index = size_diffop_loc (loc, index,
13719 fold_convert_loc (loc, sizetype, low_bound));
13721 string = exp1;
13724 if (string
13725 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13726 && TREE_CODE (string) == STRING_CST
13727 && TREE_CODE (index) == INTEGER_CST
13728 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13729 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13730 == MODE_INT)
13731 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13732 return build_int_cst_type (TREE_TYPE (exp),
13733 (TREE_STRING_POINTER (string)
13734 [TREE_INT_CST_LOW (index)]));
13736 return NULL;
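/* Illustrative note (not part of the original source): given the
   constant string "abc", an ARRAY_REF "abc"[1] with integer element
   type folds through the routine above to the character constant 'b';
   a non-constant index or an out-of-bounds constant index yields
   NULL.  */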
13739 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13740 an integer constant, real, or fixed-point constant.
13742 TYPE is the type of the result. */
13744 static tree
13745 fold_negate_const (tree arg0, tree type)
13747 tree t = NULL_TREE;
13749 switch (TREE_CODE (arg0))
13751 case INTEGER_CST:
13753 bool overflow;
13754 wide_int val = wi::neg (arg0, &overflow);
13755 t = force_fit_type (type, val, 1,
13756 (overflow | TREE_OVERFLOW (arg0))
13757 && !TYPE_UNSIGNED (type));
13758 break;
13761 case REAL_CST:
13762 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13763 break;
13765 case FIXED_CST:
13767 FIXED_VALUE_TYPE f;
13768 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13769 &(TREE_FIXED_CST (arg0)), NULL,
13770 TYPE_SATURATING (type));
13771 t = build_fixed (type, f);
13772 /* Propagate overflow flags. */
13773 if (overflow_p | TREE_OVERFLOW (arg0))
13774 TREE_OVERFLOW (t) = 1;
13775 break;
13778 default:
13779 gcc_unreachable ();
13782 return t;
13785 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13786 an integer constant or real constant.
13788 TYPE is the type of the result. */
13790 tree
13791 fold_abs_const (tree arg0, tree type)
13793 tree t = NULL_TREE;
13795 switch (TREE_CODE (arg0))
13797 case INTEGER_CST:
13799 /* If the value is unsigned or non-negative, then the absolute value
13800 is the same as the ordinary value. */
13801 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13802 t = arg0;
13804 /* If the value is negative, then the absolute value is
13805 its negation. */
13806 else
13808 bool overflow;
13809 wide_int val = wi::neg (arg0, &overflow);
13810 t = force_fit_type (type, val, -1,
13811 overflow | TREE_OVERFLOW (arg0));
13814 break;
13816 case REAL_CST:
13817 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13818 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13819 else
13820 t = arg0;
13821 break;
13823 default:
13824 gcc_unreachable ();
13827 return t;
13830 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13831 constant. TYPE is the type of the result. */
13833 static tree
13834 fold_not_const (const_tree arg0, tree type)
13836 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13838 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13841 /* Given CODE, a relational operator, the target type, TYPE and two
13842 constant operands OP0 and OP1, return the result of the
13843 relational operation. If the result is not a compile time
13844 constant, then return NULL_TREE. */
13846 static tree
13847 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13849 int result, invert;
13851 /* From here on, the only cases we handle are when the result is
13852 known to be a constant. */
13854 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13856 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13857 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13859 /* Handle the cases where either operand is a NaN. */
13860 if (real_isnan (c0) || real_isnan (c1))
13862 switch (code)
13864 case EQ_EXPR:
13865 case ORDERED_EXPR:
13866 result = 0;
13867 break;
13869 case NE_EXPR:
13870 case UNORDERED_EXPR:
13871 case UNLT_EXPR:
13872 case UNLE_EXPR:
13873 case UNGT_EXPR:
13874 case UNGE_EXPR:
13875 case UNEQ_EXPR:
13876 result = 1;
13877 break;
13879 case LT_EXPR:
13880 case LE_EXPR:
13881 case GT_EXPR:
13882 case GE_EXPR:
13883 case LTGT_EXPR:
13884 if (flag_trapping_math)
13885 return NULL_TREE;
13886 result = 0;
13887 break;
13889 default:
13890 gcc_unreachable ();
13893 return constant_boolean_node (result, type);
13896 return constant_boolean_node (real_compare (code, c0, c1), type);
13899 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13901 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13902 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13903 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13906 /* Handle equality/inequality of complex constants. */
13907 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13909 tree rcond = fold_relational_const (code, type,
13910 TREE_REALPART (op0),
13911 TREE_REALPART (op1));
13912 tree icond = fold_relational_const (code, type,
13913 TREE_IMAGPART (op0),
13914 TREE_IMAGPART (op1));
13915 if (code == EQ_EXPR)
13916 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13917 else if (code == NE_EXPR)
13918 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13919 else
13920 return NULL_TREE;
13923 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13925 if (!VECTOR_TYPE_P (type))
13927 /* We have a vector comparison with a scalar boolean result.  */
13928 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
13929 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
13930 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
13932 tree elem0 = VECTOR_CST_ELT (op0, i);
13933 tree elem1 = VECTOR_CST_ELT (op1, i);
13934 tree tmp = fold_relational_const (code, type, elem0, elem1);
13935 if (tmp == NULL_TREE)
13936 return NULL_TREE;
13937 if (integer_zerop (tmp))
13938 return constant_boolean_node (false, type);
13940 return constant_boolean_node (true, type);
13942 unsigned count = VECTOR_CST_NELTS (op0);
13943 tree *elts = XALLOCAVEC (tree, count);
13944 gcc_assert (VECTOR_CST_NELTS (op1) == count
13945 && TYPE_VECTOR_SUBPARTS (type) == count);
13947 for (unsigned i = 0; i < count; i++)
13949 tree elem_type = TREE_TYPE (type);
13950 tree elem0 = VECTOR_CST_ELT (op0, i);
13951 tree elem1 = VECTOR_CST_ELT (op1, i);
13953 tree tem = fold_relational_const (code, elem_type,
13954 elem0, elem1);
13956 if (tem == NULL_TREE)
13957 return NULL_TREE;
13959 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
13962 return build_vector (type, elts);
13965 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13967 To compute GT, swap the arguments and do LT.
13968 To compute GE, do LT and invert the result.
13969 To compute LE, swap the arguments, do LT and invert the result.
13970 To compute NE, do EQ and invert the result.
13972 Therefore, the code below must handle only EQ and LT. */
13974 if (code == LE_EXPR || code == GT_EXPR)
13976 std::swap (op0, op1);
13977 code = swap_tree_comparison (code);
13980 /* Note that it is safe to invert for real values here because we
13981 have already handled the one case that it matters. */
13983 invert = 0;
13984 if (code == NE_EXPR || code == GE_EXPR)
13986 invert = 1;
13987 code = invert_tree_comparison (code, false);
13990 /* Compute a result for LT or EQ if args permit;
13991 otherwise return NULL_TREE.  */
13992 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13994 if (code == EQ_EXPR)
13995 result = tree_int_cst_equal (op0, op1);
13996 else
13997 result = tree_int_cst_lt (op0, op1);
13999 else
14000 return NULL_TREE;
14002 if (invert)
14003 result ^= 1;
14004 return constant_boolean_node (result, type);
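/* Illustrative note (not part of the original source): when either
   operand is a NaN, EQ_EXPR and ORDERED_EXPR fold to false and NE_EXPR
   and the unordered comparisons fold to true, while the signaling
   comparisons LT/LE/GT/GE/LTGT are left unfolded under
   -ftrapping-math because evaluating them would raise an invalid
   operand exception at run time.  */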
14007 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14008 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14009 itself. */
14011 tree
14012 fold_build_cleanup_point_expr (tree type, tree expr)
14014 /* If the expression does not have side effects then we don't have to wrap
14015 it with a cleanup point expression. */
14016 if (!TREE_SIDE_EFFECTS (expr))
14017 return expr;
14019 /* If the expression is a return, check whether the expression inside the
14020 return, or the right-hand side of the modify expression inside the
14021 return, has no side effects.  If either has no side effects, we don't
14022 need to wrap the expression in a cleanup point expression.  Note we don't
14023 check the left-hand side of the modify because it is always a return decl.  */
14024 if (TREE_CODE (expr) == RETURN_EXPR)
14026 tree op = TREE_OPERAND (expr, 0);
14027 if (!op || !TREE_SIDE_EFFECTS (op))
14028 return expr;
14029 op = TREE_OPERAND (op, 1);
14030 if (!TREE_SIDE_EFFECTS (op))
14031 return expr;
14034 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
14037 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14038 of an indirection through OP0, or NULL_TREE if no simplification is
14039 possible. */
14041 tree
14042 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14044 tree sub = op0;
14045 tree subtype;
14047 STRIP_NOPS (sub);
14048 subtype = TREE_TYPE (sub);
14049 if (!POINTER_TYPE_P (subtype)
14050 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
14051 return NULL_TREE;
14053 if (TREE_CODE (sub) == ADDR_EXPR)
14055 tree op = TREE_OPERAND (sub, 0);
14056 tree optype = TREE_TYPE (op);
14057 /* *&CONST_DECL -> to the value of the const decl. */
14058 if (TREE_CODE (op) == CONST_DECL)
14059 return DECL_INITIAL (op);
14060 /* *&p => p; make sure to handle *&"str"[cst] here. */
14061 if (type == optype)
14063 tree fop = fold_read_from_constant_string (op);
14064 if (fop)
14065 return fop;
14066 else
14067 return op;
14069 /* *(foo *)&fooarray => fooarray[0] */
14070 else if (TREE_CODE (optype) == ARRAY_TYPE
14071 && type == TREE_TYPE (optype)
14072 && (!in_gimple_form
14073 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14075 tree type_domain = TYPE_DOMAIN (optype);
14076 tree min_val = size_zero_node;
14077 if (type_domain && TYPE_MIN_VALUE (type_domain))
14078 min_val = TYPE_MIN_VALUE (type_domain);
14079 if (in_gimple_form
14080 && TREE_CODE (min_val) != INTEGER_CST)
14081 return NULL_TREE;
14082 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14083 NULL_TREE, NULL_TREE);
14085 /* *(foo *)&complexfoo => __real__ complexfoo */
14086 else if (TREE_CODE (optype) == COMPLEX_TYPE
14087 && type == TREE_TYPE (optype))
14088 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14089 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14090 else if (TREE_CODE (optype) == VECTOR_TYPE
14091 && type == TREE_TYPE (optype))
14093 tree part_width = TYPE_SIZE (type);
14094 tree index = bitsize_int (0);
14095 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14099 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14100 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14102 tree op00 = TREE_OPERAND (sub, 0);
14103 tree op01 = TREE_OPERAND (sub, 1);
14105 STRIP_NOPS (op00);
14106 if (TREE_CODE (op00) == ADDR_EXPR)
14108 tree op00type;
14109 op00 = TREE_OPERAND (op00, 0);
14110 op00type = TREE_TYPE (op00);
14112 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14113 if (TREE_CODE (op00type) == VECTOR_TYPE
14114 && type == TREE_TYPE (op00type))
14116 tree part_width = TYPE_SIZE (type);
14117 unsigned HOST_WIDE_INT max_offset
14118 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
14119 * TYPE_VECTOR_SUBPARTS (op00type));
14120 if (tree_int_cst_sign_bit (op01) == 0
14121 && compare_tree_int (op01, max_offset) == -1)
14123 unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
14124 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14125 tree index = bitsize_int (indexi);
14126 return fold_build3_loc (loc,
14127 BIT_FIELD_REF, type, op00,
14128 part_width, index);
14131 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14132 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14133 && type == TREE_TYPE (op00type))
14135 tree size = TYPE_SIZE_UNIT (type);
14136 if (tree_int_cst_equal (size, op01))
14137 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14139 /* ((foo *)&fooarray)[1] => fooarray[1] */
14140 else if (TREE_CODE (op00type) == ARRAY_TYPE
14141 && type == TREE_TYPE (op00type))
14143 tree type_domain = TYPE_DOMAIN (op00type);
14144 tree min_val = size_zero_node;
14145 if (type_domain && TYPE_MIN_VALUE (type_domain))
14146 min_val = TYPE_MIN_VALUE (type_domain);
14147 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14148 TYPE_SIZE_UNIT (type));
14149 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14150 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14151 NULL_TREE, NULL_TREE);
14156 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14157 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14158 && type == TREE_TYPE (TREE_TYPE (subtype))
14159 && (!in_gimple_form
14160 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14162 tree type_domain;
14163 tree min_val = size_zero_node;
14164 sub = build_fold_indirect_ref_loc (loc, sub);
14165 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14166 if (type_domain && TYPE_MIN_VALUE (type_domain))
14167 min_val = TYPE_MIN_VALUE (type_domain);
14168 if (in_gimple_form
14169 && TREE_CODE (min_val) != INTEGER_CST)
14170 return NULL_TREE;
14171 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14172 NULL_TREE);
14175 return NULL_TREE;
14178 /* Builds an expression for an indirection through T, simplifying some
14179 cases. */
14181 tree
14182 build_fold_indirect_ref_loc (location_t loc, tree t)
14184 tree type = TREE_TYPE (TREE_TYPE (t));
14185 tree sub = fold_indirect_ref_1 (loc, type, t);
14187 if (sub)
14188 return sub;
14190 return build1_loc (loc, INDIRECT_REF, type, t);
14193 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14195 tree
14196 fold_indirect_ref_loc (location_t loc, tree t)
14198 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14200 if (sub)
14201 return sub;
14202 else
14203 return t;
14206 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14207 whose result is ignored. The type of the returned tree need not be
14208 the same as the original expression. */
14210 tree
14211 fold_ignored_result (tree t)
14213 if (!TREE_SIDE_EFFECTS (t))
14214 return integer_zero_node;
14216 for (;;)
14217 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14219 case tcc_unary:
14220 t = TREE_OPERAND (t, 0);
14221 break;
14223 case tcc_binary:
14224 case tcc_comparison:
14225 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14226 t = TREE_OPERAND (t, 0);
14227 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14228 t = TREE_OPERAND (t, 1);
14229 else
14230 return t;
14231 break;
14233 case tcc_expression:
14234 switch (TREE_CODE (t))
14236 case COMPOUND_EXPR:
14237 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14238 return t;
14239 t = TREE_OPERAND (t, 0);
14240 break;
14242 case COND_EXPR:
14243 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14244 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14245 return t;
14246 t = TREE_OPERAND (t, 0);
14247 break;
14249 default:
14250 return t;
14252 break;
14254 default:
14255 return t;
14259 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14261 tree
14262 round_up_loc (location_t loc, tree value, unsigned int divisor)
14264 tree div = NULL_TREE;
14266 if (divisor == 1)
14267 return value;
14269 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14270 have to do anything.  Only do this check when VALUE is not an
14271 INTEGER_CST, because for constants the check is more expensive
14272 than simply doing the rounding.  */
14273 if (TREE_CODE (value) != INTEGER_CST)
14275 div = build_int_cst (TREE_TYPE (value), divisor);
14277 if (multiple_of_p (TREE_TYPE (value), value, div))
14278 return value;
14281 /* If divisor is a power of two, simplify this to bit manipulation. */
14282 if (pow2_or_zerop (divisor))
14284 if (TREE_CODE (value) == INTEGER_CST)
14286 wide_int val = value;
14287 bool overflow_p;
14289 if ((val & (divisor - 1)) == 0)
14290 return value;
14292 overflow_p = TREE_OVERFLOW (value);
14293 val += divisor - 1;
14294 val &= (int) -divisor;
14295 if (val == 0)
14296 overflow_p = true;
14298 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14300 else
14302 tree t;
14304 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14305 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14306 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14307 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14310 else
14312 if (!div)
14313 div = build_int_cst (TREE_TYPE (value), divisor);
14314 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14315 value = size_binop_loc (loc, MULT_EXPR, value, div);
14318 return value;
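/* Illustrative sketch (hypothetical, not part of the original source):
   the power-of-two fast path above in plain integer arithmetic.
   Rounding 13 up to a multiple of 8 computes (13 + 7) & ~7 == 16.  */
static unsigned HOST_WIDE_INT
round_up_pow2 (unsigned HOST_WIDE_INT value, unsigned int divisor)
{
  /* Assumes DIVISOR is a nonzero power of two.  */
  return (value + divisor - 1) & ~(unsigned HOST_WIDE_INT) (divisor - 1);
}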
14321 /* Likewise, but round down. */
14323 tree
14324 round_down_loc (location_t loc, tree value, int divisor)
14326 tree div = NULL_TREE;
14328 gcc_assert (divisor > 0);
14329 if (divisor == 1)
14330 return value;
14332 /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14333 have to do anything.  Only do this check when VALUE is not an
14334 INTEGER_CST, because for constants the check is more expensive
14335 than simply doing the rounding.  */
14336 if (TREE_CODE (value) != INTEGER_CST)
14338 div = build_int_cst (TREE_TYPE (value), divisor);
14340 if (multiple_of_p (TREE_TYPE (value), value, div))
14341 return value;
14344 /* If divisor is a power of two, simplify this to bit manipulation. */
14345 if (pow2_or_zerop (divisor))
14347 tree t;
14349 t = build_int_cst (TREE_TYPE (value), -divisor);
14350 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14352 else
14354 if (!div)
14355 div = build_int_cst (TREE_TYPE (value), divisor);
14356 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14357 value = size_binop_loc (loc, MULT_EXPR, value, div);
14360 return value;
14363 /* Returns the pointer to the base of the object addressed by EXP and
14364 extracts the information about the offset of the access, storing it
14365 in *PBITPOS and *POFFSET.  */
14367 static tree
14368 split_address_to_core_and_offset (tree exp,
14369 HOST_WIDE_INT *pbitpos, tree *poffset)
14371 tree core;
14372 machine_mode mode;
14373 int unsignedp, reversep, volatilep;
14374 HOST_WIDE_INT bitsize;
14375 location_t loc = EXPR_LOCATION (exp);
14377 if (TREE_CODE (exp) == ADDR_EXPR)
14379 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14380 poffset, &mode, &unsignedp, &reversep,
14381 &volatilep);
14382 core = build_fold_addr_expr_loc (loc, core);
14384 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
14386 core = TREE_OPERAND (exp, 0);
14387 STRIP_NOPS (core);
14388 *pbitpos = 0;
14389 *poffset = TREE_OPERAND (exp, 1);
14390 if (TREE_CODE (*poffset) == INTEGER_CST)
14392 offset_int tem = wi::sext (wi::to_offset (*poffset),
14393 TYPE_PRECISION (TREE_TYPE (*poffset)));
14394 tem <<= LOG2_BITS_PER_UNIT;
14395 if (wi::fits_shwi_p (tem))
14397 *pbitpos = tem.to_shwi ();
14398 *poffset = NULL_TREE;
14402 else
14404 core = exp;
14405 *pbitpos = 0;
14406 *poffset = NULL_TREE;
14409 return core;
14412 /* Returns true if addresses of E1 and E2 differ by a constant, false
14413 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14415 bool
14416 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14418 tree core1, core2;
14419 HOST_WIDE_INT bitpos1, bitpos2;
14420 tree toffset1, toffset2, tdiff, type;
14422 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14423 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14425 if (bitpos1 % BITS_PER_UNIT != 0
14426 || bitpos2 % BITS_PER_UNIT != 0
14427 || !operand_equal_p (core1, core2, 0))
14428 return false;
14430 if (toffset1 && toffset2)
14432 type = TREE_TYPE (toffset1);
14433 if (type != TREE_TYPE (toffset2))
14434 toffset2 = fold_convert (type, toffset2);
14436 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14437 if (!cst_and_fits_in_hwi (tdiff))
14438 return false;
14440 *diff = int_cst_value (tdiff);
14442 else if (toffset1 || toffset2)
14444 /* If only one of the offsets is non-constant, the difference cannot
14445 be a constant. */
14446 return false;
14448 else
14449 *diff = 0;
14451 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14452 return true;
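/* Illustrative note (not part of the original source): for E1 = &a[3]
   and E2 = &a[1] the two addresses share the core &a, the remaining
   offsets are constant, and *DIFF receives 2 * sizeof (element); if
   either offset is non-constant the routine returns false.  */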
14455 /* Return OFF converted to a pointer offset type suitable as offset for
14456 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14457 tree
14458 convert_to_ptrofftype_loc (location_t loc, tree off)
14460 return fold_convert_loc (loc, sizetype, off);
14463 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14464 tree
14465 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14467 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14468 ptr, convert_to_ptrofftype_loc (loc, off));
14471 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14472 tree
14473 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14475 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14476 ptr, size_int (off));
14479 /* Return a char pointer for a C string if it is a string constant
14480 or a sum of a string constant and an integer constant.  We only
14481 support string constants properly terminated with a '\0' character.
14482 If STRLEN is a valid pointer, the length (including the terminating
14483 character) of the returned string is stored to the argument.  */
14485 const char *
14486 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
14488 tree offset_node;
14490 if (strlen)
14491 *strlen = 0;
14493 src = string_constant (src, &offset_node);
14494 if (src == 0)
14495 return NULL;
14497 unsigned HOST_WIDE_INT offset = 0;
14498 if (offset_node != NULL_TREE)
14500 if (!tree_fits_uhwi_p (offset_node))
14501 return NULL;
14502 else
14503 offset = tree_to_uhwi (offset_node);
14506 unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
14507 const char *string = TREE_STRING_POINTER (src);
14509 /* Support only properly null-terminated strings. */
14510 if (string_length == 0
14511 || string[string_length - 1] != '\0'
14512 || offset >= string_length)
14513 return NULL;
14515 if (strlen)
14516 *strlen = string_length - offset;
14517 return string + offset;
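/* Illustrative note (hypothetical values, not part of the original
   source): for SRC representing "hello" + 2, c_getstr returns "llo"
   and stores 4 in *STRLEN (the terminating NUL is counted); it returns
   NULL for a non-constant source or one that is not properly
   NUL-terminated.  */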
14520 #if CHECKING_P
14522 namespace selftest {
14524 /* Helper functions for writing tests of folding trees. */
14526 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
14528 static void
14529 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
14530 tree constant)
14532 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
14535 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
14536 wrapping WRAPPED_EXPR. */
14538 static void
14539 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
14540 tree wrapped_expr)
14542 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
14543 ASSERT_NE (wrapped_expr, result);
14544 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
14545 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
14548 /* Verify that various arithmetic binary operations are folded
14549 correctly. */
14551 static void
14552 test_arithmetic_folding ()
14554 tree type = integer_type_node;
14555 tree x = create_tmp_var_raw (type, "x");
14556 tree zero = build_zero_cst (type);
14557 tree one = build_int_cst (type, 1);
14559 /* Addition. */
14560 /* 1 <-- (0 + 1) */
14561 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
14562 one);
14563 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
14564 one);
14566 /* (nonlvalue)x <-- (x + 0) */
14567 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
14568 x);
14570 /* Subtraction. */
14571 /* 0 <-- (x - x) */
14572 assert_binop_folds_to_const (x, MINUS_EXPR, x,
14573 zero);
14574 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
14575 x);
14577 /* Multiplication. */
14578 /* 0 <-- (x * 0) */
14579 assert_binop_folds_to_const (x, MULT_EXPR, zero,
14580 zero);
14582 /* (nonlvalue)x <-- (x * 1) */
14583 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
14584 x);
14587 /* Verify that various binary operations on vectors are folded
14588 correctly. */
14590 static void
14591 test_vector_folding ()
14593 tree inner_type = integer_type_node;
14594 tree type = build_vector_type (inner_type, 4);
14595 tree zero = build_zero_cst (type);
14596 tree one = build_one_cst (type);
14598 /* Verify equality tests that return a scalar boolean result. */
14599 tree res_type = boolean_type_node;
14600 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
14601 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
14602 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
14603 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
14606 /* Run all of the selftests within this file. */
14608 void
14609 fold_const_c_tests ()
14611 test_arithmetic_folding ();
14612 test_vector_folding ();
14615 } // namespace selftest
14617 #endif /* CHECKING_P */