Use tree_vector_builder::new_binary_operation for folding
[official-gcc.git] / gcc / fold-const.c
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
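/* Illustrative sketch of the entry points described above.  It is not
   part of the original file, the helper name is hypothetical, and it is
   guarded by "#if 0" so it has no effect on the build.  */
#if 0
static void
example_fold_entry_points (tree expr)
{
  /* size_int wraps an integer value in a sizetype INTEGER_CST, and
     size_binop combines two such constants: 4 + 8 folds to 12.  */
  tree twelve = size_binop (PLUS_EXPR, size_int (4), size_int (8));
  gcc_checking_assert (tree_to_shwi (twelve) == 12);

  /* fold returns a simplified tree, or the input tree when no
     simplification applies.  */
  expr = fold (expr);
}
#endif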
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "params.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "attribs.h"
#include "tree-vector-builder.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
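/* How the encoding above composes (explanatory sketch, not part of the
   original file): bit 0 stands for LT, bit 1 for EQ, bit 2 for GT and
   bit 3 for UNORDERED, so ANDing or ORing two comparisons of the same
   operands is just a bitwise AND or OR of their codes.  */
#if 0
/* Hypothetical self-checks of the encoding.  */
STATIC_ASSERT (COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ));
STATIC_ASSERT (COMPCODE_NE == (COMPCODE_UNORD | COMPCODE_LT | COMPCODE_GT));
/* (a <= b) && (a >= b) collapses to (a == b).  */
STATIC_ASSERT ((COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ);
#endif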
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
                         SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
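/* Usage sketch (hypothetical helper, not from the original file):
   folding an exact division, e.g. deriving an element count from two
   byte sizes.  */
#if 0
static tree
example_div_if_zero_remainder (void)
{
  tree twelve = build_int_cst (sizetype, 12);
  tree four = build_int_cst (sizetype, 4);
  tree five = build_int_cst (sizetype, 5);
  /* 12 % 5 != 0, so no tree is produced.  */
  gcc_checking_assert (div_if_zero_remainder (twelve, five) == NULL_TREE);
  /* 12 / 4 divides exactly and folds to 3.  */
  return div_if_zero_remainder (twelve, four);
}
#endif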
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
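/* The deferral API above is used bracket-style around folding whose
   result may be discarded.  A minimal sketch (hypothetical caller, not
   from the original file):  */
#if 0
static tree
example_fold_quietly (tree expr)
{
  fold_defer_overflow_warnings ();
  tree res = fold (expr);
  /* Only let a deferred -Wstrict-overflow warning out if the folded
     result is actually going to be used.  */
  bool used = res != expr && TREE_CONSTANT (res);
  fold_undefer_overflow_warnings (used, NULL, 0);
  return used ? res : expr;
}
#endif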
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
        if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
          return true;

        int count = VECTOR_CST_NELTS (t), i;

        for (i = 0; i < count; i++)
          if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
            return false;

        return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          || HONOR_SIGNED_ZEROS (element_mode (type))
          || (ANY_INTEGRAL_TYPE_P (type)
              && ! TYPE_OVERFLOW_WRAPS (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
             && !HONOR_SIGNED_ZEROS (element_mode (type))
             && (! ANY_INTEGRAL_TYPE_P (type)
                 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      /* INT_MIN/n * n doesn't overflow, but negating one operand does
         overflow if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
          && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
                 && (wi::popcount
                     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
                || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
                    && (wi::popcount
                        (wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
        return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  */
      if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
          || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
          || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
              && ! integer_onep (TREE_OPERAND (t, 1))))
        return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
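/* Concrete cases for negate_expr_p, as a sketch (hypothetical helper,
   not in the original file).  Assumes the usual signed int type.  */
#if 0
static void
example_negate_expr_p (void)
{
  /* -7 is representable, so negation is cheap.  */
  gcc_checking_assert (negate_expr_p (build_int_cst (integer_type_node, 7)));

  /* -INT_MIN overflows, so the minimum value is rejected.  */
  gcc_checking_assert (!negate_expr_p (TYPE_MIN_VALUE (integer_type_node)));
}
#endif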
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || (ANY_INTEGRAL_TYPE_P (type)
              && !TYPE_OVERFLOW_TRAPS (type)
              && TYPE_OVERFLOW_WRAPS (type))
          || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
        tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
        if (rpart && ipart)
          return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
        tree_vector_builder elts;
        elts.new_unary_operation (type, t, true);
        unsigned int count = elts.encoded_nelts ();
        for (unsigned int i = 0; i < count; ++i)
          {
            tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
            if (elt == NULL_TREE)
              return NULL_TREE;
            elts.quick_push (elt);
          }

        return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
        return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
          && !HONOR_SIGNED_ZEROS (element_mode (type)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
        return fold_build2_loc (loc, TREE_CODE (t), type,
                                negate_expr (TREE_OPERAND (t, 0)),
                                TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  */
      if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
           || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
           || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
               && ! integer_onep (TREE_OPERAND (t, 1))))
          && negate_expr_p (TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, TREE_CODE (t), type,
                                TREE_OPERAND (t, 0),
                                negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
            tree *minus_varp, tree *conp, tree *minus_conp,
            tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
                   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR
                       && (TREE_CODE (in) == PLUS_EXPR
                           || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
        *minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
        *minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
         when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      if (*conp)
        *minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
        *conp = *minus_conp, *minus_conp = 0;
      if (var)
        *minus_varp = var, var = 0;
      else if (*minus_varp)
        var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
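/* Sketch of split_tree in action (hypothetical helper, not from the
   original file).  For IN = X + 4 with CODE = PLUS_EXPR and NEGATE_P = 0,
   the variable part X is returned, the literal 4 lands in *LITP, and the
   remaining outputs stay null.  */
#if 0
static tree
example_split_tree (tree x)
{
  tree minus_var, con, minus_con, lit, minus_lit;
  tree in = fold_build2 (PLUS_EXPR, sizetype, x, size_int (4));
  tree var = split_tree (in, sizetype, PLUS_EXPR, &minus_var, &con,
                         &minus_con, &lit, &minus_lit, /*negate_p=*/0);
  /* Recombine the parts; this reproduces X + 4.  */
  return associate_trees (UNKNOWN_LOCATION, var, lit, PLUS_EXPR, sizetype);
}
#endif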
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants PARG1 and PARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree parg1, const_tree parg2,
                   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (parg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wi::tree_to_wide_ref arg1 = wi::to_wide (parg1);
  wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RSHIFT_EXPR)
            code = LSHIFT_EXPR;
          else
            code = RSHIFT_EXPR;
        }

      if (code == RSHIFT_EXPR)
        /* It's unclear from the C standard whether shifts can overflow.
           The following code ignores overflow; perhaps a C standard
           interpretation ruling is needed.  */
        res = wi::rshift (arg1, arg2, sign);
      else
        res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
        {
          arg2 = -arg2;
          if (code == RROTATE_EXPR)
            code = LROTATE_EXPR;
          else
            code = RROTATE_EXPR;
        }

      if (code == RROTATE_EXPR)
        res = wi::rrotate (arg1, arg2);
      else
        res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
        return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
                      (((sign == SIGNED || overflowable == -1)
                        && overflow)
                       | TREE_OVERFLOW (parg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
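/* Usage sketch (hypothetical helper, not from the original file):
   combining two INTEGER_CSTs, where unsupported codes or division by
   zero yield NULL_TREE instead of a constant.  */
#if 0
static tree
example_int_const_binop (void)
{
  tree ten = build_int_cst (integer_type_node, 10);
  tree three = build_int_cst (integer_type_node, 3);
  tree zero = build_int_cst (integer_type_node, 0);
  /* Division by zero cannot be folded.  */
  gcc_checking_assert (int_const_binop (TRUNC_DIV_EXPR, ten, zero)
                       == NULL_TREE);
  /* 10 / 3 folds to 3 under truncating division.  */
  return int_const_binop (TRUNC_DIV_EXPR, ten, three);
}
#endif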
/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
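/* Why distribution over addition matters for the stepped VECTOR_CST
   encoding used below: if operand OPNO runs over an arithmetic series
   a, a+d, a+2d, ... the folded elements stay an arithmetic series
   exactly when the operation distributes over addition in that operand.
   Shifting left by a constant does, since (a + d) << c == (a << c) + (d << c),
   while shifting a constant by the series does not.  Explanatory note
   with a hypothetical check, not part of the original file.  */
#if 0
STATIC_ASSERT (((3 + 5) << 2) == ((3 << 2) + (5 << 2)));
#endif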
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
        return int_const_binop (PLUS_EXPR,
                                arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISSIGNALING_NAN (d1)
              || REAL_VALUE_ISSIGNALING_NAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && real_equal (&d2, &dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        {
          /* Make the resulting NaN value a qNaN when flag_signaling_nans
             is off.  */
          d1.signalling = 0;
          t = build_real (type, d1);
          return t;
        }
      else if (REAL_VALUE_ISNAN (d2))
        {
          /* Make the resulting NaN value a qNaN when flag_signaling_nans
             is off.  */
          d2.signalling = 0;
          t = build_real (type, d2);
          return t;
        }

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          if (TREE_CODE (arg2) != FIXED_CST)
            return NULL_TREE;
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          {
            if (TREE_CODE (arg2) != INTEGER_CST)
              return NULL_TREE;
            wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
            f2.data.high = w2.elt (1);
            f2.data.low = w2.ulow ();
            f2.mode = SImode;
          }
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru.  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))
          == TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
          && VECTOR_CST_STEPPED_P (arg2))
        /* We can operate directly on the encoding if:

             a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
           implies
             (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

           Addition and subtraction are the supported operators
           for which this is true.  */
        step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
        /* We can operate directly on stepped encodings if:

             a3 - a2 == a2 - a1
           implies:
             (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

           which is true if (x -> x op c) distributes over addition.  */
        step_ok_p = distributes_over_addition_p (code, 1);
      else
        /* Similarly in reverse.  */
        step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
        return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          tree elt = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elt == NULL_TREE)
            return NULL_TREE;
          elts.quick_push (elt);
        }

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
        return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);

          tree elt = const_binop (code, elem1, arg2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elt == NULL_TREE)
            return NULL_TREE;
          elts.quick_push (elt);
        }

      return elts.build ();
    }
  return NULL_TREE;
}
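/* Usage sketch for the vector path above (hypothetical helper and type
   argument, not from the original file): folding {1,2,3,4} + {10,20,30,40}
   element by element into {11,22,33,44}.  */
#if 0
static tree
example_const_binop_vector (tree v4si_type)
{
  /* Build the two constant operands with four explicit patterns of one
     element each, then let const_binop combine them.  */
  tree_vector_builder b1 (v4si_type, 4, 1);
  tree_vector_builder b2 (v4si_type, 4, 1);
  for (int i = 1; i <= 4; ++i)
    {
      b1.quick_push (build_int_cst (integer_type_node, i));
      b2.quick_push (build_int_cst (integer_type_node, i * 10));
    }
  return const_binop (PLUS_EXPR, b1.build (), b2.build ());
}
#endif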
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
           && TREE_CODE (arg2) == REAL_CST)
          || (TREE_CODE (arg1) == INTEGER_CST
              && TREE_CODE (arg2) == INTEGER_CST))
        return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
        {
          offset_int res = wi::sub (wi::to_offset (arg1),
                                    wi::to_offset (arg2));
          return force_fit_type (type, res, 1,
                                 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
        }
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
        unsigned int out_nelts, in_nelts, i;

        if (TREE_CODE (arg1) != VECTOR_CST
            || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        in_nelts = VECTOR_CST_NELTS (arg1);
        out_nelts = in_nelts * 2;
        gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
                    && out_nelts == TYPE_VECTOR_SUBPARTS (type));

        tree_vector_builder elts (type, out_nelts, 1);
        for (i = 0; i < out_nelts; i++)
          {
            tree elt = (i < in_nelts
                        ? VECTOR_CST_ELT (arg1, i)
                        : VECTOR_CST_ELT (arg2, i - in_nelts));
            elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
                                      ? NOP_EXPR : FIX_TRUNC_EXPR,
                                      TREE_TYPE (type), elt);
            if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
              return NULL_TREE;
            elts.quick_push (elt);
          }

        return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
        unsigned int out_nelts, in_nelts, out, ofs, scale;

        if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
          return NULL_TREE;

        in_nelts = VECTOR_CST_NELTS (arg1);
        out_nelts = in_nelts / 2;
        gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
                    && out_nelts == TYPE_VECTOR_SUBPARTS (type));

        if (code == VEC_WIDEN_MULT_LO_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
        else if (code == VEC_WIDEN_MULT_HI_EXPR)
          scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
        else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
          scale = 1, ofs = 0;
        else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
          scale = 1, ofs = 1;

        tree_vector_builder elts (type, out_nelts, 1);
        for (out = 0; out < out_nelts; out++)
          {
            unsigned int in = (out << scale) + ofs;
            tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                          VECTOR_CST_ELT (arg1, in));
            tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
                                          VECTOR_CST_ELT (arg2, in));

            if (t1 == NULL_TREE || t2 == NULL_TREE)
              return NULL_TREE;
            tree elt = const_binop (MULT_EXPR, t1, t2);
            if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
              return NULL_TREE;
            elts.quick_push (elt);
          }

        return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
                       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
         cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
          && !(targetm.addr_space.zero_address_valid
               (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
        return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
        /* Can't call fold_negate_const directly here as that doesn't
           handle all cases and we might not be able to negate some
           constants.  */
        tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
        if (tem && CONSTANT_CLASS_P (tem))
          return tem;
        break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
        return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        {
          tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
                                          TREE_TYPE (type));
          return build_complex (type, TREE_REALPART (arg0), ipart);
        }
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
        {
          tree elem;

          /* This can cope with stepped encodings because ~x == -1 - x.  */
          tree_vector_builder elements;
          elements.new_unary_operation (type, arg0, true);
          unsigned int i, count = elements.encoded_nelts ();
          for (i = 0; i < count; ++i)
            {
              elem = VECTOR_CST_ELT (arg0, i);
              elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
              if (elem == NULL_TREE)
                break;
              elements.quick_push (elem);
            }
          if (i == count)
            return elements.build ();
        }
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
        return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
        unsigned int out_nelts, in_nelts, i;
        enum tree_code subcode;

        if (TREE_CODE (arg0) != VECTOR_CST)
          return NULL_TREE;

        in_nelts = VECTOR_CST_NELTS (arg0);
        out_nelts = in_nelts / 2;
        gcc_assert (out_nelts == TYPE_VECTOR_SUBPARTS (type));

        unsigned int offset = 0;
        if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
                                   || code == VEC_UNPACK_FLOAT_LO_EXPR))
          offset = out_nelts;

        if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
          subcode = NOP_EXPR;
        else
          subcode = FLOAT_EXPR;

        tree_vector_builder elts (type, out_nelts, 1);
        for (i = 0; i < out_nelts; i++)
          {
            tree elt = fold_convert_const (subcode, TREE_TYPE (type),
                                           VECTOR_CST_ELT (arg0, i + offset));
            if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
              return NULL_TREE;
            elts.quick_push (elt);
          }

        return elts.build ();
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
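/* Usage sketch (hypothetical helper, not from the original file):
   sizetype is unsigned, so size_diffop_loc computes the difference in
   the signed counterpart instead of letting it wrap.  */
#if 0
static tree
example_size_diffop (void)
{
  /* 8 - 12 folds to -4 in ssizetype.  */
  return size_diffop_loc (UNKNOWN_LOCATION, size_int (8), size_int (12));
}
#endif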
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
                         !POINTER_TYPE_P (TREE_TYPE (arg1)),
                         TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
        {
          overflow = true;
          val = wi::to_wide (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (real_less (&u, &r))
            {
              overflow = true;
              val = wi::to_wide (ut);
            }
        }
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
1973 /* A subroutine of fold_convert_const handling conversions of a
1974 FIXED_CST to an integer type. */
1976 static tree
1977 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1979 tree t;
1980 double_int temp, temp_trunc;
1981 scalar_mode mode;
1983 /* Right shift FIXED_CST to temp by fbit. */
1984 temp = TREE_FIXED_CST (arg1).data;
1985 mode = TREE_FIXED_CST (arg1).mode;
1986 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1988 temp = temp.rshift (GET_MODE_FBIT (mode),
1989 HOST_BITS_PER_DOUBLE_INT,
1990 SIGNED_FIXED_POINT_MODE_P (mode));
1992 /* Left shift temp to temp_trunc by fbit. */
1993 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1994 HOST_BITS_PER_DOUBLE_INT,
1995 SIGNED_FIXED_POINT_MODE_P (mode));
1997 else
1999 temp = double_int_zero;
2000 temp_trunc = double_int_zero;
2003 /* If FIXED_CST is negative, we need to round the value toward 0.
2004 We do this by adding 1 to temp when the discarded fractional bits are nonzero. */
2005 if (SIGNED_FIXED_POINT_MODE_P (mode)
2006 && temp_trunc.is_negative ()
2007 && TREE_FIXED_CST (arg1).data != temp_trunc)
2008 temp += double_int_one;
2010 /* Given a fixed-point constant, make a new constant with the new type,
2011 appropriately sign-extended or truncated. */
2012 t = force_fit_type (type, temp, -1,
2013 (temp.is_negative ()
2014 && (TYPE_UNSIGNED (type)
2015 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2016 | TREE_OVERFLOW (arg1));
2018 return t;
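/* A minimal sketch, not part of the original source, of the
   round-toward-zero adjustment above; `fixed_to_int_trunc' is a
   hypothetical helper and assumes an arithmetic right shift:

     static long
     fixed_to_int_trunc (long raw, int fbit)
     {
       long q = raw >> fbit;                  /* Rounds toward -inf.  */
       if (q < 0 && q * (1L << fbit) != raw)  /* Fraction bits nonzero.  */
         q += 1;                              /* Round back toward 0.  */
       return q;
     }

   For the Q4 value -2.25 (raw bits -36, fbit == 4) the shift alone gives
   -3; the adjustment restores the expected truncated value -2.  */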
2021 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2022 to another floating point type. */
2024 static tree
2025 fold_convert_const_real_from_real (tree type, const_tree arg1)
2027 REAL_VALUE_TYPE value;
2028 tree t;
2030 /* Don't perform the operation if flag_signaling_nans is on
2031 and the operand is a signaling NaN. */
2032 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2033 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2034 return NULL_TREE;
2036 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2037 t = build_real (type, value);
2039 /* If converting an infinity or NAN to a representation that doesn't
2040 have one, set the overflow bit so that we can produce some kind of
2041 error message at the appropriate point if necessary. It's not the
2042 most user-friendly message, but it's better than nothing. */
2043 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2044 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2045 TREE_OVERFLOW (t) = 1;
2046 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2047 && !MODE_HAS_NANS (TYPE_MODE (type)))
2048 TREE_OVERFLOW (t) = 1;
2049 /* Regular overflow, conversion produced an infinity in a mode that
2050 can't represent them. */
2051 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2052 && REAL_VALUE_ISINF (value)
2053 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2054 TREE_OVERFLOW (t) = 1;
2055 else
2056 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2057 return t;
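/* Illustrative note, not part of the original source: under IEEE modes,
   which have both infinities and NaNs, none of the special cases above
   fire -- converting DBL_MAX to float simply rounds to +Inf with no
   overflow bit.  The TREE_OVERFLOW markings matter only for formats
   that lack infinities or NaNs (e.g. the VAX formats), where an Inf or
   NaN operand has no faithful representation.  */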
2060 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2061 to a floating point type. */
2063 static tree
2064 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2066 REAL_VALUE_TYPE value;
2067 tree t;
2069 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2070 &TREE_FIXED_CST (arg1));
2071 t = build_real (type, value);
2073 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2074 return t;
2077 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2078 to another fixed-point type. */
2080 static tree
2081 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2083 FIXED_VALUE_TYPE value;
2084 tree t;
2085 bool overflow_p;
2087 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2088 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2089 t = build_fixed (type, value);
2091 /* Propagate overflow flags. */
2092 if (overflow_p | TREE_OVERFLOW (arg1))
2093 TREE_OVERFLOW (t) = 1;
2094 return t;
2097 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2098 to a fixed-point type. */
2100 static tree
2101 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2103 FIXED_VALUE_TYPE value;
2104 tree t;
2105 bool overflow_p;
2106 double_int di;
2108 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2110 di.low = TREE_INT_CST_ELT (arg1, 0);
2111 if (TREE_INT_CST_NUNITS (arg1) == 1)
2112 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2113 else
2114 di.high = TREE_INT_CST_ELT (arg1, 1);
2116 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2117 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2118 TYPE_SATURATING (type));
2119 t = build_fixed (type, value);
2121 /* Propagate overflow flags. */
2122 if (overflow_p | TREE_OVERFLOW (arg1))
2123 TREE_OVERFLOW (t) = 1;
2124 return t;
2127 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2128 to a fixed-point type. */
2130 static tree
2131 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2133 FIXED_VALUE_TYPE value;
2134 tree t;
2135 bool overflow_p;
2137 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2138 &TREE_REAL_CST (arg1),
2139 TYPE_SATURATING (type));
2140 t = build_fixed (type, value);
2142 /* Propagate overflow flags. */
2143 if (overflow_p | TREE_OVERFLOW (arg1))
2144 TREE_OVERFLOW (t) = 1;
2145 return t;
2148 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2149 type TYPE. If no simplification can be done return NULL_TREE. */
2151 static tree
2152 fold_convert_const (enum tree_code code, tree type, tree arg1)
2154 if (TREE_TYPE (arg1) == type)
2155 return arg1;
2157 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2158 || TREE_CODE (type) == OFFSET_TYPE)
2160 if (TREE_CODE (arg1) == INTEGER_CST)
2161 return fold_convert_const_int_from_int (type, arg1);
2162 else if (TREE_CODE (arg1) == REAL_CST)
2163 return fold_convert_const_int_from_real (code, type, arg1);
2164 else if (TREE_CODE (arg1) == FIXED_CST)
2165 return fold_convert_const_int_from_fixed (type, arg1);
2167 else if (TREE_CODE (type) == REAL_TYPE)
2169 if (TREE_CODE (arg1) == INTEGER_CST)
2170 return build_real_from_int_cst (type, arg1);
2171 else if (TREE_CODE (arg1) == REAL_CST)
2172 return fold_convert_const_real_from_real (type, arg1);
2173 else if (TREE_CODE (arg1) == FIXED_CST)
2174 return fold_convert_const_real_from_fixed (type, arg1);
2176 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2178 if (TREE_CODE (arg1) == FIXED_CST)
2179 return fold_convert_const_fixed_from_fixed (type, arg1);
2180 else if (TREE_CODE (arg1) == INTEGER_CST)
2181 return fold_convert_const_fixed_from_int (type, arg1);
2182 else if (TREE_CODE (arg1) == REAL_CST)
2183 return fold_convert_const_fixed_from_real (type, arg1);
2185 else if (TREE_CODE (type) == VECTOR_TYPE)
2187 if (TREE_CODE (arg1) == VECTOR_CST
2188 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2190 tree elttype = TREE_TYPE (type);
2191 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2192 /* We can't handle steps directly when extending, since the
2193 values need to wrap at the original precision first. */
2194 bool step_ok_p
2195 = (INTEGRAL_TYPE_P (elttype)
2196 && INTEGRAL_TYPE_P (arg1_elttype)
2197 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2198 tree_vector_builder v;
2199 if (!v.new_unary_operation (type, arg1, step_ok_p))
2200 return NULL_TREE;
2201 unsigned int len = v.encoded_nelts ();
2202 for (unsigned int i = 0; i < len; ++i)
2204 tree elt = VECTOR_CST_ELT (arg1, i);
2205 tree cvt = fold_convert_const (code, elttype, elt);
2206 if (cvt == NULL_TREE)
2207 return NULL_TREE;
2208 v.quick_push (cvt);
2210 return v.build ();
2213 return NULL_TREE;
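/* Illustrative note, not part of the original source: the step_ok_p
   restriction exists because stepped VECTOR_CST encodings store only
   the leading elements plus an implied stride.  A signed char vector
   whose series starts { 126, 127, ... } wraps to { 126, 127, -128,
   -127, ... } at 8 bits; sign-extending each element to a wider type
   keeps those wrapped values, so the result is no longer a linear
   series and the encoding cannot simply be reused when extending.  */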
2216 /* Construct a vector of zero elements of vector type TYPE. */
2218 static tree
2219 build_zero_vector (tree type)
2221 tree t;
2223 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2224 return build_vector_from_val (type, t);
2227 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2229 bool
2230 fold_convertible_p (const_tree type, const_tree arg)
2232 tree orig = TREE_TYPE (arg);
2234 if (type == orig)
2235 return true;
2237 if (TREE_CODE (arg) == ERROR_MARK
2238 || TREE_CODE (type) == ERROR_MARK
2239 || TREE_CODE (orig) == ERROR_MARK)
2240 return false;
2242 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2243 return true;
2245 switch (TREE_CODE (type))
2247 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2248 case POINTER_TYPE: case REFERENCE_TYPE:
2249 case OFFSET_TYPE:
2250 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2251 || TREE_CODE (orig) == OFFSET_TYPE);
2253 case REAL_TYPE:
2254 case FIXED_POINT_TYPE:
2255 case VECTOR_TYPE:
2256 case VOID_TYPE:
2257 return TREE_CODE (type) == TREE_CODE (orig);
2259 default:
2260 return false;
2264 /* Convert expression ARG to type TYPE. Used by the middle-end for
2265 simple conversions in preference to calling the front-end's convert. */
2267 tree
2268 fold_convert_loc (location_t loc, tree type, tree arg)
2270 tree orig = TREE_TYPE (arg);
2271 tree tem;
2273 if (type == orig)
2274 return arg;
2276 if (TREE_CODE (arg) == ERROR_MARK
2277 || TREE_CODE (type) == ERROR_MARK
2278 || TREE_CODE (orig) == ERROR_MARK)
2279 return error_mark_node;
2281 switch (TREE_CODE (type))
2283 case POINTER_TYPE:
2284 case REFERENCE_TYPE:
2285 /* Handle conversions between pointers to different address spaces. */
2286 if (POINTER_TYPE_P (orig)
2287 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2288 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2289 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2290 /* fall through */
2292 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2293 case OFFSET_TYPE:
2294 if (TREE_CODE (arg) == INTEGER_CST)
2296 tem = fold_convert_const (NOP_EXPR, type, arg);
2297 if (tem != NULL_TREE)
2298 return tem;
2300 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2301 || TREE_CODE (orig) == OFFSET_TYPE)
2302 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2303 if (TREE_CODE (orig) == COMPLEX_TYPE)
2304 return fold_convert_loc (loc, type,
2305 fold_build1_loc (loc, REALPART_EXPR,
2306 TREE_TYPE (orig), arg));
2307 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2308 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2309 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2311 case REAL_TYPE:
2312 if (TREE_CODE (arg) == INTEGER_CST)
2314 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2315 if (tem != NULL_TREE)
2316 return tem;
2318 else if (TREE_CODE (arg) == REAL_CST)
2320 tem = fold_convert_const (NOP_EXPR, type, arg);
2321 if (tem != NULL_TREE)
2322 return tem;
2324 else if (TREE_CODE (arg) == FIXED_CST)
2326 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2327 if (tem != NULL_TREE)
2328 return tem;
2331 switch (TREE_CODE (orig))
2333 case INTEGER_TYPE:
2334 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2335 case POINTER_TYPE: case REFERENCE_TYPE:
2336 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2338 case REAL_TYPE:
2339 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2341 case FIXED_POINT_TYPE:
2342 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2344 case COMPLEX_TYPE:
2345 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2346 return fold_convert_loc (loc, type, tem);
2348 default:
2349 gcc_unreachable ();
2352 case FIXED_POINT_TYPE:
2353 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2354 || TREE_CODE (arg) == REAL_CST)
2356 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2357 if (tem != NULL_TREE)
2358 goto fold_convert_exit;
2361 switch (TREE_CODE (orig))
2363 case FIXED_POINT_TYPE:
2364 case INTEGER_TYPE:
2365 case ENUMERAL_TYPE:
2366 case BOOLEAN_TYPE:
2367 case REAL_TYPE:
2368 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2370 case COMPLEX_TYPE:
2371 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2372 return fold_convert_loc (loc, type, tem);
2374 default:
2375 gcc_unreachable ();
2378 case COMPLEX_TYPE:
2379 switch (TREE_CODE (orig))
2381 case INTEGER_TYPE:
2382 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2383 case POINTER_TYPE: case REFERENCE_TYPE:
2384 case REAL_TYPE:
2385 case FIXED_POINT_TYPE:
2386 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2387 fold_convert_loc (loc, TREE_TYPE (type), arg),
2388 fold_convert_loc (loc, TREE_TYPE (type),
2389 integer_zero_node));
2390 case COMPLEX_TYPE:
2392 tree rpart, ipart;
2394 if (TREE_CODE (arg) == COMPLEX_EXPR)
2396 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2397 TREE_OPERAND (arg, 0));
2398 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2399 TREE_OPERAND (arg, 1));
2400 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2403 arg = save_expr (arg);
2404 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2405 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2406 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2407 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2408 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2411 default:
2412 gcc_unreachable ();
2415 case VECTOR_TYPE:
2416 if (integer_zerop (arg))
2417 return build_zero_vector (type);
2418 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2419 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2420 || TREE_CODE (orig) == VECTOR_TYPE);
2421 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2423 case VOID_TYPE:
2424 tem = fold_ignored_result (arg);
2425 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2427 default:
2428 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2429 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2430 gcc_unreachable ();
2432 fold_convert_exit:
2433 protected_set_expr_location_unshare (tem, loc);
2434 return tem;
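/* Illustrative examples, not part of the original source: converting a
   COMPLEX_TYPE value c to a REAL_TYPE goes through the REALPART_EXPR
   path above, i.e. (double) c becomes (double) __real__ c, while a
   conversion between a vector and a same-sized integer type is
   expressed as a VIEW_CONVERT_EXPR (a reinterpretation of the bits)
   rather than a NOP_EXPR.  */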
2437 /* Return false if expr can be assumed not to be an lvalue, true
2438 otherwise. */
2440 static bool
2441 maybe_lvalue_p (const_tree x)
2443 /* We only need to wrap lvalue tree codes. */
2444 switch (TREE_CODE (x))
2446 case VAR_DECL:
2447 case PARM_DECL:
2448 case RESULT_DECL:
2449 case LABEL_DECL:
2450 case FUNCTION_DECL:
2451 case SSA_NAME:
2453 case COMPONENT_REF:
2454 case MEM_REF:
2455 case INDIRECT_REF:
2456 case ARRAY_REF:
2457 case ARRAY_RANGE_REF:
2458 case BIT_FIELD_REF:
2459 case OBJ_TYPE_REF:
2461 case REALPART_EXPR:
2462 case IMAGPART_EXPR:
2463 case PREINCREMENT_EXPR:
2464 case PREDECREMENT_EXPR:
2465 case SAVE_EXPR:
2466 case TRY_CATCH_EXPR:
2467 case WITH_CLEANUP_EXPR:
2468 case COMPOUND_EXPR:
2469 case MODIFY_EXPR:
2470 case TARGET_EXPR:
2471 case COND_EXPR:
2472 case BIND_EXPR:
2473 break;
2475 default:
2476 /* Assume the worst for front-end tree codes. */
2477 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2478 break;
2479 return false;
2482 return true;
2485 /* Return an expr equal to X but certainly not valid as an lvalue. */
2487 tree
2488 non_lvalue_loc (location_t loc, tree x)
2490 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2491 us. */
2492 if (in_gimple_form)
2493 return x;
2495 if (! maybe_lvalue_p (x))
2496 return x;
2497 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2500 /* When pedantic, return an expr equal to X but certainly not valid as a
2501 pedantic lvalue. Otherwise, return X. */
2503 static tree
2504 pedantic_non_lvalue_loc (location_t loc, tree x)
2506 return protected_set_expr_location_unshare (x, loc);
2509 /* Given a tree comparison code, return the code that is the logical inverse.
2510 It is generally not safe to do this for floating-point comparisons, except
2511 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2512 ERROR_MARK in this case. */
2514 enum tree_code
2515 invert_tree_comparison (enum tree_code code, bool honor_nans)
2517 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2518 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2519 return ERROR_MARK;
2521 switch (code)
2523 case EQ_EXPR:
2524 return NE_EXPR;
2525 case NE_EXPR:
2526 return EQ_EXPR;
2527 case GT_EXPR:
2528 return honor_nans ? UNLE_EXPR : LE_EXPR;
2529 case GE_EXPR:
2530 return honor_nans ? UNLT_EXPR : LT_EXPR;
2531 case LT_EXPR:
2532 return honor_nans ? UNGE_EXPR : GE_EXPR;
2533 case LE_EXPR:
2534 return honor_nans ? UNGT_EXPR : GT_EXPR;
2535 case LTGT_EXPR:
2536 return UNEQ_EXPR;
2537 case UNEQ_EXPR:
2538 return LTGT_EXPR;
2539 case UNGT_EXPR:
2540 return LE_EXPR;
2541 case UNGE_EXPR:
2542 return LT_EXPR;
2543 case UNLT_EXPR:
2544 return GE_EXPR;
2545 case UNLE_EXPR:
2546 return GT_EXPR;
2547 case ORDERED_EXPR:
2548 return UNORDERED_EXPR;
2549 case UNORDERED_EXPR:
2550 return ORDERED_EXPR;
2551 default:
2552 gcc_unreachable ();
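/* Illustrative note, not part of the original source: without NaNs the
   inverse of x < y is x >= y, but when NaNs are honored both x < y and
   x >= y are false for a NaN operand, so the correct inverse is
   UNGE_EXPR.  Under flag_trapping_math the inversion is refused
   outright (ERROR_MARK) for the ordered inequalities, since rewriting
   !(x < y) as an unordered comparison would drop the invalid-operand
   exception that x < y must raise on a NaN.  */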
2556 /* Similar, but return the comparison that results if the operands are
2557 swapped. This is safe for floating-point. */
2559 enum tree_code
2560 swap_tree_comparison (enum tree_code code)
2562 switch (code)
2564 case EQ_EXPR:
2565 case NE_EXPR:
2566 case ORDERED_EXPR:
2567 case UNORDERED_EXPR:
2568 case LTGT_EXPR:
2569 case UNEQ_EXPR:
2570 return code;
2571 case GT_EXPR:
2572 return LT_EXPR;
2573 case GE_EXPR:
2574 return LE_EXPR;
2575 case LT_EXPR:
2576 return GT_EXPR;
2577 case LE_EXPR:
2578 return GE_EXPR;
2579 case UNGT_EXPR:
2580 return UNLT_EXPR;
2581 case UNGE_EXPR:
2582 return UNLE_EXPR;
2583 case UNLT_EXPR:
2584 return UNGT_EXPR;
2585 case UNLE_EXPR:
2586 return UNGE_EXPR;
2587 default:
2588 gcc_unreachable ();
2593 /* Convert a comparison tree code from an enum tree_code representation
2594 into a compcode bit-based encoding. This function is the inverse of
2595 compcode_to_comparison. */
2597 static enum comparison_code
2598 comparison_to_compcode (enum tree_code code)
2600 switch (code)
2602 case LT_EXPR:
2603 return COMPCODE_LT;
2604 case EQ_EXPR:
2605 return COMPCODE_EQ;
2606 case LE_EXPR:
2607 return COMPCODE_LE;
2608 case GT_EXPR:
2609 return COMPCODE_GT;
2610 case NE_EXPR:
2611 return COMPCODE_NE;
2612 case GE_EXPR:
2613 return COMPCODE_GE;
2614 case ORDERED_EXPR:
2615 return COMPCODE_ORD;
2616 case UNORDERED_EXPR:
2617 return COMPCODE_UNORD;
2618 case UNLT_EXPR:
2619 return COMPCODE_UNLT;
2620 case UNEQ_EXPR:
2621 return COMPCODE_UNEQ;
2622 case UNLE_EXPR:
2623 return COMPCODE_UNLE;
2624 case UNGT_EXPR:
2625 return COMPCODE_UNGT;
2626 case LTGT_EXPR:
2627 return COMPCODE_LTGT;
2628 case UNGE_EXPR:
2629 return COMPCODE_UNGE;
2630 default:
2631 gcc_unreachable ();
2635 /* Convert a compcode bit-based encoding of a comparison operator back
2636 to GCC's enum tree_code representation. This function is the
2637 inverse of comparison_to_compcode. */
2639 static enum tree_code
2640 compcode_to_comparison (enum comparison_code code)
2642 switch (code)
2644 case COMPCODE_LT:
2645 return LT_EXPR;
2646 case COMPCODE_EQ:
2647 return EQ_EXPR;
2648 case COMPCODE_LE:
2649 return LE_EXPR;
2650 case COMPCODE_GT:
2651 return GT_EXPR;
2652 case COMPCODE_NE:
2653 return NE_EXPR;
2654 case COMPCODE_GE:
2655 return GE_EXPR;
2656 case COMPCODE_ORD:
2657 return ORDERED_EXPR;
2658 case COMPCODE_UNORD:
2659 return UNORDERED_EXPR;
2660 case COMPCODE_UNLT:
2661 return UNLT_EXPR;
2662 case COMPCODE_UNEQ:
2663 return UNEQ_EXPR;
2664 case COMPCODE_UNLE:
2665 return UNLE_EXPR;
2666 case COMPCODE_UNGT:
2667 return UNGT_EXPR;
2668 case COMPCODE_LTGT:
2669 return LTGT_EXPR;
2670 case COMPCODE_UNGE:
2671 return UNGE_EXPR;
2672 default:
2673 gcc_unreachable ();
2677 /* Return a tree for the comparison which is the combination of
2678 doing the AND or OR (depending on CODE) of the two operations LCODE
2679 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2680 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2681 if this makes the transformation invalid. */
2683 tree
2684 combine_comparisons (location_t loc,
2685 enum tree_code code, enum tree_code lcode,
2686 enum tree_code rcode, tree truth_type,
2687 tree ll_arg, tree lr_arg)
2689 bool honor_nans = HONOR_NANS (ll_arg);
2690 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2691 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2692 int compcode;
2694 switch (code)
2696 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2697 compcode = lcompcode & rcompcode;
2698 break;
2700 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2701 compcode = lcompcode | rcompcode;
2702 break;
2704 default:
2705 return NULL_TREE;
2708 if (!honor_nans)
2710 /* Eliminate unordered comparisons, as well as LTGT and ORD
2711 which are not used unless the mode has NaNs. */
2712 compcode &= ~COMPCODE_UNORD;
2713 if (compcode == COMPCODE_LTGT)
2714 compcode = COMPCODE_NE;
2715 else if (compcode == COMPCODE_ORD)
2716 compcode = COMPCODE_TRUE;
2718 else if (flag_trapping_math)
2720 /* Check that the original operation and the optimized ones will trap
2721 under the same condition. */
2722 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2723 && (lcompcode != COMPCODE_EQ)
2724 && (lcompcode != COMPCODE_ORD);
2725 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2726 && (rcompcode != COMPCODE_EQ)
2727 && (rcompcode != COMPCODE_ORD);
2728 bool trap = (compcode & COMPCODE_UNORD) == 0
2729 && (compcode != COMPCODE_EQ)
2730 && (compcode != COMPCODE_ORD);
2732 /* In a short-circuited boolean expression the LHS might be
2733 such that the RHS, if evaluated, will never trap. For
2734 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2735 if neither x nor y is NaN. (This is a mixed blessing: for
2736 example, the expression above will never trap, hence
2737 optimizing it to x < y would be invalid). */
2738 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2739 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2740 rtrap = false;
2742 /* If the comparison was short-circuited, and only the RHS
2743 trapped, we may now generate a spurious trap. */
2744 if (rtrap && !ltrap
2745 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2746 return NULL_TREE;
2748 /* If we changed the conditions that cause a trap, we lose. */
2749 if ((ltrap || rtrap) != trap)
2750 return NULL_TREE;
2753 if (compcode == COMPCODE_TRUE)
2754 return constant_boolean_node (true, truth_type);
2755 else if (compcode == COMPCODE_FALSE)
2756 return constant_boolean_node (false, truth_type);
2757 else
2759 enum tree_code tcode;
2761 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2762 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
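/* Worked example, not part of the original source: because the
   compcode encoding is bit-based, combining (x < y) || (x == y) ORs
   COMPCODE_LT (1) with COMPCODE_EQ (2), giving 3 == COMPCODE_LE, so
   the pair folds to x <= y.  ANDing the same pair instead yields
   COMPCODE_FALSE (0), which folds to a constant false.  */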
2766 /* Return nonzero if two operands (typically of the same tree node)
2767 are necessarily equal. FLAGS modifies behavior as follows:
2769 If OEP_ONLY_CONST is set, only return nonzero for constants.
2770 This function tests whether the operands are indistinguishable;
2771 it does not test whether they are equal using C's == operation.
2772 The distinction is important for IEEE floating point, because
2773 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2774 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2776 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2777 even though it may hold multiple values during a function.
2778 This is because a GCC tree node guarantees that nothing else is
2779 executed between the evaluation of its "operands" (which may often
2780 be evaluated in arbitrary order). Hence if the operands themselves
2781 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2782 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2783 unset means assuming isochronic (or instantaneous) tree equivalence.
2784 Unless comparing arbitrary expression trees, such as from different
2785 statements, this flag can usually be left unset.
2787 If OEP_PURE_SAME is set, then pure functions with identical arguments
2788 are considered the same. It is used when the caller has other ways
2789 to ensure that global memory is unchanged in between.
2791 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2792 not values of expressions.
2794 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2795 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2797 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2798 any operand with side effects. This is unnecessarily conservative in the
2799 case we know that arg0 and arg1 are in disjoint code paths (such as in
2800 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2801 addresses with TREE_CONSTANT flag set so we know that &var == &var
2802 even if var is volatile. */
2804 int
2805 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2807 /* When checking, verify at the outermost operand_equal_p call that
2808 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
2809 hash value. */
2810 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2812 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2814 if (arg0 != arg1)
2816 inchash::hash hstate0 (0), hstate1 (0);
2817 inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2818 inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2819 hashval_t h0 = hstate0.end ();
2820 hashval_t h1 = hstate1.end ();
2821 gcc_assert (h0 == h1);
2823 return 1;
2825 else
2826 return 0;
2829 /* If either is ERROR_MARK, they aren't equal. */
2830 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2831 || TREE_TYPE (arg0) == error_mark_node
2832 || TREE_TYPE (arg1) == error_mark_node)
2833 return 0;
2835 /* Similar, if either does not have a type (like a released SSA name),
2836 they aren't equal. */
2837 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2838 return 0;
2840 /* We cannot consider pointers to different address space equal. */
2841 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2842 && POINTER_TYPE_P (TREE_TYPE (arg1))
2843 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2844 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2845 return 0;
2847 /* Check equality of integer constants before bailing out due to
2848 precision differences. */
2849 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2851 /* Address of INTEGER_CST is not defined; check that we did not forget
2852 to drop the OEP_ADDRESS_OF flags. */
2853 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2854 return tree_int_cst_equal (arg0, arg1);
2857 if (!(flags & OEP_ADDRESS_OF))
2859 /* If the two types don't have the same signedness, then we can't consider
2860 them equal. We must check this before the STRIP_NOPS calls
2861 because they may change the signedness of the arguments. As pointers
2862 strictly don't have a signedness, require either two pointers or
2863 two non-pointers as well. */
2864 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2865 || POINTER_TYPE_P (TREE_TYPE (arg0))
2866 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2867 return 0;
2869 /* If the two types don't have the same precision, then it is not safe
2870 to strip NOPs. */
2871 if (element_precision (TREE_TYPE (arg0))
2872 != element_precision (TREE_TYPE (arg1)))
2873 return 0;
2875 STRIP_NOPS (arg0);
2876 STRIP_NOPS (arg1);
2878 #if 0
2879 /* FIXME: The Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
2880 sanity check once the issue is solved. */
2881 else
2882 /* Addresses of conversions and SSA_NAMEs (and many other things)
2883 are not defined. Check that we did not forget to drop the
2884 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2885 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2886 && TREE_CODE (arg0) != SSA_NAME);
2887 #endif
2889 /* In case both args are comparisons but with different comparison
2890 code, try to swap the comparison operands of one arg to produce
2891 a match and compare that variant. */
2892 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2893 && COMPARISON_CLASS_P (arg0)
2894 && COMPARISON_CLASS_P (arg1))
2896 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2898 if (TREE_CODE (arg0) == swap_code)
2899 return operand_equal_p (TREE_OPERAND (arg0, 0),
2900 TREE_OPERAND (arg1, 1), flags)
2901 && operand_equal_p (TREE_OPERAND (arg0, 1),
2902 TREE_OPERAND (arg1, 0), flags);
2905 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2907 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2908 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2910 else if (flags & OEP_ADDRESS_OF)
2912 /* If we are interested in comparing addresses ignore
2913 MEM_REF wrappings of the base that can appear just for
2914 TBAA reasons. */
2915 if (TREE_CODE (arg0) == MEM_REF
2916 && DECL_P (arg1)
2917 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2918 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2919 && integer_zerop (TREE_OPERAND (arg0, 1)))
2920 return 1;
2921 else if (TREE_CODE (arg1) == MEM_REF
2922 && DECL_P (arg0)
2923 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2924 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2925 && integer_zerop (TREE_OPERAND (arg1, 1)))
2926 return 1;
2927 return 0;
2929 else
2930 return 0;
2933 /* When not checking addresses, this is needed for conversions and for
2934 COMPONENT_REF. Might as well play it safe and always test this. */
2935 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2936 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2937 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2938 && !(flags & OEP_ADDRESS_OF)))
2939 return 0;
2941 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2942 We don't care about side effects in that case because the SAVE_EXPR
2943 takes care of that for us. In all other cases, two expressions are
2944 equal if they have no side effects. If we have two identical
2945 expressions with side effects that should be treated the same due
2946 to the only side effects being identical SAVE_EXPR's, that will
2947 be detected in the recursive calls below.
2948 If we are taking an invariant address of two identical objects
2949 they are necessarily equal as well. */
2950 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2951 && (TREE_CODE (arg0) == SAVE_EXPR
2952 || (flags & OEP_MATCH_SIDE_EFFECTS)
2953 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2954 return 1;
2956 /* Next handle constant cases, those for which we can return 1 even
2957 if ONLY_CONST is set. */
2958 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2959 switch (TREE_CODE (arg0))
2961 case INTEGER_CST:
2962 return tree_int_cst_equal (arg0, arg1);
2964 case FIXED_CST:
2965 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2966 TREE_FIXED_CST (arg1));
2968 case REAL_CST:
2969 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2970 return 1;
2973 if (!HONOR_SIGNED_ZEROS (arg0))
2975 /* If we do not distinguish between signed and unsigned zero,
2976 consider them equal. */
2977 if (real_zerop (arg0) && real_zerop (arg1))
2978 return 1;
2980 return 0;
2982 case VECTOR_CST:
2984 unsigned i;
2986 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2987 return 0;
2989 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2991 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2992 VECTOR_CST_ELT (arg1, i), flags))
2993 return 0;
2995 return 1;
2998 case COMPLEX_CST:
2999 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3000 flags)
3001 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3002 flags));
3004 case STRING_CST:
3005 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3006 && ! memcmp (TREE_STRING_POINTER (arg0),
3007 TREE_STRING_POINTER (arg1),
3008 TREE_STRING_LENGTH (arg0)));
3010 case ADDR_EXPR:
3011 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3012 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3013 flags | OEP_ADDRESS_OF
3014 | OEP_MATCH_SIDE_EFFECTS);
3015 case CONSTRUCTOR:
3016 /* In GIMPLE empty constructors are allowed in initializers of
3017 aggregates. */
3018 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3019 default:
3020 break;
3023 if (flags & OEP_ONLY_CONST)
3024 return 0;
3026 /* Define macros to test an operand from arg0 and arg1 for equality and a
3027 variant that allows null and views null as being different from any
3028 non-null value. In the latter case, if either is null, then both
3029 must be; otherwise, do the normal comparison. */
3030 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3031 TREE_OPERAND (arg1, N), flags)
3033 #define OP_SAME_WITH_NULL(N) \
3034 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3035 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3037 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3039 case tcc_unary:
3040 /* Two conversions are equal only if signedness and modes match. */
3041 switch (TREE_CODE (arg0))
3043 CASE_CONVERT:
3044 case FIX_TRUNC_EXPR:
3045 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3046 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3047 return 0;
3048 break;
3049 default:
3050 break;
3053 return OP_SAME (0);
3056 case tcc_comparison:
3057 case tcc_binary:
3058 if (OP_SAME (0) && OP_SAME (1))
3059 return 1;
3061 /* For commutative ops, allow the other order. */
3062 return (commutative_tree_code (TREE_CODE (arg0))
3063 && operand_equal_p (TREE_OPERAND (arg0, 0),
3064 TREE_OPERAND (arg1, 1), flags)
3065 && operand_equal_p (TREE_OPERAND (arg0, 1),
3066 TREE_OPERAND (arg1, 0), flags));
3068 case tcc_reference:
3069 /* If either of the pointer (or reference) expressions we are
3070 dereferencing contain a side effect, these cannot be equal,
3071 but their addresses can be. */
3072 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3073 && (TREE_SIDE_EFFECTS (arg0)
3074 || TREE_SIDE_EFFECTS (arg1)))
3075 return 0;
3077 switch (TREE_CODE (arg0))
3079 case INDIRECT_REF:
3080 if (!(flags & OEP_ADDRESS_OF)
3081 && (TYPE_ALIGN (TREE_TYPE (arg0))
3082 != TYPE_ALIGN (TREE_TYPE (arg1))))
3083 return 0;
3084 flags &= ~OEP_ADDRESS_OF;
3085 return OP_SAME (0);
3087 case IMAGPART_EXPR:
3088 /* Require the same offset. */
3089 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3090 TYPE_SIZE (TREE_TYPE (arg1)),
3091 flags & ~OEP_ADDRESS_OF))
3092 return 0;
3094 /* Fallthru. */
3095 case REALPART_EXPR:
3096 case VIEW_CONVERT_EXPR:
3097 return OP_SAME (0);
3099 case TARGET_MEM_REF:
3100 case MEM_REF:
3101 if (!(flags & OEP_ADDRESS_OF))
3103 /* Require equal access sizes */
3104 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3105 && (!TYPE_SIZE (TREE_TYPE (arg0))
3106 || !TYPE_SIZE (TREE_TYPE (arg1))
3107 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3108 TYPE_SIZE (TREE_TYPE (arg1)),
3109 flags)))
3110 return 0;
3111 /* Verify that access happens in similar types. */
3112 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3113 return 0;
3114 /* Verify that accesses are TBAA compatible. */
3115 if (!alias_ptr_types_compatible_p
3116 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3117 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3118 || (MR_DEPENDENCE_CLIQUE (arg0)
3119 != MR_DEPENDENCE_CLIQUE (arg1))
3120 || (MR_DEPENDENCE_BASE (arg0)
3121 != MR_DEPENDENCE_BASE (arg1)))
3122 return 0;
3123 /* Verify that alignment is compatible. */
3124 if (TYPE_ALIGN (TREE_TYPE (arg0))
3125 != TYPE_ALIGN (TREE_TYPE (arg1)))
3126 return 0;
3128 flags &= ~OEP_ADDRESS_OF;
3129 return (OP_SAME (0) && OP_SAME (1)
3130 /* TARGET_MEM_REFs require equal extra operands. */
3131 && (TREE_CODE (arg0) != TARGET_MEM_REF
3132 || (OP_SAME_WITH_NULL (2)
3133 && OP_SAME_WITH_NULL (3)
3134 && OP_SAME_WITH_NULL (4))));
3136 case ARRAY_REF:
3137 case ARRAY_RANGE_REF:
3138 if (!OP_SAME (0))
3139 return 0;
3140 flags &= ~OEP_ADDRESS_OF;
3141 /* First compare the array index by value if it is constant, as we
3142 may have different types but the same value here. */
3143 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3144 TREE_OPERAND (arg1, 1))
3145 || OP_SAME (1))
3146 && OP_SAME_WITH_NULL (2)
3147 && OP_SAME_WITH_NULL (3)
3148 /* Compare low bound and element size as with OEP_ADDRESS_OF
3149 we have to account for the offset of the ref. */
3150 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3151 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3152 || (operand_equal_p (array_ref_low_bound
3153 (CONST_CAST_TREE (arg0)),
3154 array_ref_low_bound
3155 (CONST_CAST_TREE (arg1)), flags)
3156 && operand_equal_p (array_ref_element_size
3157 (CONST_CAST_TREE (arg0)),
3158 array_ref_element_size
3159 (CONST_CAST_TREE (arg1)),
3160 flags))));
3162 case COMPONENT_REF:
3163 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3164 may be NULL when we're called to compare MEM_EXPRs. */
3165 if (!OP_SAME_WITH_NULL (0)
3166 || !OP_SAME (1))
3167 return 0;
3168 flags &= ~OEP_ADDRESS_OF;
3169 return OP_SAME_WITH_NULL (2);
3171 case BIT_FIELD_REF:
3172 if (!OP_SAME (0))
3173 return 0;
3174 flags &= ~OEP_ADDRESS_OF;
3175 return OP_SAME (1) && OP_SAME (2);
3177 default:
3178 return 0;
3181 case tcc_expression:
3182 switch (TREE_CODE (arg0))
3184 case ADDR_EXPR:
3185 /* Be sure we pass the right ADDRESS_OF flag. */
3186 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3187 return operand_equal_p (TREE_OPERAND (arg0, 0),
3188 TREE_OPERAND (arg1, 0),
3189 flags | OEP_ADDRESS_OF);
3191 case TRUTH_NOT_EXPR:
3192 return OP_SAME (0);
3194 case TRUTH_ANDIF_EXPR:
3195 case TRUTH_ORIF_EXPR:
3196 return OP_SAME (0) && OP_SAME (1);
3198 case FMA_EXPR:
3199 case WIDEN_MULT_PLUS_EXPR:
3200 case WIDEN_MULT_MINUS_EXPR:
3201 if (!OP_SAME (2))
3202 return 0;
3203 /* The multiplication operands are commutative. */
3204 /* FALLTHRU */
3206 case TRUTH_AND_EXPR:
3207 case TRUTH_OR_EXPR:
3208 case TRUTH_XOR_EXPR:
3209 if (OP_SAME (0) && OP_SAME (1))
3210 return 1;
3212 /* Otherwise take into account this is a commutative operation. */
3213 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3214 TREE_OPERAND (arg1, 1), flags)
3215 && operand_equal_p (TREE_OPERAND (arg0, 1),
3216 TREE_OPERAND (arg1, 0), flags));
3218 case COND_EXPR:
3219 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3220 return 0;
3221 flags &= ~OEP_ADDRESS_OF;
3222 return OP_SAME (0);
3224 case BIT_INSERT_EXPR:
3225 /* BIT_INSERT_EXPR has an implicit operand, namely the type precision
3226 of op1. We need to check that they are the same. */
3227 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3228 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3229 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3230 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3231 return false;
3232 /* FALLTHRU */
3234 case VEC_COND_EXPR:
3235 case DOT_PROD_EXPR:
3236 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3238 case MODIFY_EXPR:
3239 case INIT_EXPR:
3240 case COMPOUND_EXPR:
3241 case PREDECREMENT_EXPR:
3242 case PREINCREMENT_EXPR:
3243 case POSTDECREMENT_EXPR:
3244 case POSTINCREMENT_EXPR:
3245 if (flags & OEP_LEXICOGRAPHIC)
3246 return OP_SAME (0) && OP_SAME (1);
3247 return 0;
3249 case CLEANUP_POINT_EXPR:
3250 case EXPR_STMT:
3251 if (flags & OEP_LEXICOGRAPHIC)
3252 return OP_SAME (0);
3253 return 0;
3255 default:
3256 return 0;
3259 case tcc_vl_exp:
3260 switch (TREE_CODE (arg0))
3262 case CALL_EXPR:
3263 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3264 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3265 /* If the CALL_EXPRs are not both internal or both normal function
3266 calls, then they are not equal. */
3267 return 0;
3268 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3270 /* If the CALL_EXPRs call different internal functions, then they
3271 are not equal. */
3272 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3273 return 0;
3275 else
3277 /* If the CALL_EXPRs call different functions, then they are not
3278 equal. */
3279 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3280 flags))
3281 return 0;
3284 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3286 unsigned int cef = call_expr_flags (arg0);
3287 if (flags & OEP_PURE_SAME)
3288 cef &= ECF_CONST | ECF_PURE;
3289 else
3290 cef &= ECF_CONST;
3291 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3292 return 0;
3295 /* Now see if all the arguments are the same. */
3297 const_call_expr_arg_iterator iter0, iter1;
3298 const_tree a0, a1;
3299 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3300 a1 = first_const_call_expr_arg (arg1, &iter1);
3301 a0 && a1;
3302 a0 = next_const_call_expr_arg (&iter0),
3303 a1 = next_const_call_expr_arg (&iter1))
3304 if (! operand_equal_p (a0, a1, flags))
3305 return 0;
3307 /* If we get here and both argument lists are exhausted
3308 then the CALL_EXPRs are equal. */
3309 return ! (a0 || a1);
3311 default:
3312 return 0;
3315 case tcc_declaration:
3316 /* Consider __builtin_sqrt equal to sqrt. */
3317 return (TREE_CODE (arg0) == FUNCTION_DECL
3318 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3319 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3320 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3322 case tcc_exceptional:
3323 if (TREE_CODE (arg0) == CONSTRUCTOR)
3325 /* In GIMPLE constructors are used only to build vectors from
3326 elements. Individual elements in the constructor must be
3327 indexed in increasing order and form an initial sequence.
3329 We make no effort to compare constructors in GENERIC.
3330 (see sem_variable::equals in ipa-icf which can do so for
3331 constants). */
3332 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3333 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3334 return 0;
3336 /* Be sure that vectors constructed have the same representation.
3337 We have only tested that element precision and modes match.
3338 Vectors may be BLKmode and thus also check that the number of
3339 parts match. */
3340 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3341 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3342 return 0;
3344 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3345 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3346 unsigned int len = vec_safe_length (v0);
3348 if (len != vec_safe_length (v1))
3349 return 0;
3351 for (unsigned int i = 0; i < len; i++)
3353 constructor_elt *c0 = &(*v0)[i];
3354 constructor_elt *c1 = &(*v1)[i];
3356 if (!operand_equal_p (c0->value, c1->value, flags)
3357 /* In GIMPLE the indexes can be either NULL or matching i.
3358 Double check this so we won't get false
3359 positives for GENERIC. */
3360 || (c0->index
3361 && (TREE_CODE (c0->index) != INTEGER_CST
3362 || !compare_tree_int (c0->index, i)))
3363 || (c1->index
3364 && (TREE_CODE (c1->index) != INTEGER_CST
3365 || !compare_tree_int (c1->index, i))))
3366 return 0;
3368 return 1;
3370 else if (TREE_CODE (arg0) == STATEMENT_LIST
3371 && (flags & OEP_LEXICOGRAPHIC))
3373 /* Compare the STATEMENT_LISTs. */
3374 tree_stmt_iterator tsi1, tsi2;
3375 tree body1 = CONST_CAST_TREE (arg0);
3376 tree body2 = CONST_CAST_TREE (arg1);
3377 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3378 tsi_next (&tsi1), tsi_next (&tsi2))
3380 /* The lists don't have the same number of statements. */
3381 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3382 return 0;
3383 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3384 return 1;
3385 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3386 OEP_LEXICOGRAPHIC))
3387 return 0;
3390 return 0;
3392 case tcc_statement:
3393 switch (TREE_CODE (arg0))
3395 case RETURN_EXPR:
3396 if (flags & OEP_LEXICOGRAPHIC)
3397 return OP_SAME_WITH_NULL (0);
3398 return 0;
3399 default:
3400 return 0;
3403 default:
3404 return 0;
3407 #undef OP_SAME
3408 #undef OP_SAME_WITH_NULL
3411 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3412 with a different signedness or a narrower precision. */
3414 static bool
3415 operand_equal_for_comparison_p (tree arg0, tree arg1)
3417 if (operand_equal_p (arg0, arg1, 0))
3418 return true;
3420 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3421 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3422 return false;
3424 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3425 and see if the inner values are the same. This removes any
3426 signedness comparison, which doesn't matter here. */
3427 tree op0 = arg0;
3428 tree op1 = arg1;
3429 STRIP_NOPS (op0);
3430 STRIP_NOPS (op1);
3431 if (operand_equal_p (op0, op1, 0))
3432 return true;
3434 /* Discard a single widening conversion from ARG1 and see if the inner
3435 value is the same as ARG0. */
3436 if (CONVERT_EXPR_P (arg1)
3437 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3438 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3439 < TYPE_PRECISION (TREE_TYPE (arg1))
3440 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3441 return true;
3443 return false;
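/* Illustrative example, not part of the original source: for a char
   variable c, comparing ARG0 = c against ARG1 = (int) c succeeds via
   the final test above, which peels the single widening conversion off
   ARG1 and finds the same inner value.  */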
3446 /* See if ARG is an expression that is either a comparison or is performing
3447 arithmetic on comparisons. The comparisons must only be comparing
3448 two different values, which will be stored in *CVAL1 and *CVAL2; if
3449 they are nonzero it means that some operands have already been found.
3450 No variables may be used anywhere else in the expression except in the
3451 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3452 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3454 If this is true, return 1. Otherwise, return zero. */
3456 static int
3457 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3459 enum tree_code code = TREE_CODE (arg);
3460 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3462 /* We can handle some of the tcc_expression cases here. */
3463 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3464 tclass = tcc_unary;
3465 else if (tclass == tcc_expression
3466 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3467 || code == COMPOUND_EXPR))
3468 tclass = tcc_binary;
3470 else if (tclass == tcc_expression && code == SAVE_EXPR
3471 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3473 /* If we've already found a CVAL1 or CVAL2, this expression is
3474 too complex to handle. */
3475 if (*cval1 || *cval2)
3476 return 0;
3478 tclass = tcc_unary;
3479 *save_p = 1;
3482 switch (tclass)
3484 case tcc_unary:
3485 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3487 case tcc_binary:
3488 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3489 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3490 cval1, cval2, save_p));
3492 case tcc_constant:
3493 return 1;
3495 case tcc_expression:
3496 if (code == COND_EXPR)
3497 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3498 cval1, cval2, save_p)
3499 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3500 cval1, cval2, save_p)
3501 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3502 cval1, cval2, save_p));
3503 return 0;
3505 case tcc_comparison:
3506 /* First see if we can handle the first operand, then the second. For
3507 the second operand, we know *CVAL1 can't be zero. It must be that
3508 one side of the comparison is each of the values; test for the
3509 case where this isn't true by failing if the two operands
3510 are the same. */
3512 if (operand_equal_p (TREE_OPERAND (arg, 0),
3513 TREE_OPERAND (arg, 1), 0))
3514 return 0;
3516 if (*cval1 == 0)
3517 *cval1 = TREE_OPERAND (arg, 0);
3518 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3520 else if (*cval2 == 0)
3521 *cval2 = TREE_OPERAND (arg, 0);
3522 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3524 else
3525 return 0;
3527 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3529 else if (*cval2 == 0)
3530 *cval2 = TREE_OPERAND (arg, 1);
3531 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3533 else
3534 return 0;
3536 return 1;
3538 default:
3539 return 0;
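/* Illustrative example, not part of the original source: for
   ARG = (a < b) | (a == b) the first comparison records *CVAL1 = a and
   *CVAL2 = b, the second comparison matches the same two operands, and
   the function returns 1.  For (a < b) | (a == c) the third distinct
   value c fails to match either slot, so the function returns 0.  */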
3543 /* ARG is a tree that is known to contain just arithmetic operations and
3544 comparisons. Evaluate the operations in the tree substituting NEW0 for
3545 any occurrence of OLD0 as an operand of a comparison and likewise for
3546 NEW1 and OLD1. */
3548 static tree
3549 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3550 tree old1, tree new1)
3552 tree type = TREE_TYPE (arg);
3553 enum tree_code code = TREE_CODE (arg);
3554 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3556 /* We can handle some of the tcc_expression cases here. */
3557 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3558 tclass = tcc_unary;
3559 else if (tclass == tcc_expression
3560 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3561 tclass = tcc_binary;
3563 switch (tclass)
3565 case tcc_unary:
3566 return fold_build1_loc (loc, code, type,
3567 eval_subst (loc, TREE_OPERAND (arg, 0),
3568 old0, new0, old1, new1));
3570 case tcc_binary:
3571 return fold_build2_loc (loc, code, type,
3572 eval_subst (loc, TREE_OPERAND (arg, 0),
3573 old0, new0, old1, new1),
3574 eval_subst (loc, TREE_OPERAND (arg, 1),
3575 old0, new0, old1, new1));
3577 case tcc_expression:
3578 switch (code)
3580 case SAVE_EXPR:
3581 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3582 old1, new1);
3584 case COMPOUND_EXPR:
3585 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3586 old1, new1);
3588 case COND_EXPR:
3589 return fold_build3_loc (loc, code, type,
3590 eval_subst (loc, TREE_OPERAND (arg, 0),
3591 old0, new0, old1, new1),
3592 eval_subst (loc, TREE_OPERAND (arg, 1),
3593 old0, new0, old1, new1),
3594 eval_subst (loc, TREE_OPERAND (arg, 2),
3595 old0, new0, old1, new1));
3596 default:
3597 break;
3599 /* Fall through - ??? */
3601 case tcc_comparison:
3603 tree arg0 = TREE_OPERAND (arg, 0);
3604 tree arg1 = TREE_OPERAND (arg, 1);
3606 /* We need to check both for exact equality and tree equality. The
3607 former will be true if the operand has a side-effect. In that
3608 case, we know the operand occurred exactly once. */
3610 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3611 arg0 = new0;
3612 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3613 arg0 = new1;
3615 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3616 arg1 = new0;
3617 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3618 arg1 = new1;
3620 return fold_build2_loc (loc, code, type, arg0, arg1);
3623 default:
3624 return arg;
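/* Illustrative example, not part of the original source: with OLD0 = a,
   NEW0 = 0, OLD1 = b and NEW1 = 1, eval_subst rewrites the tree for
   (a < b) && (b == a) into (0 < 1) && (1 == 0), which then folds to a
   constant.  Only comparison operands are substituted; the surrounding
   structure is rebuilt unchanged.  */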
3628 /* Return a tree for the case when the result of an expression is RESULT
3629 converted to TYPE and OMITTED was previously an operand of the expression
3630 but is now not needed (e.g., we folded OMITTED * 0).
3632 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3633 the conversion of RESULT to TYPE. */
3635 tree
3636 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3638 tree t = fold_convert_loc (loc, type, result);
3640 /* If the resulting operand is an empty statement, just return the omitted
3641 statement cast to void. */
3642 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3643 return build1_loc (loc, NOP_EXPR, void_type_node,
3644 fold_ignored_result (omitted));
3646 if (TREE_SIDE_EFFECTS (omitted))
3647 return build2_loc (loc, COMPOUND_EXPR, type,
3648 fold_ignored_result (omitted), t);
3650 return non_lvalue_loc (loc, t);
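/* Illustrative example, not part of the original source: when folding
   the integer expression f () * 0, the result is 0 but the call must
   still execute, so omit_one_operand_loc yields the COMPOUND_EXPR
   (f (), 0) rather than the bare constant.  An omitted operand without
   side effects is dropped entirely.  */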
3653 /* Return a tree for the case when the result of an expression is RESULT
3654 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3655 of the expression but are now not needed.
3657 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3658 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3659 evaluated before OMITTED2. Otherwise, if neither has side effects,
3660 just do the conversion of RESULT to TYPE. */
3662 tree
3663 omit_two_operands_loc (location_t loc, tree type, tree result,
3664 tree omitted1, tree omitted2)
3666 tree t = fold_convert_loc (loc, type, result);
3668 if (TREE_SIDE_EFFECTS (omitted2))
3669 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3670 if (TREE_SIDE_EFFECTS (omitted1))
3671 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3673 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3677 /* Return a simplified tree node for the truth-negation of ARG. This
3678 never alters ARG itself. We assume that ARG is an operation that
3679 returns a truth value (0 or 1).
3681 FIXME: one would think we would fold the result, but it causes
3682 problems with the dominator optimizer. */
3684 static tree
3685 fold_truth_not_expr (location_t loc, tree arg)
3687 tree type = TREE_TYPE (arg);
3688 enum tree_code code = TREE_CODE (arg);
3689 location_t loc1, loc2;
3691 /* If this is a comparison, we can simply invert it, except for
3692 floating-point non-equality comparisons, in which case we just
3693 enclose a TRUTH_NOT_EXPR around what we have. */
3695 if (TREE_CODE_CLASS (code) == tcc_comparison)
3697 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3698 if (FLOAT_TYPE_P (op_type)
3699 && flag_trapping_math
3700 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3701 && code != NE_EXPR && code != EQ_EXPR)
3702 return NULL_TREE;
3704 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3705 if (code == ERROR_MARK)
3706 return NULL_TREE;
3708 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3709 TREE_OPERAND (arg, 1));
3710 if (TREE_NO_WARNING (arg))
3711 TREE_NO_WARNING (ret) = 1;
3712 return ret;
3715 switch (code)
3717 case INTEGER_CST:
3718 return constant_boolean_node (integer_zerop (arg), type);
3720 case TRUTH_AND_EXPR:
3721 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3722 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3723 return build2_loc (loc, TRUTH_OR_EXPR, type,
3724 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3725 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3727 case TRUTH_OR_EXPR:
3728 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3729 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3730 return build2_loc (loc, TRUTH_AND_EXPR, type,
3731 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3732 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3734 case TRUTH_XOR_EXPR:
3735 /* Here we can invert either operand. We invert the first operand
3736 unless the second operand is a TRUTH_NOT_EXPR in which case our
3737 result is the XOR of the first operand with the inside of the
3738 negation of the second operand. */
3740 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3741 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3742 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3743 else
3744 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3745 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3746 TREE_OPERAND (arg, 1));
3748 case TRUTH_ANDIF_EXPR:
3749 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3750 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3751 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3752 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3753 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3755 case TRUTH_ORIF_EXPR:
3756 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3757 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3758 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3759 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3760 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3762 case TRUTH_NOT_EXPR:
3763 return TREE_OPERAND (arg, 0);
3765 case COND_EXPR:
3767 tree arg1 = TREE_OPERAND (arg, 1);
3768 tree arg2 = TREE_OPERAND (arg, 2);
3770 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3771 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3773 /* A COND_EXPR may have a throw as one operand, which
3774 then has void type. Just leave void operands
3775 as they are. */
3776 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3777 VOID_TYPE_P (TREE_TYPE (arg1))
3778 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3779 VOID_TYPE_P (TREE_TYPE (arg2))
3780 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3783 case COMPOUND_EXPR:
3784 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3785 return build2_loc (loc, COMPOUND_EXPR, type,
3786 TREE_OPERAND (arg, 0),
3787 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3789 case NON_LVALUE_EXPR:
3790 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3791 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3793 CASE_CONVERT:
3794 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3795 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3797 /* fall through */
3799 case FLOAT_EXPR:
3800 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3801 return build1_loc (loc, TREE_CODE (arg), type,
3802 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3804 case BIT_AND_EXPR:
3805 if (!integer_onep (TREE_OPERAND (arg, 1)))
3806 return NULL_TREE;
3807 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3809 case SAVE_EXPR:
3810 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3812 case CLEANUP_POINT_EXPR:
3813 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3814 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3815 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3817 default:
3818 return NULL_TREE;
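/* Illustrative note, not part of the original source: the
   TRUTH_AND_EXPR and TRUTH_OR_EXPR cases above are De Morgan's laws in
   tree form -- !(a && b) becomes !a || !b, and !(a || b) becomes
   !a && !b -- with each inverted operand handed back to
   invert_truthvalue_loc so that nested negations keep simplifying.  */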
3822 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3823 assume that ARG is an operation that returns a truth value (0 or 1
3824 for scalars, 0 or -1 for vectors). Return the folded expression if
3825 folding is successful. Otherwise, return NULL_TREE. */
3827 static tree
3828 fold_invert_truthvalue (location_t loc, tree arg)
3830 tree type = TREE_TYPE (arg);
3831 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3832 ? BIT_NOT_EXPR
3833 : TRUTH_NOT_EXPR,
3834 type, arg);
3837 /* Return a simplified tree node for the truth-negation of ARG. This
3838 never alters ARG itself. We assume that ARG is an operation that
3839 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3841 tree
3842 invert_truthvalue_loc (location_t loc, tree arg)
3844 if (TREE_CODE (arg) == ERROR_MARK)
3845 return arg;
3847 tree type = TREE_TYPE (arg);
3848 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3849 ? BIT_NOT_EXPR
3850 : TRUTH_NOT_EXPR,
3851 type, arg);
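/* For illustration: for a scalar comparison such as "a < b",
   invert_truthvalue_loc builds a TRUTH_NOT_EXPR whose folding yields
   "a >= b" (or an unordered variant when NaNs must be honored), while
   for vector truth values, whose elements are 0 or -1, BIT_NOT_EXPR is
   the correct negation because ~0 == -1 and ~(-1) == 0.  */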
3854 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3855 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3856 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
3857 is the original memory reference used to preserve the alias set of
3858 the access. */
3860 static tree
3861 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3862 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3863 int unsignedp, int reversep)
3865 tree result, bftype;
3867 /* Attempt not to lose the access path if possible. */
3868 if (TREE_CODE (orig_inner) == COMPONENT_REF)
3870 tree ninner = TREE_OPERAND (orig_inner, 0);
3871 machine_mode nmode;
3872 HOST_WIDE_INT nbitsize, nbitpos;
3873 tree noffset;
3874 int nunsignedp, nreversep, nvolatilep = 0;
3875 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
3876 &noffset, &nmode, &nunsignedp,
3877 &nreversep, &nvolatilep);
3878 if (base == inner
3879 && noffset == NULL_TREE
3880 && nbitsize >= bitsize
3881 && nbitpos <= bitpos
3882 && bitpos + bitsize <= nbitpos + nbitsize
3883 && !reversep
3884 && !nreversep
3885 && !nvolatilep)
3887 inner = ninner;
3888 bitpos -= nbitpos;
3892 alias_set_type iset = get_alias_set (orig_inner);
3893 if (iset == 0 && get_alias_set (inner) != iset)
3894 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
3895 build_fold_addr_expr (inner),
3896 build_int_cst (ptr_type_node, 0));
3898 if (bitpos == 0 && !reversep)
3900 tree size = TYPE_SIZE (TREE_TYPE (inner));
3901 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3902 || POINTER_TYPE_P (TREE_TYPE (inner)))
3903 && tree_fits_shwi_p (size)
3904 && tree_to_shwi (size) == bitsize)
3905 return fold_convert_loc (loc, type, inner);
3908 bftype = type;
3909 if (TYPE_PRECISION (bftype) != bitsize
3910 || TYPE_UNSIGNED (bftype) == !unsignedp)
3911 bftype = build_nonstandard_integer_type (bitsize, 0);
3913 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3914 bitsize_int (bitsize), bitsize_int (bitpos));
3915 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3917 if (bftype != type)
3918 result = fold_convert_loc (loc, type, result);
3920 return result;
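/* For illustration: a call like
     make_bit_field_ref (loc, inner, orig_inner, type, 8, 16, 1, 0)
   builds the equivalent of BIT_FIELD_REF <inner, 8, 16>, an unsigned
   read of the 8 bits starting at bit 16 of INNER, converted to TYPE if
   the intermediate 8-bit field type differs from it.  */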
3923 /* Optimize a bit-field compare.
3925 There are two cases: First is a compare against a constant and the
3926 second is a comparison of two items where the fields are at the same
3927 bit position relative to the start of a chunk (byte, halfword, word)
3928 large enough to contain it. In these cases we can avoid the shift
3929 implicit in bitfield extractions.
3931 For constants, we emit a compare of the shifted constant with the
3932 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3933 compared. For two fields at the same position, we do the ANDs with the
3934 similar mask and compare the result of the ANDs.
3936 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3937 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3938 are the left and right operands of the comparison, respectively.
3940 If the optimization described above can be done, we return the resulting
3941 tree. Otherwise we return zero. */
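/* For illustration, a sketch of the constant case: given
     struct S { unsigned a : 4; unsigned b : 4; } s;
   the test "s.b == 3" can become, on a little-endian target, roughly
     (*(unsigned char *) &s & 0xf0) == (3 << 4)
   i.e. one byte load, one AND and one compare, with no shift needed to
   extract the field.  */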
3943 static tree
3944 optimize_bit_field_compare (location_t loc, enum tree_code code,
3945 tree compare_type, tree lhs, tree rhs)
3947 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3948 tree type = TREE_TYPE (lhs);
3949 tree unsigned_type;
3950 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3951 machine_mode lmode, rmode;
3952 scalar_int_mode nmode;
3953 int lunsignedp, runsignedp;
3954 int lreversep, rreversep;
3955 int lvolatilep = 0, rvolatilep = 0;
3956 tree linner, rinner = NULL_TREE;
3957 tree mask;
3958 tree offset;
3960 /* Get all the information about the extractions being done. If the bit size
3961 is the same as the size of the underlying object, we aren't doing an
3962 extraction at all and so can do nothing. We also don't want to
3963 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3964 then will no longer be able to replace it. */
3965 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3966 &lunsignedp, &lreversep, &lvolatilep);
3967 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3968 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3969 return 0;
3971 if (const_p)
3972 rreversep = lreversep;
3973 else
3975 /* If this is not a constant, we can only do something if bit positions,
3976 sizes, signedness and storage order are the same. */
3977 rinner
3978 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3979 &runsignedp, &rreversep, &rvolatilep);
3981 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3982 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3983 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3984 return 0;
3987 /* Honor the C++ memory model and mimic what RTL expansion does. */
3988 unsigned HOST_WIDE_INT bitstart = 0;
3989 unsigned HOST_WIDE_INT bitend = 0;
3990 if (TREE_CODE (lhs) == COMPONENT_REF)
3992 get_bit_range (&bitstart, &bitend, lhs, &lbitpos, &offset);
3993 if (offset != NULL_TREE)
3994 return 0;
3997 /* See if we can find a mode to refer to this field. We should be able to,
3998 but fail if we can't. */
3999 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4000 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4001 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4002 TYPE_ALIGN (TREE_TYPE (rinner))),
4003 BITS_PER_WORD, false, &nmode))
4004 return 0;
4006 /* Set an unsigned type of the precision of this mode for the
4007 shifts below. */
4008 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4010 /* Compute the bit position and size for the new reference and our offset
4011 within it. If the new reference is the same size as the original, we
4012 won't optimize anything, so return zero. */
4013 nbitsize = GET_MODE_BITSIZE (nmode);
4014 nbitpos = lbitpos & ~ (nbitsize - 1);
4015 lbitpos -= nbitpos;
4016 if (nbitsize == lbitsize)
4017 return 0;
4019 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4020 lbitpos = nbitsize - lbitsize - lbitpos;
4022 /* Make the mask to be used against the extracted field. */
4023 mask = build_int_cst_type (unsigned_type, -1);
4024 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4025 mask = const_binop (RSHIFT_EXPR, mask,
4026 size_int (nbitsize - lbitsize - lbitpos));
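  /* For instance, with nbitsize == 32, lbitsize == 3 and lbitpos == 8:
     shifting -1 left by 29 gives 0xe0000000, and the logical right
     shift by 21 then gives 0x00000700, i.e. ones exactly over the
     three bits 8..10 occupied by the field.  */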
4028 if (! const_p)
4030 if (nbitpos < 0)
4031 return 0;
4033 /* If not comparing with constant, just rework the comparison
4034 and return. */
4035 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4036 nbitsize, nbitpos, 1, lreversep);
4037 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4038 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4039 nbitsize, nbitpos, 1, rreversep);
4040 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4041 return fold_build2_loc (loc, code, compare_type, t1, t2);
4044 /* Otherwise, we are handling the constant case. See if the constant is too
4045 big for the field. Warn and return a tree for 0 (false) if so. We do
4046 this not only for its own sake, but to avoid having to test for this
4047 error case below. If we didn't, we might generate wrong code.
4049 For unsigned fields, the constant shifted right by the field length should
4050 be all zero. For signed fields, the high-order bits should agree with
4051 the sign bit. */
4053 if (lunsignedp)
4055 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4057 warning (0, "comparison is always %d due to width of bit-field",
4058 code == NE_EXPR);
4059 return constant_boolean_node (code == NE_EXPR, compare_type);
4062 else
4064 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4065 if (tem != 0 && tem != -1)
4067 warning (0, "comparison is always %d due to width of bit-field",
4068 code == NE_EXPR);
4069 return constant_boolean_node (code == NE_EXPR, compare_type);
4073 if (nbitpos < 0)
4074 return 0;
4076 /* Single-bit compares should always be against zero. */
4077 if (lbitsize == 1 && ! integer_zerop (rhs))
4079 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4080 rhs = build_int_cst (type, 0);
4083 /* Make a new bitfield reference, shift the constant over the
4084 appropriate number of bits and mask it with the computed mask
4085 (in case this was a signed field). If we changed it, make a new one. */
4086 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4087 nbitsize, nbitpos, 1, lreversep);
4089 rhs = const_binop (BIT_AND_EXPR,
4090 const_binop (LSHIFT_EXPR,
4091 fold_convert_loc (loc, unsigned_type, rhs),
4092 size_int (lbitpos)),
4093 mask);
4095 lhs = build2_loc (loc, code, compare_type,
4096 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4097 return lhs;
4100 /* Subroutine for fold_truth_andor_1: decode a field reference.
4102 If EXP is a comparison reference, we return the innermost reference.
4104 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4105 set to the starting bit number.
4107 If the innermost field can be completely contained in a mode-sized
4108 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4110 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4111 otherwise it is not changed.
4113 *PUNSIGNEDP is set to the signedness of the field.
4115 *PREVERSEP is set to the storage order of the field.
4117 *PMASK is set to the mask used. This is either contained in a
4118 BIT_AND_EXPR or derived from the width of the field.
4120 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4122 Return 0 if this is not a component reference or is one that we can't
4123 do anything with. */
4125 static tree
4126 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4127 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4128 int *punsignedp, int *preversep, int *pvolatilep,
4129 tree *pmask, tree *pand_mask)
4131 tree exp = *exp_;
4132 tree outer_type = 0;
4133 tree and_mask = 0;
4134 tree mask, inner, offset;
4135 tree unsigned_type;
4136 unsigned int precision;
4138 /* All the optimizations using this function assume integer fields.
4139 There are problems with FP fields since the type_for_size call
4140 below can fail for, e.g., XFmode. */
4141 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4142 return 0;
4144 /* We are interested in the bare arrangement of bits, so strip everything
4145 that doesn't affect the machine mode. However, record the type of the
4146 outermost expression if it may matter below. */
4147 if (CONVERT_EXPR_P (exp)
4148 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4149 outer_type = TREE_TYPE (exp);
4150 STRIP_NOPS (exp);
4152 if (TREE_CODE (exp) == BIT_AND_EXPR)
4154 and_mask = TREE_OPERAND (exp, 1);
4155 exp = TREE_OPERAND (exp, 0);
4156 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4157 if (TREE_CODE (and_mask) != INTEGER_CST)
4158 return 0;
4161 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4162 punsignedp, preversep, pvolatilep);
4163 if ((inner == exp && and_mask == 0)
4164 || *pbitsize < 0 || offset != 0
4165 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4166 /* Reject out-of-bound accesses (PR79731). */
4167 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4168 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4169 *pbitpos + *pbitsize) < 0))
4170 return 0;
4172 *exp_ = exp;
4174 /* If the number of bits in the reference is the same as the bitsize of
4175 the outer type, then the outer type gives the signedness. Otherwise
4176 (in case of a small bitfield) the signedness is unchanged. */
4177 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4178 *punsignedp = TYPE_UNSIGNED (outer_type);
4180 /* Compute the mask to access the bitfield. */
4181 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4182 precision = TYPE_PRECISION (unsigned_type);
4184 mask = build_int_cst_type (unsigned_type, -1);
4186 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4187 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4189 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4190 if (and_mask != 0)
4191 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4192 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4194 *pmask = mask;
4195 *pand_mask = and_mask;
4196 return inner;
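/* For illustration: for a field declared "unsigned x : 4" and
   EXP == "s.x & 3", the BIT_AND_EXPR is stripped so that *PAND_MASK
   becomes 3, get_inner_reference locates the 4-bit field, and *PMASK
   becomes the 4-bit field mask 0xf ANDed with 3, i.e. 3 in the 4-bit
   unsigned type; the returned tree is the containing object S.  */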
4199 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4200 bit positions and the type of MASK is signed.  */
4202 static int
4203 all_ones_mask_p (const_tree mask, unsigned int size)
4205 tree type = TREE_TYPE (mask);
4206 unsigned int precision = TYPE_PRECISION (type);
4208 /* If this function returns true when the type of the mask is
4209 UNSIGNED, then there will be errors. In particular see
4210 gcc.c-torture/execute/990326-1.c. There does not appear to be
4211 any documentation paper trail as to why this is so.  But the
4212 pre-wide-int code worked with that restriction and it has been preserved
4213 here. */
4214 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4215 return false;
4217 return wi::mask (size, false, precision) == wi::to_wide (mask);
4220 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4221 represents the sign bit of EXP's type. If EXP represents a sign
4222 or zero extension, also test VAL against the unextended type.
4223 The return value is the (sub)expression whose sign bit is VAL,
4224 or NULL_TREE otherwise. */
4226 tree
4227 sign_bit_p (tree exp, const_tree val)
4229 int width;
4230 tree t;
4232 /* Tree EXP must have an integral type. */
4233 t = TREE_TYPE (exp);
4234 if (! INTEGRAL_TYPE_P (t))
4235 return NULL_TREE;
4237 /* Tree VAL must be an integer constant. */
4238 if (TREE_CODE (val) != INTEGER_CST
4239 || TREE_OVERFLOW (val))
4240 return NULL_TREE;
4242 width = TYPE_PRECISION (t);
4243 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4244 return exp;
4246 /* Handle extension from a narrower type. */
4247 if (TREE_CODE (exp) == NOP_EXPR
4248 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4249 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4251 return NULL_TREE;
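/* For example, with 32-bit int, VAL == 0x80000000 makes sign_bit_p
   return EXP itself; for EXP == (int) c with C a signed char,
   VAL == 0x80 makes it return the inner expression C, since 0x80 is
   the sign bit of the narrower 8-bit type.  */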
4254 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4255 to be evaluated unconditionally. */
4257 static int
4258 simple_operand_p (const_tree exp)
4260 /* Strip any conversions that don't change the machine mode. */
4261 STRIP_NOPS (exp);
4263 return (CONSTANT_CLASS_P (exp)
4264 || TREE_CODE (exp) == SSA_NAME
4265 || (DECL_P (exp)
4266 && ! TREE_ADDRESSABLE (exp)
4267 && ! TREE_THIS_VOLATILE (exp)
4268 && ! DECL_NONLOCAL (exp)
4269 /* Don't regard global variables as simple. They may be
4270 allocated in ways unknown to the compiler (shared memory,
4271 #pragma weak, etc). */
4272 && ! TREE_PUBLIC (exp)
4273 && ! DECL_EXTERNAL (exp)
4274 /* Weakrefs are not safe to be read, since they can be NULL.
4275 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4276 have DECL_WEAK flag set. */
4277 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4278 /* Loading a static variable is unduly expensive, but global
4279 registers aren't expensive. */
4280 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4283 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4284 to be evaluated unconditionally.
4285 In addition to simple_operand_p, we assume that comparisons, conversions,
4286 and logic-not operations are simple, if their operands are simple, too. */
4288 static bool
4289 simple_operand_p_2 (tree exp)
4291 enum tree_code code;
4293 if (TREE_SIDE_EFFECTS (exp)
4294 || tree_could_trap_p (exp))
4295 return false;
4297 while (CONVERT_EXPR_P (exp))
4298 exp = TREE_OPERAND (exp, 0);
4300 code = TREE_CODE (exp);
4302 if (TREE_CODE_CLASS (code) == tcc_comparison)
4303 return (simple_operand_p (TREE_OPERAND (exp, 0))
4304 && simple_operand_p (TREE_OPERAND (exp, 1)));
4306 if (code == TRUTH_NOT_EXPR)
4307 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4309 return simple_operand_p (exp);
4313 /* The following functions are subroutines to fold_range_test and allow it to
4314 try to change a logical combination of comparisons into a range test.
4316 For example, both
4317 X == 2 || X == 3 || X == 4 || X == 5
4318 and
4319 X >= 2 && X <= 5
4320 are converted to
4321 (unsigned) (X - 2) <= 3
4323 We describe each set of comparisons as being either inside or outside
4324 a range, using a variable named like IN_P, and then describe the
4325 range with a lower and upper bound. If one of the bounds is omitted,
4326 it represents either the highest or lowest value of the type.
4328 In the comments below, we represent a range by two numbers in brackets
4329 preceded by a "+" to designate being inside that range, or a "-" to
4330 designate being outside that range, so the condition can be inverted by
4331 flipping the prefix. An omitted bound is represented by a "-". For
4332 example, "- [-, 10]" means being outside the range starting at the lowest
4333 possible value and ending at 10, in other words, being greater than 10.
4334 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4335 always false.
4337 We set up things so that the missing bounds are handled in a consistent
4338 manner so neither a missing bound nor "true" and "false" need to be
4339 handled using a special case. */
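/* For instance, "X >= 2 && X <= 5" is the range + [2, 5], its negation
   "X < 2 || X > 5" is - [2, 5], and testing + [2, 5] compiles to the
   single unsigned comparison (unsigned) (X - 2) <= 3, because the
   subtraction wraps values below 2 around to very large ones.  */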
4341 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4342 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4343 and UPPER1_P are nonzero if the respective argument is an upper bound
4344 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4345 must be specified for a comparison. ARG1 will be converted to ARG0's
4346 type if both are specified. */
4348 static tree
4349 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4350 tree arg1, int upper1_p)
4352 tree tem;
4353 int result;
4354 int sgn0, sgn1;
4356 /* If neither arg represents infinity, do the normal operation.
4357 Else, if not a comparison, return infinity. Else handle the special
4358 comparison rules. Note that most of the cases below won't occur, but
4359 are handled for consistency. */
4361 if (arg0 != 0 && arg1 != 0)
4363 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4364 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4365 STRIP_NOPS (tem);
4366 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4369 if (TREE_CODE_CLASS (code) != tcc_comparison)
4370 return 0;
4372 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4373 for neither. In real maths, we cannot assume open ended ranges are
4374 the same. But, this is computer arithmetic, where numbers are finite.
4375 We can therefore treat a missing bound as an infinite value: below every
4376 representable number for a lower bound and above every one for an upper
4377 bound.  This permits us to treat matching unbounded ends as equal.  */
4378 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4379 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4380 switch (code)
4382 case EQ_EXPR:
4383 result = sgn0 == sgn1;
4384 break;
4385 case NE_EXPR:
4386 result = sgn0 != sgn1;
4387 break;
4388 case LT_EXPR:
4389 result = sgn0 < sgn1;
4390 break;
4391 case LE_EXPR:
4392 result = sgn0 <= sgn1;
4393 break;
4394 case GT_EXPR:
4395 result = sgn0 > sgn1;
4396 break;
4397 case GE_EXPR:
4398 result = sgn0 >= sgn1;
4399 break;
4400 default:
4401 gcc_unreachable ();
4404 return constant_boolean_node (result, type);
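/* For instance, when both arguments are omitted bounds, an upper bound
   (SGN == 1, "plus infinity") compared with a lower bound (SGN == -1,
   "minus infinity") under LE_EXPR yields false, while two omitted upper
   bounds compare equal; this is what lets missing bounds flow through
   the same code paths as ordinary constants.  */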
4407 /* Helper routine for make_range. Perform one step for it, return
4408 new expression if the loop should continue or NULL_TREE if it should
4409 stop. */
4411 tree
4412 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4413 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4414 bool *strict_overflow_p)
4416 tree arg0_type = TREE_TYPE (arg0);
4417 tree n_low, n_high, low = *p_low, high = *p_high;
4418 int in_p = *p_in_p, n_in_p;
4420 switch (code)
4422 case TRUTH_NOT_EXPR:
4423 /* We can only do something if the range is testing for zero. */
4424 if (low == NULL_TREE || high == NULL_TREE
4425 || ! integer_zerop (low) || ! integer_zerop (high))
4426 return NULL_TREE;
4427 *p_in_p = ! in_p;
4428 return arg0;
4430 case EQ_EXPR: case NE_EXPR:
4431 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4432 /* We can only do something if the range is testing for zero
4433 and if the second operand is an integer constant. Note that
4434 saying something is "in" the range we make is done by
4435 complementing IN_P since it will set in the initial case of
4436 being not equal to zero; "out" is leaving it alone. */
4437 if (low == NULL_TREE || high == NULL_TREE
4438 || ! integer_zerop (low) || ! integer_zerop (high)
4439 || TREE_CODE (arg1) != INTEGER_CST)
4440 return NULL_TREE;
4442 switch (code)
4444 case NE_EXPR: /* - [c, c] */
4445 low = high = arg1;
4446 break;
4447 case EQ_EXPR: /* + [c, c] */
4448 in_p = ! in_p, low = high = arg1;
4449 break;
4450 case GT_EXPR: /* - [-, c] */
4451 low = 0, high = arg1;
4452 break;
4453 case GE_EXPR: /* + [c, -] */
4454 in_p = ! in_p, low = arg1, high = 0;
4455 break;
4456 case LT_EXPR: /* - [c, -] */
4457 low = arg1, high = 0;
4458 break;
4459 case LE_EXPR: /* + [-, c] */
4460 in_p = ! in_p, low = 0, high = arg1;
4461 break;
4462 default:
4463 gcc_unreachable ();
4466 /* If this is an unsigned comparison, we also know that EXP is
4467 greater than or equal to zero. We base the range tests we make
4468 on that fact, so we record it here so we can parse existing
4469 range tests. We test arg0_type since often the return type
4470 of, e.g. EQ_EXPR, is boolean. */
4471 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4473 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4474 in_p, low, high, 1,
4475 build_int_cst (arg0_type, 0),
4476 NULL_TREE))
4477 return NULL_TREE;
4479 in_p = n_in_p, low = n_low, high = n_high;
4481 /* If the high bound is missing, but we have a nonzero low
4482 bound, reverse the range so it goes from zero to the low bound
4483 minus 1. */
4484 if (high == 0 && low && ! integer_zerop (low))
4486 in_p = ! in_p;
4487 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4488 build_int_cst (TREE_TYPE (low), 1), 0);
4489 low = build_int_cst (arg0_type, 0);
4493 *p_low = low;
4494 *p_high = high;
4495 *p_in_p = in_p;
4496 return arg0;
4498 case NEGATE_EXPR:
4499 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4500 low and high are non-NULL, then normalize will DTRT. */
4501 if (!TYPE_UNSIGNED (arg0_type)
4502 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4504 if (low == NULL_TREE)
4505 low = TYPE_MIN_VALUE (arg0_type);
4506 if (high == NULL_TREE)
4507 high = TYPE_MAX_VALUE (arg0_type);
4510 /* (-x) IN [a,b] -> x in [-b, -a] */
4511 n_low = range_binop (MINUS_EXPR, exp_type,
4512 build_int_cst (exp_type, 0),
4513 0, high, 1);
4514 n_high = range_binop (MINUS_EXPR, exp_type,
4515 build_int_cst (exp_type, 0),
4516 0, low, 0);
4517 if (n_high != 0 && TREE_OVERFLOW (n_high))
4518 return NULL_TREE;
4519 goto normalize;
4521 case BIT_NOT_EXPR:
4522 /* ~ X -> -X - 1 */
4523 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4524 build_int_cst (exp_type, 1));
4526 case PLUS_EXPR:
4527 case MINUS_EXPR:
4528 if (TREE_CODE (arg1) != INTEGER_CST)
4529 return NULL_TREE;
4531 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4532 move a constant to the other side. */
4533 if (!TYPE_UNSIGNED (arg0_type)
4534 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4535 return NULL_TREE;
4537 /* If EXP is signed, any overflow in the computation is undefined,
4538 so we don't worry about it so long as our computations on
4539 the bounds don't overflow. For unsigned, overflow is defined
4540 and this is exactly the right thing. */
4541 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4542 arg0_type, low, 0, arg1, 0);
4543 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4544 arg0_type, high, 1, arg1, 0);
4545 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4546 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4547 return NULL_TREE;
4549 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4550 *strict_overflow_p = true;
4552 normalize:
4553 /* Check for an unsigned range which has wrapped around the maximum
4554 value thus making n_high < n_low, and normalize it. */
4555 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4557 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4558 build_int_cst (TREE_TYPE (n_high), 1), 0);
4559 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4560 build_int_cst (TREE_TYPE (n_low), 1), 0);
4562 /* If the range is of the form +/- [ x+1, x ], we won't
4563 be able to normalize it. But then, it represents the
4564 whole range or the empty set, so make it
4565 +/- [ -, - ]. */
4566 if (tree_int_cst_equal (n_low, low)
4567 && tree_int_cst_equal (n_high, high))
4568 low = high = 0;
4569 else
4570 in_p = ! in_p;
4572 else
4573 low = n_low, high = n_high;
4575 *p_low = low;
4576 *p_high = high;
4577 *p_in_p = in_p;
4578 return arg0;
4580 CASE_CONVERT:
4581 case NON_LVALUE_EXPR:
4582 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4583 return NULL_TREE;
4585 if (! INTEGRAL_TYPE_P (arg0_type)
4586 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4587 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4588 return NULL_TREE;
4590 n_low = low, n_high = high;
4592 if (n_low != 0)
4593 n_low = fold_convert_loc (loc, arg0_type, n_low);
4595 if (n_high != 0)
4596 n_high = fold_convert_loc (loc, arg0_type, n_high);
4598 /* If we're converting arg0, which has an unsigned type, to the
4599 signed type of exp, we will be doing the comparison as unsigned.
4600 The tests above have already verified that LOW and HIGH
4601 are both positive.
4603 So we have to ensure that we will handle large unsigned
4604 values the same way that the current signed bounds treat
4605 negative values. */
4607 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4609 tree high_positive;
4610 tree equiv_type;
4611 /* For fixed-point modes, we need to pass the saturating flag
4612 as the 2nd parameter. */
4613 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4614 equiv_type
4615 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4616 TYPE_SATURATING (arg0_type));
4617 else
4618 equiv_type
4619 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4621 /* A range without an upper bound is, naturally, unbounded.
4622 Since convert would have cropped a very large value, use
4623 the max value for the destination type. */
4624 high_positive
4625 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4626 : TYPE_MAX_VALUE (arg0_type);
4628 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4629 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4630 fold_convert_loc (loc, arg0_type,
4631 high_positive),
4632 build_int_cst (arg0_type, 1));
4634 /* If the low bound is specified, "and" the range with the
4635 range for which the original unsigned value will be
4636 positive. */
4637 if (low != 0)
4639 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4640 1, fold_convert_loc (loc, arg0_type,
4641 integer_zero_node),
4642 high_positive))
4643 return NULL_TREE;
4645 in_p = (n_in_p == in_p);
4647 else
4649 /* Otherwise, "or" the range with the range of the input
4650 that will be interpreted as negative. */
4651 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4652 1, fold_convert_loc (loc, arg0_type,
4653 integer_zero_node),
4654 high_positive))
4655 return NULL_TREE;
4657 in_p = (in_p != n_in_p);
4661 *p_low = n_low;
4662 *p_high = n_high;
4663 *p_in_p = in_p;
4664 return arg0;
4666 default:
4667 return NULL_TREE;
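/* For illustration: if the range + [0, 19] has been computed for
   EXP == "x + 10", the PLUS_EXPR case above moves the constant to the
   bounds and refines the range of "x" to + [-10, 9]; for signed "x"
   this is valid only because overflow is undefined, which is why
   *STRICT_OVERFLOW_P is set.  */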
4671 /* Given EXP, a logical expression, set the range it is testing into
4672 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4673 actually being tested. *PLOW and *PHIGH will be made of the same
4674 type as the returned expression. If EXP is not a comparison, we
4675 will most likely not be returning a useful value and range. Set
4676 *STRICT_OVERFLOW_P to true if the return value is only valid
4677 because signed overflow is undefined; otherwise, do not change
4678 *STRICT_OVERFLOW_P. */
4680 tree
4681 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4682 bool *strict_overflow_p)
4684 enum tree_code code;
4685 tree arg0, arg1 = NULL_TREE;
4686 tree exp_type, nexp;
4687 int in_p;
4688 tree low, high;
4689 location_t loc = EXPR_LOCATION (exp);
4691 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4692 and see if we can refine the range. Some of the cases below may not
4693 happen, but it doesn't seem worth worrying about this.  We keep
4694 iterating as long as make_range_step is able to refine the range,
4695 and stop as soon as it returns NULL_TREE.  */
4697 in_p = 0;
4698 low = high = build_int_cst (TREE_TYPE (exp), 0);
4700 while (1)
4702 code = TREE_CODE (exp);
4703 exp_type = TREE_TYPE (exp);
4704 arg0 = NULL_TREE;
4706 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4708 if (TREE_OPERAND_LENGTH (exp) > 0)
4709 arg0 = TREE_OPERAND (exp, 0);
4710 if (TREE_CODE_CLASS (code) == tcc_binary
4711 || TREE_CODE_CLASS (code) == tcc_comparison
4712 || (TREE_CODE_CLASS (code) == tcc_expression
4713 && TREE_OPERAND_LENGTH (exp) > 1))
4714 arg1 = TREE_OPERAND (exp, 1);
4716 if (arg0 == NULL_TREE)
4717 break;
4719 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4720 &high, &in_p, strict_overflow_p);
4721 if (nexp == NULL_TREE)
4722 break;
4723 exp = nexp;
4726 /* If EXP is a constant, we can evaluate whether this is true or false. */
4727 if (TREE_CODE (exp) == INTEGER_CST)
4729 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4730 exp, 0, low, 0))
4731 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4732 exp, 1, high, 1)));
4733 low = high = 0;
4734 exp = 0;
4737 *pin_p = in_p, *plow = low, *phigh = high;
4738 return exp;
4741 /* Returns TRUE if [LOW, HIGH] range check can be optimized to
4742 a bitwise check i.e. when
4743 LOW == 0xXX...X00...0
4744 HIGH == 0xXX...X11...1
4745 Return corresponding mask in MASK and stem in VALUE. */
4747 static bool
4748 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
4749 tree *value)
4751 if (TREE_CODE (low) != INTEGER_CST
4752 || TREE_CODE (high) != INTEGER_CST)
4753 return false;
4755 unsigned prec = TYPE_PRECISION (type);
4756 wide_int lo = wi::to_wide (low, prec);
4757 wide_int hi = wi::to_wide (high, prec);
4759 wide_int end_mask = lo ^ hi;
4760 if ((end_mask & (end_mask + 1)) != 0
4761 || (lo & end_mask) != 0)
4762 return false;
4764 wide_int stem_mask = ~end_mask;
4765 wide_int stem = lo & stem_mask;
4766 if (stem != (hi & stem_mask))
4767 return false;
4769 *mask = wide_int_to_tree (type, stem_mask);
4770 *value = wide_int_to_tree (type, stem);
4772 return true;
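/* For example, LOW == 0x50 and HIGH == 0x5f give END_MASK == 0x0f, a
   contiguous run of low-order ones, and STEM_MASK == 0xf0; both bounds
   share the stem 0x50, so the range check "0x50 <= x && x <= 0x5f" can
   become the bitwise check "(x & 0xf0) == 0x50".  */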
4775 /* Helper routine for build_range_check and match.pd. Return the type to
4776 perform the check or NULL if it shouldn't be optimized. */
4778 tree
4779 range_check_type (tree etype)
4781 /* First make sure that arithmetic in this type is valid, then make sure
4782 that it wraps around. */
4783 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4784 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4785 TYPE_UNSIGNED (etype));
4787 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4789 tree utype, minv, maxv;
4791 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4792 for the type in question, as we rely on this here. */
4793 utype = unsigned_type_for (etype);
4794 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4795 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4796 build_int_cst (TREE_TYPE (maxv), 1), 1);
4797 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4799 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4800 minv, 1, maxv, 1)))
4801 etype = utype;
4802 else
4803 return NULL_TREE;
4805 return etype;
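/* For instance, for 32-bit signed int with undefined overflow,
   range_check_type returns the corresponding unsigned type, after
   verifying that (unsigned) INT_MAX + 1 equals (unsigned) INT_MIN, so
   that the "subtract the low bound" trick used by build_range_check
   can wrap safely.  */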
4808 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4809 type, TYPE, return an expression to test if EXP is in (or out of, depending
4810 on IN_P) the range. Return 0 if the test couldn't be created. */
4812 tree
4813 build_range_check (location_t loc, tree type, tree exp, int in_p,
4814 tree low, tree high)
4816 tree etype = TREE_TYPE (exp), mask, value;
4818 /* Disable this optimization for function pointer expressions
4819 on targets that require function pointer canonicalization. */
4820 if (targetm.have_canonicalize_funcptr_for_compare ()
4821 && TREE_CODE (etype) == POINTER_TYPE
4822 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4823 return NULL_TREE;
4825 if (! in_p)
4827 value = build_range_check (loc, type, exp, 1, low, high);
4828 if (value != 0)
4829 return invert_truthvalue_loc (loc, value);
4831 return 0;
4834 if (low == 0 && high == 0)
4835 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4837 if (low == 0)
4838 return fold_build2_loc (loc, LE_EXPR, type, exp,
4839 fold_convert_loc (loc, etype, high));
4841 if (high == 0)
4842 return fold_build2_loc (loc, GE_EXPR, type, exp,
4843 fold_convert_loc (loc, etype, low));
4845 if (operand_equal_p (low, high, 0))
4846 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4847 fold_convert_loc (loc, etype, low));
4849 if (TREE_CODE (exp) == BIT_AND_EXPR
4850 && maskable_range_p (low, high, etype, &mask, &value))
4851 return fold_build2_loc (loc, EQ_EXPR, type,
4852 fold_build2_loc (loc, BIT_AND_EXPR, etype,
4853 exp, mask),
4854 value);
4856 if (integer_zerop (low))
4858 if (! TYPE_UNSIGNED (etype))
4860 etype = unsigned_type_for (etype);
4861 high = fold_convert_loc (loc, etype, high);
4862 exp = fold_convert_loc (loc, etype, exp);
4864 return build_range_check (loc, type, exp, 1, 0, high);
4867 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4868 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4870 int prec = TYPE_PRECISION (etype);
4872 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
4874 if (TYPE_UNSIGNED (etype))
4876 tree signed_etype = signed_type_for (etype);
4877 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4878 etype
4879 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4880 else
4881 etype = signed_etype;
4882 exp = fold_convert_loc (loc, etype, exp);
4884 return fold_build2_loc (loc, GT_EXPR, type, exp,
4885 build_int_cst (etype, 0));
4889 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4890 This requires wrap-around arithmetic for the type of the expression.  */
4891 etype = range_check_type (etype);
4892 if (etype == NULL_TREE)
4893 return NULL_TREE;
4895 if (POINTER_TYPE_P (etype))
4896 etype = unsigned_type_for (etype);
4898 high = fold_convert_loc (loc, etype, high);
4899 low = fold_convert_loc (loc, etype, low);
4900 exp = fold_convert_loc (loc, etype, exp);
4902 value = const_binop (MINUS_EXPR, high, low);
4904 if (value != 0 && !TREE_OVERFLOW (value))
4905 return build_range_check (loc, type,
4906 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4907 1, build_int_cst (etype, 0), value);
4909 return 0;
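/* For illustration: for "ch >= '0' && ch <= '9'", i.e. + [48, 57],
   build_range_check produces the equivalent of
     (unsigned) (ch - 48) <= 9
   since after subtracting the low bound in an unsigned type, every
   in-range value lands in [0, 9] and every out-of-range value wraps
   above 9.  */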
4912 /* Return the predecessor of VAL in its type, handling the infinite case. */
4914 static tree
4915 range_predecessor (tree val)
4917 tree type = TREE_TYPE (val);
4919 if (INTEGRAL_TYPE_P (type)
4920 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4921 return 0;
4922 else
4923 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4924 build_int_cst (TREE_TYPE (val), 1), 0);
4927 /* Return the successor of VAL in its type, handling the infinite case. */
4929 static tree
4930 range_successor (tree val)
4932 tree type = TREE_TYPE (val);
4934 if (INTEGRAL_TYPE_P (type)
4935 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4936 return 0;
4937 else
4938 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4939 build_int_cst (TREE_TYPE (val), 1), 0);
4942 /* Given two ranges, see if we can merge them into one. Return 1 if we
4943 can, 0 if we can't. Set the output range into the specified parameters. */
4945 bool
4946 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4947 tree high0, int in1_p, tree low1, tree high1)
4949 int no_overlap;
4950 int subset;
4951 int temp;
4952 tree tem;
4953 int in_p;
4954 tree low, high;
4955 int lowequal = ((low0 == 0 && low1 == 0)
4956 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4957 low0, 0, low1, 0)));
4958 int highequal = ((high0 == 0 && high1 == 0)
4959 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4960 high0, 1, high1, 1)));
4962 /* Make range 0 be the range that starts first, or ends last if they
4963 start at the same value. Swap them if it isn't. */
4964 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4965 low0, 0, low1, 0))
4966 || (lowequal
4967 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4968 high1, 1, high0, 1))))
4970 temp = in0_p, in0_p = in1_p, in1_p = temp;
4971 tem = low0, low0 = low1, low1 = tem;
4972 tem = high0, high0 = high1, high1 = tem;
4975 /* Now flag two cases, whether the ranges are disjoint or whether the
4976 second range is totally subsumed in the first. Note that the tests
4977 below are simplified by the ones above. */
4978 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4979 high0, 1, low1, 0));
4980 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4981 high1, 1, high0, 1));
4983 /* We now have four cases, depending on whether we are including or
4984 excluding the two ranges. */
4985 if (in0_p && in1_p)
4987 /* If they don't overlap, the result is false. If the second range
4988 is a subset it is the result. Otherwise, the range is from the start
4989 of the second to the end of the first. */
4990 if (no_overlap)
4991 in_p = 0, low = high = 0;
4992 else if (subset)
4993 in_p = 1, low = low1, high = high1;
4994 else
4995 in_p = 1, low = low1, high = high0;
4998 else if (in0_p && ! in1_p)
5000 /* If they don't overlap, the result is the first range. If they are
5001 equal, the result is false. If the second range is a subset of the
5002 first, and the ranges begin at the same place, we go from just after
5003 the end of the second range to the end of the first. If the second
5004 range is not a subset of the first, or if it is a subset and both
5005 ranges end at the same place, the range starts at the start of the
5006 first range and ends just before the second range.
5007 Otherwise, we can't describe this as a single range. */
5008 if (no_overlap)
5009 in_p = 1, low = low0, high = high0;
5010 else if (lowequal && highequal)
5011 in_p = 0, low = high = 0;
5012 else if (subset && lowequal)
5014 low = range_successor (high1);
5015 high = high0;
5016 in_p = 1;
5017 if (low == 0)
5019 /* We are in the weird situation where high0 > high1 but
5020 high1 has no successor. Punt. */
5021 return 0;
5024 else if (! subset || highequal)
5026 low = low0;
5027 high = range_predecessor (low1);
5028 in_p = 1;
5029 if (high == 0)
5031 /* low0 < low1 but low1 has no predecessor. Punt. */
5032 return 0;
5035 else
5036 return 0;
5039 else if (! in0_p && in1_p)
5041 /* If they don't overlap, the result is the second range. If the second
5042 is a subset of the first, the result is false. Otherwise,
5043 the range starts just after the first range and ends at the
5044 end of the second. */
5045 if (no_overlap)
5046 in_p = 1, low = low1, high = high1;
5047 else if (subset || highequal)
5048 in_p = 0, low = high = 0;
5049 else
5051 low = range_successor (high0);
5052 high = high1;
5053 in_p = 1;
5054 if (low == 0)
5056 /* high1 > high0 but high0 has no successor. Punt. */
5057 return 0;
5062 else
5064 /* The case where we are excluding both ranges. Here the complex case
5065 is if they don't overlap. In that case, the only time we have a
5066 range is if they are adjacent. If the second is a subset of the
5067 first, the result is the first. Otherwise, the range to exclude
5068 starts at the beginning of the first range and ends at the end of the
5069 second. */
5070 if (no_overlap)
5072 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5073 range_successor (high0),
5074 1, low1, 0)))
5075 in_p = 0, low = low0, high = high1;
5076 else
5078 /* Canonicalize - [min, x] into - [-, x]. */
5079 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5080 switch (TREE_CODE (TREE_TYPE (low0)))
5082 case ENUMERAL_TYPE:
5083 if (TYPE_PRECISION (TREE_TYPE (low0))
5084 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5085 break;
5086 /* FALLTHROUGH */
5087 case INTEGER_TYPE:
5088 if (tree_int_cst_equal (low0,
5089 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5090 low0 = 0;
5091 break;
5092 case POINTER_TYPE:
5093 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5094 && integer_zerop (low0))
5095 low0 = 0;
5096 break;
5097 default:
5098 break;
5101 /* Canonicalize - [x, max] into - [x, -]. */
5102 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5103 switch (TREE_CODE (TREE_TYPE (high1)))
5105 case ENUMERAL_TYPE:
5106 if (TYPE_PRECISION (TREE_TYPE (high1))
5107 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5108 break;
5109 /* FALLTHROUGH */
5110 case INTEGER_TYPE:
5111 if (tree_int_cst_equal (high1,
5112 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5113 high1 = 0;
5114 break;
5115 case POINTER_TYPE:
5116 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5117 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5118 high1, 1,
5119 build_int_cst (TREE_TYPE (high1), 1),
5120 1)))
5121 high1 = 0;
5122 break;
5123 default:
5124 break;
5127 /* The ranges might be also adjacent between the maximum and
5128 minimum values of the given type. For
5129 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5130 return + [x + 1, y - 1]. */
5131 if (low0 == 0 && high1 == 0)
5133 low = range_successor (high0);
5134 high = range_predecessor (low1);
5135 if (low == 0 || high == 0)
5136 return 0;
5138 in_p = 1;
5140 else
5141 return 0;
5144 else if (subset)
5145 in_p = 0, low = low0, high = high0;
5146 else
5147 in_p = 0, low = low0, high = high1;
5150 *pin_p = in_p, *plow = low, *phigh = high;
5151 return 1;
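/* For example, merging + [2, 5] with + [4, 9] (both included, as for
   an AND) yields their overlap + [4, 5], while merging - [2, 5] with
   - [6, 9] (both excluded and exactly adjacent) yields the single
   excluded range - [2, 9].  */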
5155 /* Subroutine of fold, looking inside expressions of the form
5156 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5157 of the COND_EXPR. This function is being used also to optimize
5158 A op B ? C : A, by reversing the comparison first.
5160 Return a folded expression whose code is not a COND_EXPR
5161 anymore, or NULL_TREE if no folding opportunity is found. */
5163 static tree
5164 fold_cond_expr_with_comparison (location_t loc, tree type,
5165 tree arg0, tree arg1, tree arg2)
5167 enum tree_code comp_code = TREE_CODE (arg0);
5168 tree arg00 = TREE_OPERAND (arg0, 0);
5169 tree arg01 = TREE_OPERAND (arg0, 1);
5170 tree arg1_type = TREE_TYPE (arg1);
5171 tree tem;
5173 STRIP_NOPS (arg1);
5174 STRIP_NOPS (arg2);
5176 /* If we have A op 0 ? A : -A, consider applying the following
5177 transformations:
5179 A == 0? A : -A same as -A
5180 A != 0? A : -A same as A
5181 A >= 0? A : -A same as abs (A)
5182 A > 0? A : -A same as abs (A)
5183 A <= 0? A : -A same as -abs (A)
5184 A < 0? A : -A same as -abs (A)
5186 None of these transformations work for modes with signed
5187 zeros. If A is +/-0, the first two transformations will
5188 change the sign of the result (from +0 to -0, or vice
5189 versa). The last four will fix the sign of the result,
5190 even though the original expressions could be positive or
5191 negative, depending on the sign of A.
5193 Note that all these transformations are correct if A is
5194 NaN, since the two alternatives (A and -A) are also NaNs. */
5195 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5196 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5197 ? real_zerop (arg01)
5198 : integer_zerop (arg01))
5199 && ((TREE_CODE (arg2) == NEGATE_EXPR
5200 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5201 /* In the case that A is of the form X-Y, '-A' (arg2) may
5202 have already been folded to Y-X, check for that. */
5203 || (TREE_CODE (arg1) == MINUS_EXPR
5204 && TREE_CODE (arg2) == MINUS_EXPR
5205 && operand_equal_p (TREE_OPERAND (arg1, 0),
5206 TREE_OPERAND (arg2, 1), 0)
5207 && operand_equal_p (TREE_OPERAND (arg1, 1),
5208 TREE_OPERAND (arg2, 0), 0))))
5209 switch (comp_code)
5211 case EQ_EXPR:
5212 case UNEQ_EXPR:
5213 tem = fold_convert_loc (loc, arg1_type, arg1);
5214 return fold_convert_loc (loc, type, negate_expr (tem));
5215 case NE_EXPR:
5216 case LTGT_EXPR:
5217 return fold_convert_loc (loc, type, arg1);
5218 case UNGE_EXPR:
5219 case UNGT_EXPR:
5220 if (flag_trapping_math)
5221 break;
5222 /* Fall through. */
5223 case GE_EXPR:
5224 case GT_EXPR:
5225 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5226 break;
5227 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5228 return fold_convert_loc (loc, type, tem);
5229 case UNLE_EXPR:
5230 case UNLT_EXPR:
5231 if (flag_trapping_math)
5232 break;
5233 /* FALLTHRU */
5234 case LE_EXPR:
5235 case LT_EXPR:
5236 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5237 break;
5238 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5239 return negate_expr (fold_convert_loc (loc, type, tem));
5240 default:
5241 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5242 break;
5245 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5246 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5247 both transformations are correct when A is NaN: A != 0
5248 is then true, and A == 0 is false. */
5250 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5251 && integer_zerop (arg01) && integer_zerop (arg2))
5253 if (comp_code == NE_EXPR)
5254 return fold_convert_loc (loc, type, arg1);
5255 else if (comp_code == EQ_EXPR)
5256 return build_zero_cst (type);
5259 /* Try some transformations of A op B ? A : B.
5261 A == B? A : B same as B
5262 A != B? A : B same as A
5263 A >= B? A : B same as max (A, B)
5264 A > B? A : B same as max (B, A)
5265 A <= B? A : B same as min (A, B)
5266 A < B? A : B same as min (B, A)
5268 As above, these transformations don't work in the presence
5269 of signed zeros. For example, if A and B are zeros of
5270 opposite sign, the first two transformations will change
5271 the sign of the result. In the last four, the original
5272 expressions give different results for (A=+0, B=-0) and
5273 (A=-0, B=+0), but the transformed expressions do not.
5275 The first two transformations are correct if either A or B
5276 is a NaN. In the first transformation, the condition will
5277 be false, and B will indeed be chosen. In the case of the
5278 second transformation, the condition A != B will be true,
5279 and A will be chosen.
5281 The conversions to max() and min() are not correct if B is
5282 a number and A is not. The conditions in the original
5283 expressions will be false, so all four give B. The min()
5284 and max() versions would give a NaN instead. */
5285 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5286 && operand_equal_for_comparison_p (arg01, arg2)
5287 /* Avoid these transformations if the COND_EXPR may be used
5288 as an lvalue in the C++ front-end. PR c++/19199. */
5289 && (in_gimple_form
5290 || VECTOR_TYPE_P (type)
5291 || (! lang_GNU_CXX ()
5292 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5293 || ! maybe_lvalue_p (arg1)
5294 || ! maybe_lvalue_p (arg2)))
5296 tree comp_op0 = arg00;
5297 tree comp_op1 = arg01;
5298 tree comp_type = TREE_TYPE (comp_op0);
5300 switch (comp_code)
5302 case EQ_EXPR:
5303 return fold_convert_loc (loc, type, arg2);
5304 case NE_EXPR:
5305 return fold_convert_loc (loc, type, arg1);
5306 case LE_EXPR:
5307 case LT_EXPR:
5308 case UNLE_EXPR:
5309 case UNLT_EXPR:
5310 /* In C++ a ?: expression can be an lvalue, so put the
5311 operand which will be used if they are equal first
5312 so that we can convert this back to the
5313 corresponding COND_EXPR. */
5314 if (!HONOR_NANS (arg1))
5316 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5317 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5318 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5319 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5320 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5321 comp_op1, comp_op0);
5322 return fold_convert_loc (loc, type, tem);
5324 break;
5325 case GE_EXPR:
5326 case GT_EXPR:
5327 case UNGE_EXPR:
5328 case UNGT_EXPR:
5329 if (!HONOR_NANS (arg1))
5331 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5332 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5333 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5334 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5335 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5336 comp_op1, comp_op0);
5337 return fold_convert_loc (loc, type, tem);
5339 break;
5340 case UNEQ_EXPR:
5341 if (!HONOR_NANS (arg1))
5342 return fold_convert_loc (loc, type, arg2);
5343 break;
5344 case LTGT_EXPR:
5345 if (!HONOR_NANS (arg1))
5346 return fold_convert_loc (loc, type, arg1);
5347 break;
5348 default:
5349 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5350 break;
5354 return NULL_TREE;
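/* For illustration, with signed integer operands: "x >= 0 ? x : -x"
   matches the A op 0 ? A : -A patterns above and folds to ABS_EXPR <x>,
   while "a < b ? a : b" matches A op B ? A : B and folds to a MIN_EXPR
   of the two operands.  */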
5359 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5360 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5361 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5362 false) >= 2)
5363 #endif
5365 /* EXP is some logical combination of boolean tests. See if we can
5366 merge it into some range test. Return the new tree if so. */
5368 static tree
5369 fold_range_test (location_t loc, enum tree_code code, tree type,
5370 tree op0, tree op1)
5372 int or_op = (code == TRUTH_ORIF_EXPR
5373 || code == TRUTH_OR_EXPR);
5374 int in0_p, in1_p, in_p;
5375 tree low0, low1, low, high0, high1, high;
5376 bool strict_overflow_p = false;
5377 tree tem, lhs, rhs;
5378 const char * const warnmsg = G_("assuming signed overflow does not occur "
5379 "when simplifying range test");
5381 if (!INTEGRAL_TYPE_P (type))
5382 return 0;
5384 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5385 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5387 /* If this is an OR operation, invert both sides; we will invert
5388 again at the end. */
5389 if (or_op)
5390 in0_p = ! in0_p, in1_p = ! in1_p;
5392 /* If both expressions are the same, if we can merge the ranges, and we
5393 can build the range test, return it or it inverted. If one of the
5394 ranges is always true or always false, consider it to be the same
5395 expression as the other. */
5396 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5397 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5398 in1_p, low1, high1)
5399 && 0 != (tem = (build_range_check (loc, type,
5400 lhs != 0 ? lhs
5401 : rhs != 0 ? rhs : integer_zero_node,
5402 in_p, low, high))))
5404 if (strict_overflow_p)
5405 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5406 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5409 /* On machines where the branch cost is expensive, if this is a
5410 short-circuited branch and the underlying object on both sides
5411 is the same, make a non-short-circuit operation. */
5412 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5413 && !flag_sanitize_coverage
5414 && lhs != 0 && rhs != 0
5415 && (code == TRUTH_ANDIF_EXPR
5416 || code == TRUTH_ORIF_EXPR)
5417 && operand_equal_p (lhs, rhs, 0))
5419 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5420 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5421 which cases we can't do this. */
5422 if (simple_operand_p (lhs))
5423 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5424 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5425 type, op0, op1);
5427 else if (!lang_hooks.decls.global_bindings_p ()
5428 && !CONTAINS_PLACEHOLDER_P (lhs))
5430 tree common = save_expr (lhs);
5432 if (0 != (lhs = build_range_check (loc, type, common,
5433 or_op ? ! in0_p : in0_p,
5434 low0, high0))
5435 && (0 != (rhs = build_range_check (loc, type, common,
5436 or_op ? ! in1_p : in1_p,
5437 low1, high1))))
5439 if (strict_overflow_p)
5440 fold_overflow_warning (warnmsg,
5441 WARN_STRICT_OVERFLOW_COMPARISON);
5442 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5443 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5444 type, lhs, rhs);
5449 return 0;
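/* For illustration: for "x < 2 || x > 9", both ranges are inverted for
   the OR (giving x >= 2 and x <= 9), merged into the single range
   + [2, 9], turned into "(unsigned) (x - 2) <= 7" by build_range_check,
   and finally inverted back, yielding "(unsigned) (x - 2) > 7".  */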
5452 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5453 bit value. Arrange things so the extra bits will be set to zero if and
5454 only if C is sign-extended to its full width.  If MASK is nonzero,
5455 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5457 static tree
5458 unextend (tree c, int p, int unsignedp, tree mask)
5460 tree type = TREE_TYPE (c);
5461 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
5462 tree temp;
5464 if (p == modesize || unsignedp)
5465 return c;
5467 /* We work by getting just the sign bit into the low-order bit, then
5468 into the high-order bit, then sign-extend. We then XOR that value
5469 with C. */
5470 temp = build_int_cst (TREE_TYPE (c),
5471 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
5473 /* We must use a signed type in order to get an arithmetic right shift.
5474 However, we must also avoid introducing accidental overflows, so that
5475 a subsequent call to integer_zerop will work. Hence we must
5476 do the type conversion here. At this point, the constant is either
5477 zero or one, and the conversion to a signed type can never overflow.
5478 We could get an overflow if this conversion is done anywhere else. */
5479 if (TYPE_UNSIGNED (type))
5480 temp = fold_convert (signed_type_for (type), temp);
5482 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5483 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5484 if (mask != 0)
5485 temp = const_binop (BIT_AND_EXPR, temp,
5486 fold_convert (TREE_TYPE (c), mask));
5487 /* If necessary, convert the type back to match the type of C. */
5488 if (TYPE_UNSIGNED (type))
5489 temp = fold_convert (type, temp);
5491 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
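/* For example, with P == 4 in a 32-bit mode, TEMP ends up with bits
   4..31 all equal to bit 3 of C, i.e. 0xfffffff0 when the field's sign
   bit is set; XORing that with C == 0x0000000a (zero-extended) gives
   0xfffffffa, whereas C == 0xfffffffa (sign-extended) gives 0x0000000a,
   so the extra bits are zero exactly when C was sign-extended.  */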
5494 /* For an expression that has the form
5495 (A && B) || ~B
5497 (A || B) && ~B,
5498 we can drop one of the inner expressions and simplify to
5499 A || ~B
5501 A && ~B
5502 LOC is the location of the resulting expression. OP is the inner
5503 logical operation, the left-hand side in the examples above, while CMPOP
5504 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5505 removing a condition that guards another, as in
5506 (A != NULL && A->...) || A == NULL
5507 which we must not transform. If RHS_ONLY is true, only eliminate the
5508 right-most operand of the inner logical operation. */
5510 static tree
5511 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5512 bool rhs_only)
5514 tree type = TREE_TYPE (cmpop);
5515 enum tree_code code = TREE_CODE (cmpop);
5516 enum tree_code truthop_code = TREE_CODE (op);
5517 tree lhs = TREE_OPERAND (op, 0);
5518 tree rhs = TREE_OPERAND (op, 1);
5519 tree orig_lhs = lhs, orig_rhs = rhs;
5520 enum tree_code rhs_code = TREE_CODE (rhs);
5521 enum tree_code lhs_code = TREE_CODE (lhs);
5522 enum tree_code inv_code;
5524 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5525 return NULL_TREE;
5527 if (TREE_CODE_CLASS (code) != tcc_comparison)
5528 return NULL_TREE;
5530 if (rhs_code == truthop_code)
5532 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5533 if (newrhs != NULL_TREE)
5535 rhs = newrhs;
5536 rhs_code = TREE_CODE (rhs);
5539 if (lhs_code == truthop_code && !rhs_only)
5541 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5542 if (newlhs != NULL_TREE)
5544 lhs = newlhs;
5545 lhs_code = TREE_CODE (lhs);
5549 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5550 if (inv_code == rhs_code
5551 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5552 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5553 return lhs;
5554 if (!rhs_only && inv_code == lhs_code
5555 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5556 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5557 return rhs;
5558 if (rhs != orig_rhs || lhs != orig_lhs)
5559 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5560 lhs, rhs);
5561 return NULL_TREE;
5564 /* Find ways of folding logical expressions of LHS and RHS:
5565 Try to merge two comparisons to the same innermost item.
5566 Look for range tests like "ch >= '0' && ch <= '9'".
5567 Look for combinations of simple terms on machines with expensive branches
5568 and evaluate the RHS unconditionally.
5570 For example, if we have p->a == 2 && p->b == 4 and we can make an
5571 object large enough to span both A and B, we can do this with a comparison
5572 against the object ANDed with a mask.
5574 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5575 operations to do this with one comparison.
5577 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5578 function and the one above.
5580 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5581 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5583 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5584 two operands.
5586 We return the simplified tree or 0 if no optimization is possible. */
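/* An illustrative sketch of the field-merging case (assuming the two
   bitfields are laid out adjacently within one addressable unit):

     struct s { unsigned a : 8; unsigned b : 8; } *p;
     ... p->a == 2 && p->b == 4 ...

   can be folded into a single 16-bit load, mask and compare, roughly
   (*(unsigned short *) p & 0xffff) == 0x0402 on a little-endian target.  */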
5588 static tree
5589 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5590 tree lhs, tree rhs)
5592 /* If this is the "or" of two comparisons, we can do something if
5593 the comparisons are NE_EXPR. If this is the "and", we can do something
5594 if the comparisons are EQ_EXPR. I.e.,
5595 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5597 WANTED_CODE is this operation code. For single bit fields, we can
5598 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5599 comparison for one-bit fields. */
5601 enum tree_code wanted_code;
5602 enum tree_code lcode, rcode;
5603 tree ll_arg, lr_arg, rl_arg, rr_arg;
5604 tree ll_inner, lr_inner, rl_inner, rr_inner;
5605 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5606 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5607 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5608 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5609 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5610 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5611 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5612 scalar_int_mode lnmode, rnmode;
5613 tree ll_mask, lr_mask, rl_mask, rr_mask;
5614 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5615 tree l_const, r_const;
5616 tree lntype, rntype, result;
5617 HOST_WIDE_INT first_bit, end_bit;
5618 int volatilep;
5620 /* Start by getting the comparison codes. Fail if anything is volatile.
5621 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5622 it were surrounded with a NE_EXPR. */
5624 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5625 return 0;
5627 lcode = TREE_CODE (lhs);
5628 rcode = TREE_CODE (rhs);
5630 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5632 lhs = build2 (NE_EXPR, truth_type, lhs,
5633 build_int_cst (TREE_TYPE (lhs), 0));
5634 lcode = NE_EXPR;
5637 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5639 rhs = build2 (NE_EXPR, truth_type, rhs,
5640 build_int_cst (TREE_TYPE (rhs), 0));
5641 rcode = NE_EXPR;
5644 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5645 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5646 return 0;
5648 ll_arg = TREE_OPERAND (lhs, 0);
5649 lr_arg = TREE_OPERAND (lhs, 1);
5650 rl_arg = TREE_OPERAND (rhs, 0);
5651 rr_arg = TREE_OPERAND (rhs, 1);
5653 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5654 if (simple_operand_p (ll_arg)
5655 && simple_operand_p (lr_arg))
5657 if (operand_equal_p (ll_arg, rl_arg, 0)
5658 && operand_equal_p (lr_arg, rr_arg, 0))
5660 result = combine_comparisons (loc, code, lcode, rcode,
5661 truth_type, ll_arg, lr_arg);
5662 if (result)
5663 return result;
5665 else if (operand_equal_p (ll_arg, rr_arg, 0)
5666 && operand_equal_p (lr_arg, rl_arg, 0))
5668 result = combine_comparisons (loc, code, lcode,
5669 swap_tree_comparison (rcode),
5670 truth_type, ll_arg, lr_arg);
5671 if (result)
5672 return result;
5676 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5677 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5679 /* If the RHS can be evaluated unconditionally and its operands are
5680 simple, it wins to evaluate the RHS unconditionally on machines
5681 with expensive branches. In this case, this isn't a comparison
5682 that can be merged. */
5684 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5685 false) >= 2
5686 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5687 && simple_operand_p (rl_arg)
5688 && simple_operand_p (rr_arg))
5690 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5691 if (code == TRUTH_OR_EXPR
5692 && lcode == NE_EXPR && integer_zerop (lr_arg)
5693 && rcode == NE_EXPR && integer_zerop (rr_arg)
5694 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5695 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5696 return build2_loc (loc, NE_EXPR, truth_type,
5697 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5698 ll_arg, rl_arg),
5699 build_int_cst (TREE_TYPE (ll_arg), 0));
5701 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5702 if (code == TRUTH_AND_EXPR
5703 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5704 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5705 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5706 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5707 return build2_loc (loc, EQ_EXPR, truth_type,
5708 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5709 ll_arg, rl_arg),
5710 build_int_cst (TREE_TYPE (ll_arg), 0));
5713 /* See if the comparisons can be merged. Then get all the parameters for
5714 each side. */
5716 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5717 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5718 return 0;
5720 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5721 volatilep = 0;
5722 ll_inner = decode_field_reference (loc, &ll_arg,
5723 &ll_bitsize, &ll_bitpos, &ll_mode,
5724 &ll_unsignedp, &ll_reversep, &volatilep,
5725 &ll_mask, &ll_and_mask);
5726 lr_inner = decode_field_reference (loc, &lr_arg,
5727 &lr_bitsize, &lr_bitpos, &lr_mode,
5728 &lr_unsignedp, &lr_reversep, &volatilep,
5729 &lr_mask, &lr_and_mask);
5730 rl_inner = decode_field_reference (loc, &rl_arg,
5731 &rl_bitsize, &rl_bitpos, &rl_mode,
5732 &rl_unsignedp, &rl_reversep, &volatilep,
5733 &rl_mask, &rl_and_mask);
5734 rr_inner = decode_field_reference (loc, &rr_arg,
5735 &rr_bitsize, &rr_bitpos, &rr_mode,
5736 &rr_unsignedp, &rr_reversep, &volatilep,
5737 &rr_mask, &rr_and_mask);
5739 /* The inner operation on the lhs of each comparison must be the
5740 same if we are to be able to do anything.
5741 Then see if we have constants. If not, the same must be true for
5742 the rhs's. */
5743 if (volatilep
5744 || ll_reversep != rl_reversep
5745 || ll_inner == 0 || rl_inner == 0
5746 || ! operand_equal_p (ll_inner, rl_inner, 0))
5747 return 0;
5749 if (TREE_CODE (lr_arg) == INTEGER_CST
5750 && TREE_CODE (rr_arg) == INTEGER_CST)
5752 l_const = lr_arg, r_const = rr_arg;
5753 lr_reversep = ll_reversep;
5755 else if (lr_reversep != rr_reversep
5756 || lr_inner == 0 || rr_inner == 0
5757 || ! operand_equal_p (lr_inner, rr_inner, 0))
5758 return 0;
5759 else
5760 l_const = r_const = 0;
5762 /* If either comparison code is not correct for our logical operation,
5763 fail. However, we can convert a one-bit comparison against zero into
5764 the opposite comparison against that bit being set in the field. */
5766 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5767 if (lcode != wanted_code)
5769 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5771 /* Make the left operand unsigned, since we are only interested
5772 in the value of one bit. Otherwise we are doing the wrong
5773 thing below. */
5774 ll_unsignedp = 1;
5775 l_const = ll_mask;
5777 else
5778 return 0;
5781 /* This is analogous to the code for l_const above. */
5782 if (rcode != wanted_code)
5784 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5786 rl_unsignedp = 1;
5787 r_const = rl_mask;
5789 else
5790 return 0;
5793 /* See if we can find a mode that contains both fields being compared on
5794 the left. If we can't, fail. Otherwise, update all constants and masks
5795 to be relative to a field of that size. */
5796 first_bit = MIN (ll_bitpos, rl_bitpos);
5797 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5798 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5799 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
5800 volatilep, &lnmode))
5801 return 0;
5803 lnbitsize = GET_MODE_BITSIZE (lnmode);
5804 lnbitpos = first_bit & ~ (lnbitsize - 1);
5805 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5806 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5808 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5810 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5811 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5814 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5815 size_int (xll_bitpos));
5816 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5817 size_int (xrl_bitpos));
5819 if (l_const)
5821 l_const = fold_convert_loc (loc, lntype, l_const);
5822 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5823 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5824 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5825 fold_build1_loc (loc, BIT_NOT_EXPR,
5826 lntype, ll_mask))))
5828 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5830 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5833 if (r_const)
5835 r_const = fold_convert_loc (loc, lntype, r_const);
5836 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5837 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5838 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5839 fold_build1_loc (loc, BIT_NOT_EXPR,
5840 lntype, rl_mask))))
5842 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5844 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5848 /* If the right sides are not constant, do the same for them. Also,
5849 disallow this optimization if a size or signedness mismatch occurs
5850 between the left and right sides. */
5851 if (l_const == 0)
5853 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5854 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5855 /* Make sure the two fields on the right
5856 correspond to the left without being swapped. */
5857 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5858 return 0;
5860 first_bit = MIN (lr_bitpos, rr_bitpos);
5861 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5862 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5863 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
5864 volatilep, &rnmode))
5865 return 0;
5867 rnbitsize = GET_MODE_BITSIZE (rnmode);
5868 rnbitpos = first_bit & ~ (rnbitsize - 1);
5869 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5870 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5872 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5874 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5875 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5878 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5879 rntype, lr_mask),
5880 size_int (xlr_bitpos));
5881 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5882 rntype, rr_mask),
5883 size_int (xrr_bitpos));
5885 /* Make a mask that corresponds to both fields being compared.
5886 Do this for both items being compared. If the operands are the
5887 same size and the bits being compared are in the same position
5888 then we can do this by masking both and comparing the masked
5889 results. */
5890 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5891 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5892 if (lnbitsize == rnbitsize
5893 && xll_bitpos == xlr_bitpos
5894 && lnbitpos >= 0
5895 && rnbitpos >= 0)
5897 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
5898 lntype, lnbitsize, lnbitpos,
5899 ll_unsignedp || rl_unsignedp, ll_reversep);
5900 if (! all_ones_mask_p (ll_mask, lnbitsize))
5901 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5903 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
5904 rntype, rnbitsize, rnbitpos,
5905 lr_unsignedp || rr_unsignedp, lr_reversep);
5906 if (! all_ones_mask_p (lr_mask, rnbitsize))
5907 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5909 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5912 /* There is still another way we can do something: If both pairs of
5913 fields being compared are adjacent, we may be able to make a wider
5914 field containing them both.
5916 Note that we still must mask the lhs/rhs expressions. Furthermore,
5917 the mask must be shifted to account for the shift done by
5918 make_bit_field_ref. */
5919 if (((ll_bitsize + ll_bitpos == rl_bitpos
5920 && lr_bitsize + lr_bitpos == rr_bitpos)
5921 || (ll_bitpos == rl_bitpos + rl_bitsize
5922 && lr_bitpos == rr_bitpos + rr_bitsize))
5923 && ll_bitpos >= 0
5924 && rl_bitpos >= 0
5925 && lr_bitpos >= 0
5926 && rr_bitpos >= 0)
5928 tree type;
5930 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
5931 ll_bitsize + rl_bitsize,
5932 MIN (ll_bitpos, rl_bitpos),
5933 ll_unsignedp, ll_reversep);
5934 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
5935 lr_bitsize + rr_bitsize,
5936 MIN (lr_bitpos, rr_bitpos),
5937 lr_unsignedp, lr_reversep);
5939 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5940 size_int (MIN (xll_bitpos, xrl_bitpos)));
5941 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5942 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5944 /* Convert to the smaller type before masking out unwanted bits. */
5945 type = lntype;
5946 if (lntype != rntype)
5948 if (lnbitsize > rnbitsize)
5950 lhs = fold_convert_loc (loc, rntype, lhs);
5951 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5952 type = rntype;
5954 else if (lnbitsize < rnbitsize)
5956 rhs = fold_convert_loc (loc, lntype, rhs);
5957 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5958 type = lntype;
5962 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5963 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5965 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5966 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5968 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5971 return 0;
5974 /* Handle the case of comparisons with constants. If there is something in
5975 common between the masks, those bits of the constants must be the same.
5976 If not, the condition is always false. Test for this to avoid generating
5977 incorrect code below. */
5978 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5979 if (! integer_zerop (result)
5980 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5981 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5983 if (wanted_code == NE_EXPR)
5985 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5986 return constant_boolean_node (true, truth_type);
5988 else
5990 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5991 return constant_boolean_node (false, truth_type);
5995 if (lnbitpos < 0)
5996 return 0;
5998 /* Construct the expression we will return. First get the component
5999 reference we will make. Unless the mask is all ones the width of
6000 that field, perform the mask operation. Then compare with the
6001 merged constant. */
6002 result = make_bit_field_ref (loc, ll_inner, ll_arg,
6003 lntype, lnbitsize, lnbitpos,
6004 ll_unsignedp || rl_unsignedp, ll_reversep);
6006 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6007 if (! all_ones_mask_p (ll_mask, lnbitsize))
6008 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6010 return build2_loc (loc, wanted_code, truth_type, result,
6011 const_binop (BIT_IOR_EXPR, l_const, r_const));
6014 /* T is an integer expression that is being multiplied by, divided by, or
6015 reduced modulo (CODE says which, and what kind of divide or modulus) a
6016 constant C. See if we can eliminate that operation by folding it with
6017 other operations already in T. WIDE_TYPE, if non-null, is a type that
6018 should be used for the computation if wider than our type.
6020 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6021 (X * 2) + (Y * 4). We must, however, be assured that either the original
6022 expression would not overflow or that overflow is undefined for the type
6023 in the language in question.
6025 If we return a non-null expression, it is an equivalent form of the
6026 original computation, but need not be in the original type.
6028 We set *STRICT_OVERFLOW_P to true if the return value depends on
6029 signed overflow being undefined. Otherwise we do not change
6030 *STRICT_OVERFLOW_P. */
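/* A concrete instance of the overflow caveat (a sketch, with signed X):
   (X * 6) / 2 may be rewritten as X * 3 only because signed overflow is
   undefined; if X were unsigned, X * 6 could wrap and the rewrite would
   change the result. In such cases *STRICT_OVERFLOW_P is set.  */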
6032 static tree
6033 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6034 bool *strict_overflow_p)
6036 /* To avoid exponential search depth, refuse to allow recursion past
6037 three levels. Beyond that (1) it's highly unlikely that we'll find
6038 something interesting and (2) we've probably processed it before
6039 when we built the inner expression. */
6041 static int depth;
6042 tree ret;
6044 if (depth > 3)
6045 return NULL;
6047 depth++;
6048 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6049 depth--;
6051 return ret;
6054 static tree
6055 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6056 bool *strict_overflow_p)
6058 tree type = TREE_TYPE (t);
6059 enum tree_code tcode = TREE_CODE (t);
6060 tree ctype = (wide_type != 0
6061 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6062 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6063 ? wide_type : type);
6064 tree t1, t2;
6065 int same_p = tcode == code;
6066 tree op0 = NULL_TREE, op1 = NULL_TREE;
6067 bool sub_strict_overflow_p;
6069 /* Don't deal with constants of zero here; they confuse the code below. */
6070 if (integer_zerop (c))
6071 return NULL_TREE;
6073 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6074 op0 = TREE_OPERAND (t, 0);
6076 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6077 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6079 /* Note that we need not handle conditional operations here since fold
6080 already handles those cases. So just do arithmetic here. */
6081 switch (tcode)
6083 case INTEGER_CST:
6084 /* For a constant, we can always simplify if this is a multiplication
6085 or (for divide and modulus) if it is a multiple of our constant. */
6086 if (code == MULT_EXPR
6087 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6088 TYPE_SIGN (type)))
6090 tree tem = const_binop (code, fold_convert (ctype, t),
6091 fold_convert (ctype, c));
6092 /* If the multiplication overflowed, we lost information on it.
6093 See PR68142 and PR69845. */
6094 if (TREE_OVERFLOW (tem))
6095 return NULL_TREE;
6096 return tem;
6098 break;
6100 CASE_CONVERT: case NON_LVALUE_EXPR:
6101 /* If op0 is an expression ... */
6102 if ((COMPARISON_CLASS_P (op0)
6103 || UNARY_CLASS_P (op0)
6104 || BINARY_CLASS_P (op0)
6105 || VL_EXP_CLASS_P (op0)
6106 || EXPRESSION_CLASS_P (op0))
6107 /* ... and has wrapping overflow, and its type is smaller
6108 than ctype, then we cannot pass through this widening.
6109 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6110 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6111 && (TYPE_PRECISION (ctype)
6112 > TYPE_PRECISION (TREE_TYPE (op0))))
6113 /* ... or this is a truncation (t is narrower than op0),
6114 then we cannot pass through this narrowing. */
6115 || (TYPE_PRECISION (type)
6116 < TYPE_PRECISION (TREE_TYPE (op0)))
6117 /* ... or signedness changes for division or modulus,
6118 then we cannot pass through this conversion. */
6119 || (code != MULT_EXPR
6120 && (TYPE_UNSIGNED (ctype)
6121 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6122 /* ... or has undefined overflow while the converted to
6123 type has not, we cannot do the operation in the inner type
6124 as that would introduce undefined overflow. */
6125 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6126 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6127 && !TYPE_OVERFLOW_UNDEFINED (type))))
6128 break;
6130 /* Pass the constant down and see if we can make a simplification. If
6131 we can, replace this expression with the inner simplification for
6132 possible later conversion to our or some other type. */
6133 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6134 && TREE_CODE (t2) == INTEGER_CST
6135 && !TREE_OVERFLOW (t2)
6136 && (0 != (t1 = extract_muldiv (op0, t2, code,
6137 code == MULT_EXPR
6138 ? ctype : NULL_TREE,
6139 strict_overflow_p))))
6140 return t1;
6141 break;
6143 case ABS_EXPR:
6144 /* If widening the type changes it from signed to unsigned, then we
6145 must avoid building ABS_EXPR itself as unsigned. */
6146 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6148 tree cstype = (*signed_type_for) (ctype);
6149 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6150 != 0)
6152 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6153 return fold_convert (ctype, t1);
6155 break;
6157 /* If the constant is negative, we cannot simplify this. */
6158 if (tree_int_cst_sgn (c) == -1)
6159 break;
6160 /* FALLTHROUGH */
6161 case NEGATE_EXPR:
6162 /* For division and modulus, type can't be unsigned, as e.g.
6163 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6164 For signed types, even with wrapping overflow, this is fine. */
6165 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6166 break;
6167 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6168 != 0)
6169 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6170 break;
6172 case MIN_EXPR: case MAX_EXPR:
6173 /* If widening the type changes the signedness, then we can't perform
6174 this optimization as that changes the result. */
6175 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6176 break;
6178 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6179 sub_strict_overflow_p = false;
6180 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6181 &sub_strict_overflow_p)) != 0
6182 && (t2 = extract_muldiv (op1, c, code, wide_type,
6183 &sub_strict_overflow_p)) != 0)
6185 if (tree_int_cst_sgn (c) < 0)
6186 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6187 if (sub_strict_overflow_p)
6188 *strict_overflow_p = true;
6189 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6190 fold_convert (ctype, t2));
6192 break;
6194 case LSHIFT_EXPR: case RSHIFT_EXPR:
6195 /* If the second operand is constant, this is a multiplication
6196 or floor division by a power of two, so we can treat it that
6197 way unless the multiplier or divisor overflows. Signed
6198 left-shift overflow is implementation-defined rather than
6199 undefined in C90, so do not convert signed left shift into
6200 multiplication. */
6201 if (TREE_CODE (op1) == INTEGER_CST
6202 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6203 /* const_binop may not detect overflow correctly,
6204 so check for it explicitly here. */
6205 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6206 wi::to_wide (op1))
6207 && 0 != (t1 = fold_convert (ctype,
6208 const_binop (LSHIFT_EXPR,
6209 size_one_node,
6210 op1)))
6211 && !TREE_OVERFLOW (t1))
6212 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6213 ? MULT_EXPR : FLOOR_DIV_EXPR,
6214 ctype,
6215 fold_convert (ctype, op0),
6216 t1),
6217 c, code, wide_type, strict_overflow_p);
6218 break;
6220 case PLUS_EXPR: case MINUS_EXPR:
6221 /* See if we can eliminate the operation on both sides. If we can, we
6222 can return a new PLUS or MINUS. If we can't, the only remaining
6223 cases where we can do anything are if the second operand is a
6224 constant. */
6225 sub_strict_overflow_p = false;
6226 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6227 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6228 if (t1 != 0 && t2 != 0
6229 && TYPE_OVERFLOW_WRAPS (ctype)
6230 && (code == MULT_EXPR
6231 /* If not multiplication, we can only do this if both operands
6232 are divisible by c. */
6233 || (multiple_of_p (ctype, op0, c)
6234 && multiple_of_p (ctype, op1, c))))
6236 if (sub_strict_overflow_p)
6237 *strict_overflow_p = true;
6238 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6239 fold_convert (ctype, t2));
6242 /* If this was a subtraction, negate OP1 and set it to be an addition.
6243 This simplifies the logic below. */
6244 if (tcode == MINUS_EXPR)
6246 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6247 /* If OP1 was not easily negatable, the constant may be OP0. */
6248 if (TREE_CODE (op0) == INTEGER_CST)
6250 std::swap (op0, op1);
6251 std::swap (t1, t2);
6255 if (TREE_CODE (op1) != INTEGER_CST)
6256 break;
6258 /* If either OP1 or C are negative, this optimization is not safe for
6259 some of the division and remainder types while for others we need
6260 to change the code. */
6261 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6263 if (code == CEIL_DIV_EXPR)
6264 code = FLOOR_DIV_EXPR;
6265 else if (code == FLOOR_DIV_EXPR)
6266 code = CEIL_DIV_EXPR;
6267 else if (code != MULT_EXPR
6268 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6269 break;
6272 /* If it's a multiply or a division/modulus operation of a multiple
6273 of our constant, do the operation and verify it doesn't overflow. */
6274 if (code == MULT_EXPR
6275 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6276 TYPE_SIGN (type)))
6278 op1 = const_binop (code, fold_convert (ctype, op1),
6279 fold_convert (ctype, c));
6280 /* We allow the constant to overflow with wrapping semantics. */
6281 if (op1 == 0
6282 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6283 break;
6285 else
6286 break;
6288 /* If we have an unsigned type, we cannot widen the operation since it
6289 will change the result if the original computation overflowed. */
6290 if (TYPE_UNSIGNED (ctype) && ctype != type)
6291 break;
6293 /* The last case is if this is a multiplication. In that case, we can
6294 apply the distributive law to commute the multiply and addition
6295 if the multiplication of the constants doesn't overflow
6296 and overflow is defined. With undefined overflow
6297 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6298 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6299 return fold_build2 (tcode, ctype,
6300 fold_build2 (code, ctype,
6301 fold_convert (ctype, op0),
6302 fold_convert (ctype, c)),
6303 op1);
6305 break;
6307 case MULT_EXPR:
6308 /* We have a special case here if we are doing something like
6309 (C * 8) % 4 since we know that's zero. */
6310 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6311 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6312 /* If the multiplication can overflow we cannot optimize this. */
6313 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6314 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6315 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6316 TYPE_SIGN (type)))
6318 *strict_overflow_p = true;
6319 return omit_one_operand (type, integer_zero_node, op0);
6322 /* ... fall through ... */
6324 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6325 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6326 /* If we can extract our operation from the LHS, do so and return a
6327 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6328 do something only if the second operand is a constant. */
6329 if (same_p
6330 && TYPE_OVERFLOW_WRAPS (ctype)
6331 && (t1 = extract_muldiv (op0, c, code, wide_type,
6332 strict_overflow_p)) != 0)
6333 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6334 fold_convert (ctype, op1));
6335 else if (tcode == MULT_EXPR && code == MULT_EXPR
6336 && TYPE_OVERFLOW_WRAPS (ctype)
6337 && (t1 = extract_muldiv (op1, c, code, wide_type,
6338 strict_overflow_p)) != 0)
6339 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6340 fold_convert (ctype, t1));
6341 else if (TREE_CODE (op1) != INTEGER_CST)
6342 return 0;
6344 /* If these are the same operation types, we can associate them
6345 assuming no overflow. */
6346 if (tcode == code)
6348 bool overflow_p = false;
6349 bool overflow_mul_p;
6350 signop sign = TYPE_SIGN (ctype);
6351 unsigned prec = TYPE_PRECISION (ctype);
6352 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6353 wi::to_wide (c, prec),
6354 sign, &overflow_mul_p);
6355 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6356 if (overflow_mul_p
6357 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6358 overflow_p = true;
6359 if (!overflow_p)
6360 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6361 wide_int_to_tree (ctype, mul));
6364 /* If these operations "cancel" each other, we have the main
6365 optimizations of this pass, which occur when either constant is a
6366 multiple of the other, in which case we replace this with either an
6367 operation of CODE or TCODE.
6369 If we have an unsigned type, we cannot do this since it will change
6370 the result if the original computation overflowed. */
6371 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6372 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6373 || (tcode == MULT_EXPR
6374 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6375 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6376 && code != MULT_EXPR)))
6378 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6379 TYPE_SIGN (type)))
6381 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6382 *strict_overflow_p = true;
6383 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6384 fold_convert (ctype,
6385 const_binop (TRUNC_DIV_EXPR,
6386 op1, c)));
6388 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6389 TYPE_SIGN (type)))
6391 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6392 *strict_overflow_p = true;
6393 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6394 fold_convert (ctype,
6395 const_binop (TRUNC_DIV_EXPR,
6396 c, op1)));
6399 break;
6401 default:
6402 break;
6405 return 0;
6408 /* Return a node which has the indicated constant VALUE (either 0 or
6409 1 for scalars or {-1,-1,...} or {0,0,...} for vectors),
6410 and is of the indicated TYPE. */
6412 tree
6413 constant_boolean_node (bool value, tree type)
6415 if (type == integer_type_node)
6416 return value ? integer_one_node : integer_zero_node;
6417 else if (type == boolean_type_node)
6418 return value ? boolean_true_node : boolean_false_node;
6419 else if (TREE_CODE (type) == VECTOR_TYPE)
6420 return build_vector_from_val (type,
6421 build_int_cst (TREE_TYPE (type),
6422 value ? -1 : 0));
6423 else
6424 return fold_convert (type, value ? integer_one_node : integer_zero_node);
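/* For example, constant_boolean_node (true, boolean_type_node) yields
   boolean_true_node, while for a four-element integer vector type it
   yields the all-ones vector { -1, -1, -1, -1 }.  */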
6428 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6429 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6430 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6431 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6432 COND is the first argument to CODE; otherwise (as in the example
6433 given here), it is the second argument. TYPE is the type of the
6434 original expression. Return NULL_TREE if no simplification is
6435 possible. */
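/* A sketch of the payoff: with a constant ARG, 2 + (b ? x : 3) becomes
   b ? (2 + x) : 5, so the constant arm folds away entirely and at most
   one addition remains on either path.  */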
6437 static tree
6438 fold_binary_op_with_conditional_arg (location_t loc,
6439 enum tree_code code,
6440 tree type, tree op0, tree op1,
6441 tree cond, tree arg, int cond_first_p)
6443 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6444 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6445 tree test, true_value, false_value;
6446 tree lhs = NULL_TREE;
6447 tree rhs = NULL_TREE;
6448 enum tree_code cond_code = COND_EXPR;
6450 if (TREE_CODE (cond) == COND_EXPR
6451 || TREE_CODE (cond) == VEC_COND_EXPR)
6453 test = TREE_OPERAND (cond, 0);
6454 true_value = TREE_OPERAND (cond, 1);
6455 false_value = TREE_OPERAND (cond, 2);
6456 /* If this operand throws an exception, then it does not make
6457 sense to try to perform a logical or arithmetic operation
6458 involving it. */
6459 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6460 lhs = true_value;
6461 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6462 rhs = false_value;
6464 else if (!(TREE_CODE (type) != VECTOR_TYPE
6465 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6467 tree testtype = TREE_TYPE (cond);
6468 test = cond;
6469 true_value = constant_boolean_node (true, testtype);
6470 false_value = constant_boolean_node (false, testtype);
6472 else
6473 /* Detect the case of mixing vector and scalar types - bail out. */
6474 return NULL_TREE;
6476 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6477 cond_code = VEC_COND_EXPR;
6479 /* This transformation is only worthwhile if we don't have to wrap ARG
6480 in a SAVE_EXPR and the operation can be simplified without recursing
6481 on at least one of the branches once it is pushed inside the COND_EXPR. */
6482 if (!TREE_CONSTANT (arg)
6483 && (TREE_SIDE_EFFECTS (arg)
6484 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6485 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6486 return NULL_TREE;
6488 arg = fold_convert_loc (loc, arg_type, arg);
6489 if (lhs == 0)
6491 true_value = fold_convert_loc (loc, cond_type, true_value);
6492 if (cond_first_p)
6493 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6494 else
6495 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6497 if (rhs == 0)
6499 false_value = fold_convert_loc (loc, cond_type, false_value);
6500 if (cond_first_p)
6501 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6502 else
6503 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6506 /* Check that we have simplified at least one of the branches. */
6507 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6508 return NULL_TREE;
6510 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6514 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6516 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6517 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6518 ADDEND is the same as X.
6520 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6521 and finite. The problematic cases are when X is zero, and its mode
6522 has signed zeros. In the case of rounding towards -infinity,
6523 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6524 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6526 bool
6527 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6529 if (!real_zerop (addend))
6530 return false;
6532 /* Don't allow the fold with -fsignaling-nans. */
6533 if (HONOR_SNANS (element_mode (type)))
6534 return false;
6536 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6537 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6538 return true;
6540 /* In a vector or complex, we would need to check the sign of all zeros. */
6541 if (TREE_CODE (addend) != REAL_CST)
6542 return false;
6544 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6545 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6546 negate = !negate;
6548 /* The mode has signed zeros, and we have to honor their sign.
6549 In this situation, there is only one case we can return true for.
6550 X - 0 is the same as X unless rounding towards -infinity is
6551 supported. */
6552 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
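/* Concretely (a sketch assuming IEEE semantics with signed zeros
   honored): x + 0.0 cannot be folded to x, because (-0.0) + 0.0 is
   +0.0; but x - 0.0 does fold to x under the default round-to-nearest
   mode, which is the single case accepted above.  */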
6555 /* Subroutine of match.pd that optimizes comparisons of a division by
6556 a nonzero integer constant against an integer constant, i.e.
6557 X/C1 op C2.
6559 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6560 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
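/* Worked example (sketch): for unsigned char X and the test X / 10 == 3,
   *LO becomes 30 and *HI becomes 39, so callers can rewrite the division
   as the range check 30 <= X && X <= 39.  */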
6562 enum tree_code
6563 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
6564 tree *hi, bool *neg_overflow)
6566 tree prod, tmp, type = TREE_TYPE (c1);
6567 signop sign = TYPE_SIGN (type);
6568 bool overflow;
6570 /* We have to do this the hard way to detect unsigned overflow.
6571 prod = int_const_binop (MULT_EXPR, c1, c2); */
6572 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
6573 prod = force_fit_type (type, val, -1, overflow);
6574 *neg_overflow = false;
6576 if (sign == UNSIGNED)
6578 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6579 *lo = prod;
6581 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6582 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
6583 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
6585 else if (tree_int_cst_sgn (c1) >= 0)
6587 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6588 switch (tree_int_cst_sgn (c2))
6590 case -1:
6591 *neg_overflow = true;
6592 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
6593 *hi = prod;
6594 break;
6596 case 0:
6597 *lo = fold_negate_const (tmp, type);
6598 *hi = tmp;
6599 break;
6601 case 1:
6602 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
6603 *lo = prod;
6604 break;
6606 default:
6607 gcc_unreachable ();
6610 else
6612 /* A negative divisor reverses the relational operators. */
6613 code = swap_tree_comparison (code);
6615 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
6616 switch (tree_int_cst_sgn (c2))
6618 case -1:
6619 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
6620 *lo = prod;
6621 break;
6623 case 0:
6624 *hi = fold_negate_const (tmp, type);
6625 *lo = tmp;
6626 break;
6628 case 1:
6629 *neg_overflow = true;
6630 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
6631 *hi = prod;
6632 break;
6634 default:
6635 gcc_unreachable ();
6639 if (code != EQ_EXPR && code != NE_EXPR)
6640 return code;
6642 if (TREE_OVERFLOW (*lo)
6643 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
6644 *lo = NULL_TREE;
6645 if (TREE_OVERFLOW (*hi)
6646 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
6647 *hi = NULL_TREE;
6649 return code;
6653 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6654 equality/inequality test, then return a simplified form of the test
6655 using a sign test. Otherwise return NULL. TYPE is the desired
6656 result type. */
6658 static tree
6659 fold_single_bit_test_into_sign_test (location_t loc,
6660 enum tree_code code, tree arg0, tree arg1,
6661 tree result_type)
6663 /* If this is testing a single bit, we can optimize the test. */
6664 if ((code == NE_EXPR || code == EQ_EXPR)
6665 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6666 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6668 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6669 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6670 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6672 if (arg00 != NULL_TREE
6673 /* This is only a win if casting to a signed type is cheap,
6674 i.e. when arg00's type is not a partial mode. */
6675 && type_has_mode_precision_p (TREE_TYPE (arg00)))
6677 tree stype = signed_type_for (TREE_TYPE (arg00));
6678 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6679 result_type,
6680 fold_convert_loc (loc, stype, arg00),
6681 build_int_cst (stype, 0));
6685 return NULL_TREE;
6688 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6689 equality/inequality test, then return a simplified form of
6690 the test using shifts and logical operations. Otherwise return
6691 NULL. TYPE is the desired result type. */
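/* Illustration (sketch): (x & 8) != 0 becomes ((x >> 3) & 1), and
   (x & 8) == 0 becomes (((x >> 3) ^ 1) & 1), flipping the tested bit
   before the final AND.  */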
6693 tree
6694 fold_single_bit_test (location_t loc, enum tree_code code,
6695 tree arg0, tree arg1, tree result_type)
6697 /* If this is testing a single bit, we can optimize the test. */
6698 if ((code == NE_EXPR || code == EQ_EXPR)
6699 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6700 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6702 tree inner = TREE_OPERAND (arg0, 0);
6703 tree type = TREE_TYPE (arg0);
6704 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6705 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
6706 int ops_unsigned;
6707 tree signed_type, unsigned_type, intermediate_type;
6708 tree tem, one;
6710 /* First, see if we can fold the single bit test into a sign-bit
6711 test. */
6712 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6713 result_type);
6714 if (tem)
6715 return tem;
6717 /* Otherwise we have (A & C) != 0 where C is a single bit,
6718 convert that into ((A >> C2) & 1), where C2 = log2(C).
6719 Similarly for (A & C) == 0. */
6721 /* If INNER is a right shift of a constant and it plus BITNUM does
6722 not overflow, adjust BITNUM and INNER. */
6723 if (TREE_CODE (inner) == RSHIFT_EXPR
6724 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6725 && bitnum < TYPE_PRECISION (type)
6726 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
6727 TYPE_PRECISION (type) - bitnum))
6729 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6730 inner = TREE_OPERAND (inner, 0);
6733 /* If we are going to be able to omit the AND below, we must do our
6734 operations as unsigned. If we must use the AND, we have a choice.
6735 Normally unsigned is faster, but for some machines signed is. */
6736 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
6737 && !flag_syntax_only) ? 0 : 1;
6739 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6740 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6741 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6742 inner = fold_convert_loc (loc, intermediate_type, inner);
6744 if (bitnum != 0)
6745 inner = build2 (RSHIFT_EXPR, intermediate_type,
6746 inner, size_int (bitnum));
6748 one = build_int_cst (intermediate_type, 1);
6750 if (code == EQ_EXPR)
6751 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6753 /* Put the AND last so it can combine with more things. */
6754 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6756 /* Make sure to return the proper type. */
6757 inner = fold_convert_loc (loc, result_type, inner);
6759 return inner;
6761 return NULL_TREE;
6764 /* Test whether it is preferable to swap two operands, ARG0 and
6765 ARG1, for example because ARG0 is an integer constant and ARG1
6766 isn't. */
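/* For example, with ARG0 == 3 and ARG1 == x this returns true, so
   callers of commutative folds canonicalize 3 + x into x + 3,
   placing the constant last.  */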
6768 bool
6769 tree_swap_operands_p (const_tree arg0, const_tree arg1)
6771 if (CONSTANT_CLASS_P (arg1))
6772 return 0;
6773 if (CONSTANT_CLASS_P (arg0))
6774 return 1;
6776 STRIP_NOPS (arg0);
6777 STRIP_NOPS (arg1);
6779 if (TREE_CONSTANT (arg1))
6780 return 0;
6781 if (TREE_CONSTANT (arg0))
6782 return 1;
6784 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6785 for commutative and comparison operators. Ensuring a canonical
6786 form allows the optimizers to find additional redundancies without
6787 having to explicitly check for both orderings. */
6788 if (TREE_CODE (arg0) == SSA_NAME
6789 && TREE_CODE (arg1) == SSA_NAME
6790 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6791 return 1;
6793 /* Put SSA_NAMEs last. */
6794 if (TREE_CODE (arg1) == SSA_NAME)
6795 return 0;
6796 if (TREE_CODE (arg0) == SSA_NAME)
6797 return 1;
6799 /* Put variables last. */
6800 if (DECL_P (arg1))
6801 return 0;
6802 if (DECL_P (arg0))
6803 return 1;
6805 return 0;
6809 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6810 means A >= Y && A != MAX, but in this case we know that
6811 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
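/* Sketch: with BOUND = (a < n) and INEQ = (a + 1 > i), the computed
   difference (a + 1) - a is 1, so INEQ is rewritten as a >= i.  */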
6813 static tree
6814 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6816 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6818 if (TREE_CODE (bound) == LT_EXPR)
6819 a = TREE_OPERAND (bound, 0);
6820 else if (TREE_CODE (bound) == GT_EXPR)
6821 a = TREE_OPERAND (bound, 1);
6822 else
6823 return NULL_TREE;
6825 typea = TREE_TYPE (a);
6826 if (!INTEGRAL_TYPE_P (typea)
6827 && !POINTER_TYPE_P (typea))
6828 return NULL_TREE;
6830 if (TREE_CODE (ineq) == LT_EXPR)
6832 a1 = TREE_OPERAND (ineq, 1);
6833 y = TREE_OPERAND (ineq, 0);
6835 else if (TREE_CODE (ineq) == GT_EXPR)
6837 a1 = TREE_OPERAND (ineq, 0);
6838 y = TREE_OPERAND (ineq, 1);
6840 else
6841 return NULL_TREE;
6843 if (TREE_TYPE (a1) != typea)
6844 return NULL_TREE;
6846 if (POINTER_TYPE_P (typea))
6848 /* Convert the pointer types into integer before taking the difference. */
6849 tree ta = fold_convert_loc (loc, ssizetype, a);
6850 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6851 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6853 else
6854 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6856 if (!diff || !integer_onep (diff))
6857 return NULL_TREE;
6859 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6862 /* Fold a sum or difference in which at least one operand is a multiplication.
6863 Returns the folded tree or NULL if no simplification could be made. */
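/* Besides the patterns listed in the body below, a common power-of-two
   factor is also extracted; e.g. (a sketch) i * 12 + j * 4 can be
   rewritten as (i * 3 + j) * 4, which helps multi-dimensional array
   indexing.  */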
6865 static tree
6866 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6867 tree arg0, tree arg1)
6869 tree arg00, arg01, arg10, arg11;
6870 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6872 /* (A * C) +- (B * C) -> (A+-B) * C.
6873 (A * C) +- A -> A * (C+-1).
6874 We are most concerned about the case where C is a constant,
6875 but other combinations show up during loop reduction. Since
6876 it is not difficult, try all four possibilities. */
6878 if (TREE_CODE (arg0) == MULT_EXPR)
6880 arg00 = TREE_OPERAND (arg0, 0);
6881 arg01 = TREE_OPERAND (arg0, 1);
6883 else if (TREE_CODE (arg0) == INTEGER_CST)
6885 arg00 = build_one_cst (type);
6886 arg01 = arg0;
6888 else
6890 /* We cannot generate constant 1 for fract. */
6891 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6892 return NULL_TREE;
6893 arg00 = arg0;
6894 arg01 = build_one_cst (type);
6896 if (TREE_CODE (arg1) == MULT_EXPR)
6898 arg10 = TREE_OPERAND (arg1, 0);
6899 arg11 = TREE_OPERAND (arg1, 1);
6901 else if (TREE_CODE (arg1) == INTEGER_CST)
6903 arg10 = build_one_cst (type);
6904 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6905 the purpose of this canonicalization. */
6906 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
6907 && negate_expr_p (arg1)
6908 && code == PLUS_EXPR)
6910 arg11 = negate_expr (arg1);
6911 code = MINUS_EXPR;
6913 else
6914 arg11 = arg1;
6916 else
6918 /* We cannot generate constant 1 for fract. */
6919 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6920 return NULL_TREE;
6921 arg10 = arg1;
6922 arg11 = build_one_cst (type);
6924 same = NULL_TREE;
6926 /* Prefer factoring a common non-constant. */
6927 if (operand_equal_p (arg00, arg10, 0))
6928 same = arg00, alt0 = arg01, alt1 = arg11;
6929 else if (operand_equal_p (arg01, arg11, 0))
6930 same = arg01, alt0 = arg00, alt1 = arg10;
6931 else if (operand_equal_p (arg00, arg11, 0))
6932 same = arg00, alt0 = arg01, alt1 = arg10;
6933 else if (operand_equal_p (arg01, arg10, 0))
6934 same = arg01, alt0 = arg00, alt1 = arg11;
6936 /* No identical multiplicands; see if we can find a common
6937 power-of-two factor in non-power-of-two multiplies. This
6938 can help in multi-dimensional array access. */
6939 else if (tree_fits_shwi_p (arg01)
6940 && tree_fits_shwi_p (arg11))
6942 HOST_WIDE_INT int01, int11, tmp;
6943 bool swap = false;
6944 tree maybe_same;
6945 int01 = tree_to_shwi (arg01);
6946 int11 = tree_to_shwi (arg11);
6948 /* Move min of absolute values to int11. */
6949 if (absu_hwi (int01) < absu_hwi (int11))
6951 tmp = int01, int01 = int11, int11 = tmp;
6952 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6953 maybe_same = arg01;
6954 swap = true;
6956 else
6957 maybe_same = arg11;
6959 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6960 /* The remainder should not be a constant, otherwise we
6961 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
6962 increase the number of multiplications necessary. */
6963 && TREE_CODE (arg10) != INTEGER_CST)
6965 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6966 build_int_cst (TREE_TYPE (arg00),
6967 int01 / int11));
6968 alt1 = arg10;
6969 same = maybe_same;
6970 if (swap)
6971 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6975 if (!same)
6976 return NULL_TREE;
6978 if (! INTEGRAL_TYPE_P (type)
6979 || TYPE_OVERFLOW_WRAPS (type)
6980 /* We are neither factoring zero nor minus one. */
6981 || TREE_CODE (same) == INTEGER_CST)
6982 return fold_build2_loc (loc, MULT_EXPR, type,
6983 fold_build2_loc (loc, code, type,
6984 fold_convert_loc (loc, type, alt0),
6985 fold_convert_loc (loc, type, alt1)),
6986 fold_convert_loc (loc, type, same));
6988 /* Same may be zero and thus the operation 'code' may overflow. Likewise
6989 same may be minus one and thus the multiplication may overflow. Perform
6990 the operations in an unsigned type. */
6991 tree utype = unsigned_type_for (type);
6992 tree tem = fold_build2_loc (loc, code, utype,
6993 fold_convert_loc (loc, utype, alt0),
6994 fold_convert_loc (loc, utype, alt1));
6995 /* If the sum evaluated to a constant that is not -INF, the multiplication
6996 cannot overflow. */
6997 if (TREE_CODE (tem) == INTEGER_CST
6998 && (wi::to_wide (tem)
6999 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7000 return fold_build2_loc (loc, MULT_EXPR, type,
7001 fold_convert (type, tem), same);
7003 return fold_convert_loc (loc, type,
7004 fold_build2_loc (loc, MULT_EXPR, utype, tem,
7005 fold_convert_loc (loc, utype, same)));
7008 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7009 specified by EXPR into the buffer PTR of length LEN bytes.
7010 Return the number of bytes placed in the buffer, or zero
7011 upon failure. */
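/* Sketch (assuming a 32-bit int on a little-endian target with
   UNITS_PER_WORD >= 4): encoding 0x11223344 stores the bytes
   44 33 22 11 into PTR; with OFF == 2 and LEN == 2 only 22 11
   are stored and 2 is returned.  */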
7013 static int
7014 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7016 tree type = TREE_TYPE (expr);
7017 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7018 int byte, offset, word, words;
7019 unsigned char value;
7021 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7022 return 0;
7023 if (off == -1)
7024 off = 0;
7026 if (ptr == NULL)
7027 /* Dry run. */
7028 return MIN (len, total_bytes - off);
7030 words = total_bytes / UNITS_PER_WORD;
7032 for (byte = 0; byte < total_bytes; byte++)
7034 int bitpos = byte * BITS_PER_UNIT;
7035 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7036 number of bytes. */
7037 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7039 if (total_bytes > UNITS_PER_WORD)
7041 word = byte / UNITS_PER_WORD;
7042 if (WORDS_BIG_ENDIAN)
7043 word = (words - 1) - word;
7044 offset = word * UNITS_PER_WORD;
7045 if (BYTES_BIG_ENDIAN)
7046 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7047 else
7048 offset += byte % UNITS_PER_WORD;
7050 else
7051 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7052 if (offset >= off && offset - off < len)
7053 ptr[offset - off] = value;
7055 return MIN (len, total_bytes - off);
7059 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7060 specified by EXPR into the buffer PTR of length LEN bytes.
7061 Return the number of bytes placed in the buffer, or zero
7062 upon failure. */
7064 static int
7065 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7067 tree type = TREE_TYPE (expr);
7068 scalar_mode mode = SCALAR_TYPE_MODE (type);
7069 int total_bytes = GET_MODE_SIZE (mode);
7070 FIXED_VALUE_TYPE value;
7071 tree i_value, i_type;
7073 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7074 return 0;
7076 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7078 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7079 return 0;
7081 value = TREE_FIXED_CST (expr);
7082 i_value = double_int_to_tree (i_type, value.data);
7084 return native_encode_int (i_value, ptr, len, off);
7088 /* Subroutine of native_encode_expr. Encode the REAL_CST
7089 specified by EXPR into the buffer PTR of length LEN bytes.
7090 Return the number of bytes placed in the buffer, or zero
7091 upon failure. */
7093 static int
7094 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7096 tree type = TREE_TYPE (expr);
7097 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7098 int byte, offset, word, words, bitpos;
7099 unsigned char value;
7101 /* There are always 32 bits in each long, no matter the size of
7102 the host's long. We handle floating point representations with
7103 up to 192 bits. */
7104 long tmp[6];
7106 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7107 return 0;
7108 if (off == -1)
7109 off = 0;
7111 if (ptr == NULL)
7112 /* Dry run. */
7113 return MIN (len, total_bytes - off);
7115 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7117 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7119 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7120 bitpos += BITS_PER_UNIT)
7122 byte = (bitpos / BITS_PER_UNIT) & 3;
7123 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7125 if (UNITS_PER_WORD < 4)
7127 word = byte / UNITS_PER_WORD;
7128 if (WORDS_BIG_ENDIAN)
7129 word = (words - 1) - word;
7130 offset = word * UNITS_PER_WORD;
7131 if (BYTES_BIG_ENDIAN)
7132 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7133 else
7134 offset += byte % UNITS_PER_WORD;
7136 else
7138 offset = byte;
7139 if (BYTES_BIG_ENDIAN)
7141 /* Reverse bytes within each long, or within the entire float
7142 if it's smaller than a long (for HFmode). */
7143 offset = MIN (3, total_bytes - 1) - offset;
7144 gcc_assert (offset >= 0);
7147 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7148 if (offset >= off
7149 && offset - off < len)
7150 ptr[offset - off] = value;
7152 return MIN (len, total_bytes - off);
7155 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7156 specified by EXPR into the buffer PTR of length LEN bytes.
7157 Return the number of bytes placed in the buffer, or zero
7158 upon failure. */
7160 static int
7161 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7163 int rsize, isize;
7164 tree part;
7166 part = TREE_REALPART (expr);
7167 rsize = native_encode_expr (part, ptr, len, off);
7168 if (off == -1 && rsize == 0)
7169 return 0;
7170 part = TREE_IMAGPART (expr);
7171 if (off != -1)
7172 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7173 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7174 len - rsize, off);
7175 if (off == -1 && isize != rsize)
7176 return 0;
7177 return rsize + isize;
7181 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7182 specified by EXPR into the buffer PTR of length LEN bytes.
7183 Return the number of bytes placed in the buffer, or zero
7184 upon failure. */
7186 static int
7187 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7189 unsigned i, count;
7190 int size, offset;
7191 tree itype, elem;
7193 offset = 0;
7194 count = VECTOR_CST_NELTS (expr);
7195 itype = TREE_TYPE (TREE_TYPE (expr));
7196 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7197 for (i = 0; i < count; i++)
7199 if (off >= size)
7201 off -= size;
7202 continue;
7204 elem = VECTOR_CST_ELT (expr, i);
7205 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7206 len - offset, off);
7207 if ((off == -1 && res != size) || res == 0)
7208 return 0;
7209 offset += res;
7210 if (offset >= len)
7211 return offset;
7212 if (off != -1)
7213 off = 0;
7215 return offset;
7219 /* Subroutine of native_encode_expr. Encode the STRING_CST
7220 specified by EXPR into the buffer PTR of length LEN bytes.
7221 Return the number of bytes placed in the buffer, or zero
7222 upon failure. */
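/* E.g. (sketch) for char a[8] initialized with "hi", the three bytes
   of the STRING_CST (including the trailing NUL) are copied and the
   remaining five bytes are zero-filled, for a total of 8.  */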
7224 static int
7225 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7227 tree type = TREE_TYPE (expr);
7229 /* Wide-char strings are encoded in target byte order, so encoding
7230 them natively is trivial. */
7231 if (BITS_PER_UNIT != CHAR_BIT
7232 || TREE_CODE (type) != ARRAY_TYPE
7233 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7234 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7235 return 0;
7237 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7238 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7239 return 0;
7240 if (off == -1)
7241 off = 0;
7242 if (ptr == NULL)
7243 /* Dry run. */;
7244 else if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7246 int written = 0;
7247 if (off < TREE_STRING_LENGTH (expr))
7249 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7250 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7252 memset (ptr + written, 0,
7253 MIN (total_bytes - written, len - written));
7255 else
7256 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7257 return MIN (total_bytes - off, len);
7261 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7262 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7263 buffer PTR of length LEN bytes. If PTR is NULL, don't actually store
7264 anything, just do a dry run. If OFF is not -1 then start
7265 the encoding at byte offset OFF and encode at most LEN bytes.
7266 Return the number of bytes placed in the buffer, or zero upon failure. */
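/* Typical use (a sketch):

     unsigned char buf[64];
     int n = native_encode_expr (expr, buf, sizeof buf, -1);

   where n == 0 means EXPR could not be encoded natively.  */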
7268 int
7269 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7271 /* We don't support starting at negative offset and -1 is special. */
7272 if (off < -1)
7273 return 0;
7275 switch (TREE_CODE (expr))
7277 case INTEGER_CST:
7278 return native_encode_int (expr, ptr, len, off);
7280 case REAL_CST:
7281 return native_encode_real (expr, ptr, len, off);
7283 case FIXED_CST:
7284 return native_encode_fixed (expr, ptr, len, off);
7286 case COMPLEX_CST:
7287 return native_encode_complex (expr, ptr, len, off);
7289 case VECTOR_CST:
7290 return native_encode_vector (expr, ptr, len, off);
7292 case STRING_CST:
7293 return native_encode_string (expr, ptr, len, off);
7295 default:
7296 return 0;
7301 /* Subroutine of native_interpret_expr. Interpret the contents of
7302 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7303 If the buffer cannot be interpreted, return NULL_TREE. */
7305 static tree
7306 native_interpret_int (tree type, const unsigned char *ptr, int len)
7308 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7310 if (total_bytes > len
7311 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7312 return NULL_TREE;
7314 wide_int result = wi::from_buffer (ptr, total_bytes);
7316 return wide_int_to_tree (type, result);
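/* Worked example (added): assuming a little-endian target, the two
   buffer bytes { 0x34, 0x12 } interpret as the 16-bit INTEGER_CST
   0x1234; wi::from_buffer reads the bytes in the target's byte
   order, not the host's.  */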
7320 /* Subroutine of native_interpret_expr. Interpret the contents of
7321 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7322 If the buffer cannot be interpreted, return NULL_TREE. */
7324 static tree
7325 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7327 scalar_mode mode = SCALAR_TYPE_MODE (type);
7328 int total_bytes = GET_MODE_SIZE (mode);
7329 double_int result;
7330 FIXED_VALUE_TYPE fixed_value;
7332 if (total_bytes > len
7333 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7334 return NULL_TREE;
7336 result = double_int::from_buffer (ptr, total_bytes);
7337 fixed_value = fixed_from_double_int (result, mode);
7339 return build_fixed (type, fixed_value);
7343 /* Subroutine of native_interpret_expr. Interpret the contents of
7344 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7345 If the buffer cannot be interpreted, return NULL_TREE. */
7347 static tree
7348 native_interpret_real (tree type, const unsigned char *ptr, int len)
7350 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
7351 int total_bytes = GET_MODE_SIZE (mode);
7352 unsigned char value;
7353 /* There are always 32 bits in each long, no matter the size of
7354 the host's long. We handle floating point representations with
7355 up to 192 bits. */
7356 REAL_VALUE_TYPE r;
7357 long tmp[6];
7359 if (total_bytes > len || total_bytes > 24)
7360 return NULL_TREE;
7361 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7363 memset (tmp, 0, sizeof (tmp));
7364 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7365 bitpos += BITS_PER_UNIT)
7367 /* Both OFFSET and BYTE index within a long;
7368 bitpos indexes the whole float. */
7369 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7370 if (UNITS_PER_WORD < 4)
7372 int word = byte / UNITS_PER_WORD;
7373 if (WORDS_BIG_ENDIAN)
7374 word = (words - 1) - word;
7375 offset = word * UNITS_PER_WORD;
7376 if (BYTES_BIG_ENDIAN)
7377 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7378 else
7379 offset += byte % UNITS_PER_WORD;
7381 else
7383 offset = byte;
7384 if (BYTES_BIG_ENDIAN)
7386 /* Reverse bytes within each long, or within the entire float
7387 if it's smaller than a long (for HFmode). */
7388 offset = MIN (3, total_bytes - 1) - offset;
7389 gcc_assert (offset >= 0);
7392 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7394 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7397 real_from_target (&r, tmp, mode);
7398 return build_real (type, r);
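/* Worked example (added): for a 32-bit float on a little-endian host
   and target, the loop above gathers the bytes in order, so a buffer
   of { 00 00 80 3f } reassembles into tmp[0] = 0x3f800000, i.e. 1.0f.
   On a big-endian target the BYTES_BIG_ENDIAN branch first reverses
   the bytes within each 32-bit group.  */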
7402 /* Subroutine of native_interpret_expr. Interpret the contents of
7403 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7404 If the buffer cannot be interpreted, return NULL_TREE. */
7406 static tree
7407 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7409 tree etype, rpart, ipart;
7410 int size;
7412 etype = TREE_TYPE (type);
7413 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7414 if (size * 2 > len)
7415 return NULL_TREE;
7416 rpart = native_interpret_expr (etype, ptr, size);
7417 if (!rpart)
7418 return NULL_TREE;
7419 ipart = native_interpret_expr (etype, ptr+size, size);
7420 if (!ipart)
7421 return NULL_TREE;
7422 return build_complex (type, rpart, ipart);
7426 /* Subroutine of native_interpret_expr. Interpret the contents of
7427 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7428 If the buffer cannot be interpreted, return NULL_TREE. */
7430 static tree
7431 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7433 tree etype, elem;
7434 int i, size, count;
7436 etype = TREE_TYPE (type);
7437 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7438 count = TYPE_VECTOR_SUBPARTS (type);
7439 if (size * count > len)
7440 return NULL_TREE;
7442 tree_vector_builder elements (type, count, 1);
7443 for (i = 0; i < count; ++i)
7445 elem = native_interpret_expr (etype, ptr+(i*size), size);
7446 if (!elem)
7447 return NULL_TREE;
7448 elements.quick_push (elem);
7450 return elements.build ();
7454 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7455 the buffer PTR of length LEN as a constant of type TYPE. For
7456 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7457 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7458 return NULL_TREE. */
7460 tree
7461 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7463 switch (TREE_CODE (type))
7465 case INTEGER_TYPE:
7466 case ENUMERAL_TYPE:
7467 case BOOLEAN_TYPE:
7468 case POINTER_TYPE:
7469 case REFERENCE_TYPE:
7470 return native_interpret_int (type, ptr, len);
7472 case REAL_TYPE:
7473 return native_interpret_real (type, ptr, len);
7475 case FIXED_POINT_TYPE:
7476 return native_interpret_fixed (type, ptr, len);
7478 case COMPLEX_TYPE:
7479 return native_interpret_complex (type, ptr, len);
7481 case VECTOR_TYPE:
7482 return native_interpret_vector (type, ptr, len);
7484 default:
7485 return NULL_TREE;
7489 /* Returns true if we can interpret the contents of a native encoding
7490 as TYPE. */
7492 static bool
7493 can_native_interpret_type_p (tree type)
7495 switch (TREE_CODE (type))
7497 case INTEGER_TYPE:
7498 case ENUMERAL_TYPE:
7499 case BOOLEAN_TYPE:
7500 case POINTER_TYPE:
7501 case REFERENCE_TYPE:
7502 case FIXED_POINT_TYPE:
7503 case REAL_TYPE:
7504 case COMPLEX_TYPE:
7505 case VECTOR_TYPE:
7506 return true;
7507 default:
7508 return false;
7513 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7514 TYPE at compile-time. If we're unable to perform the conversion
7515 return NULL_TREE. */
7517 static tree
7518 fold_view_convert_expr (tree type, tree expr)
7520 /* We support up to 512-bit values (for V8DFmode). */
7521 unsigned char buffer[64];
7522 int len;
7524 /* Check that the host and target are sane. */
7525 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7526 return NULL_TREE;
7528 len = native_encode_expr (expr, buffer, sizeof (buffer));
7529 if (len == 0)
7530 return NULL_TREE;
7532 return native_interpret_expr (type, buffer, len);
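/* Host-side analogue (an added sketch, not part of the original
   source): fold_view_convert_expr amounts to a compile-time memcpy
   between the two representations.  Assuming an IEEE little-endian
   host that matches the target:  */

static unsigned int
view_convert_example (float f)
{
  unsigned char buffer[sizeof (float)];
  unsigned int u;
  memcpy (buffer, &f, sizeof buffer);   /* like native_encode_expr */
  memcpy (&u, buffer, sizeof u);        /* like native_interpret_expr */
  return u;                             /* 1.0f yields 0x3f800000 */
}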
7535 /* Build an expression for the address of T. Folds away INDIRECT_REF
7536 to avoid confusing the gimplify process. */
7538 tree
7539 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7541 /* The size of the object is not relevant when talking about its address. */
7542 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7543 t = TREE_OPERAND (t, 0);
7545 if (TREE_CODE (t) == INDIRECT_REF)
7547 t = TREE_OPERAND (t, 0);
7549 if (TREE_TYPE (t) != ptrtype)
7550 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7552 else if (TREE_CODE (t) == MEM_REF
7553 && integer_zerop (TREE_OPERAND (t, 1)))
7554 return TREE_OPERAND (t, 0);
7555 else if (TREE_CODE (t) == MEM_REF
7556 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7557 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7558 TREE_OPERAND (t, 0),
7559 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7560 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7562 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7564 if (TREE_TYPE (t) != ptrtype)
7565 t = fold_convert_loc (loc, ptrtype, t);
7567 else
7568 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7570 return t;
7573 /* Build an expression for the address of T. */
7575 tree
7576 build_fold_addr_expr_loc (location_t loc, tree t)
7578 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7580 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7583 /* Fold a unary expression of code CODE and type TYPE with operand
7584 OP0. Return the folded expression if folding is successful.
7585 Otherwise, return NULL_TREE. */
7587 tree
7588 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7590 tree tem;
7591 tree arg0;
7592 enum tree_code_class kind = TREE_CODE_CLASS (code);
7594 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7595 && TREE_CODE_LENGTH (code) == 1);
7597 arg0 = op0;
7598 if (arg0)
7600 if (CONVERT_EXPR_CODE_P (code)
7601 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7603 /* Don't use STRIP_NOPS, because signedness of argument type
7604 matters. */
7605 STRIP_SIGN_NOPS (arg0);
7607 else
7609 /* Strip any conversions that don't change the mode. This
7610 is safe for every expression, except for a comparison
7611 expression because its signedness is derived from its
7612 operands.
7614 Note that this is done as an internal manipulation within
7615 the constant folder, in order to find the simplest
7616 representation of the arguments so that their form can be
7617 studied. In any case, the appropriate type conversions
7618 should be put back in the tree that will get out of the
7619 constant folder. */
7620 STRIP_NOPS (arg0);
7623 if (CONSTANT_CLASS_P (arg0))
7625 tree tem = const_unop (code, type, arg0);
7626 if (tem)
7628 if (TREE_TYPE (tem) != type)
7629 tem = fold_convert_loc (loc, type, tem);
7630 return tem;
7635 tem = generic_simplify (loc, code, type, op0);
7636 if (tem)
7637 return tem;
7639 if (TREE_CODE_CLASS (code) == tcc_unary)
7641 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7642 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7643 fold_build1_loc (loc, code, type,
7644 fold_convert_loc (loc, TREE_TYPE (op0),
7645 TREE_OPERAND (arg0, 1))));
7646 else if (TREE_CODE (arg0) == COND_EXPR)
7648 tree arg01 = TREE_OPERAND (arg0, 1);
7649 tree arg02 = TREE_OPERAND (arg0, 2);
7650 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7651 arg01 = fold_build1_loc (loc, code, type,
7652 fold_convert_loc (loc,
7653 TREE_TYPE (op0), arg01));
7654 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7655 arg02 = fold_build1_loc (loc, code, type,
7656 fold_convert_loc (loc,
7657 TREE_TYPE (op0), arg02));
7658 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7659 arg01, arg02);
7661 /* If this was a conversion, and all we did was to move into
7662 inside the COND_EXPR, bring it back out. But leave it if
7663 it is a conversion from integer to integer and the
7664 result precision is no wider than a word since such a
7665 conversion is cheap and may be optimized away by combine,
7666 while it couldn't if it were outside the COND_EXPR. Then return
7667 so we don't get into an infinite recursion loop taking the
7668 conversion out and then back in. */
7670 if ((CONVERT_EXPR_CODE_P (code)
7671 || code == NON_LVALUE_EXPR)
7672 && TREE_CODE (tem) == COND_EXPR
7673 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7674 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7675 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7676 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7677 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7678 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7679 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7680 && (INTEGRAL_TYPE_P
7681 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7682 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7683 || flag_syntax_only))
7684 tem = build1_loc (loc, code, type,
7685 build3 (COND_EXPR,
7686 TREE_TYPE (TREE_OPERAND
7687 (TREE_OPERAND (tem, 1), 0)),
7688 TREE_OPERAND (tem, 0),
7689 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7690 TREE_OPERAND (TREE_OPERAND (tem, 2),
7691 0)));
7692 return tem;
7696 switch (code)
7698 case NON_LVALUE_EXPR:
7699 if (!maybe_lvalue_p (op0))
7700 return fold_convert_loc (loc, type, op0);
7701 return NULL_TREE;
7703 CASE_CONVERT:
7704 case FLOAT_EXPR:
7705 case FIX_TRUNC_EXPR:
7706 if (COMPARISON_CLASS_P (op0))
7708 /* If we have (type) (a CMP b) and type is an integral type, return
7709 new expression involving the new type. Canonicalize
7710 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7711 non-integral type.
7712 Do not fold the result as that would not simplify further, also
7713 folding again results in recursion. */
7714 if (TREE_CODE (type) == BOOLEAN_TYPE)
7715 return build2_loc (loc, TREE_CODE (op0), type,
7716 TREE_OPERAND (op0, 0),
7717 TREE_OPERAND (op0, 1));
7718 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7719 && TREE_CODE (type) != VECTOR_TYPE)
7720 return build3_loc (loc, COND_EXPR, type, op0,
7721 constant_boolean_node (true, type),
7722 constant_boolean_node (false, type));
7725 /* Handle (T *)&A.B.C for A being of type T and B and C
7726 living at offset zero. This occurs frequently in
7727 C++ upcasting and then accessing the base. */
7728 if (TREE_CODE (op0) == ADDR_EXPR
7729 && POINTER_TYPE_P (type)
7730 && handled_component_p (TREE_OPERAND (op0, 0)))
7732 HOST_WIDE_INT bitsize, bitpos;
7733 tree offset;
7734 machine_mode mode;
7735 int unsignedp, reversep, volatilep;
7736 tree base
7737 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7738 &offset, &mode, &unsignedp, &reversep,
7739 &volatilep);
7740 /* If the reference was to a (constant) zero offset, we can use
7741 the address of the base if it has the same base type
7742 as the result type and the pointer type is unqualified. */
7743 if (! offset && bitpos == 0
7744 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7745 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7746 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7747 return fold_convert_loc (loc, type,
7748 build_fold_addr_expr_loc (loc, base));
7751 if (TREE_CODE (op0) == MODIFY_EXPR
7752 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7753 /* Detect assigning a bitfield. */
7754 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7755 && DECL_BIT_FIELD
7756 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7758 /* Don't leave an assignment inside a conversion
7759 unless assigning a bitfield. */
7760 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7761 /* First do the assignment, then return converted constant. */
7762 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7763 TREE_NO_WARNING (tem) = 1;
7764 TREE_USED (tem) = 1;
7765 return tem;
7768 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7769 constants (if x has signed type, the sign bit cannot be set
7770 in c). This folds extension into the BIT_AND_EXPR.
7771 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7772 very likely don't have maximal range for their precision and this
7773 transformation effectively doesn't preserve non-maximal ranges. */
7774 if (TREE_CODE (type) == INTEGER_TYPE
7775 && TREE_CODE (op0) == BIT_AND_EXPR
7776 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7778 tree and_expr = op0;
7779 tree and0 = TREE_OPERAND (and_expr, 0);
7780 tree and1 = TREE_OPERAND (and_expr, 1);
7781 int change = 0;
7783 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7784 || (TYPE_PRECISION (type)
7785 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7786 change = 1;
7787 else if (TYPE_PRECISION (TREE_TYPE (and1))
7788 <= HOST_BITS_PER_WIDE_INT
7789 && tree_fits_uhwi_p (and1))
7791 unsigned HOST_WIDE_INT cst;
7793 cst = tree_to_uhwi (and1);
7794 cst &= HOST_WIDE_INT_M1U
7795 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7796 change = (cst == 0);
7797 if (change
7798 && !flag_syntax_only
7799 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
7800 == ZERO_EXTEND))
7802 tree uns = unsigned_type_for (TREE_TYPE (and0));
7803 and0 = fold_convert_loc (loc, uns, and0);
7804 and1 = fold_convert_loc (loc, uns, and1);
7807 if (change)
7809 tem = force_fit_type (type, wi::to_widest (and1), 0,
7810 TREE_OVERFLOW (and1));
7811 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7812 fold_convert_loc (loc, type, and0), tem);
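/* Worked example (added): for signed short X, the rule above lets
   (int)(X & 0x7fff) fold to (int)X & 0x7fff, because the mask has
   the sign bit of the narrower type clear, so sign-extending X
   first cannot change any bit the mask keeps.  */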
7816 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7817 cast (T1)X will fold away. We assume that this happens when X itself
7818 is a cast. */
7819 if (POINTER_TYPE_P (type)
7820 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7821 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7823 tree arg00 = TREE_OPERAND (arg0, 0);
7824 tree arg01 = TREE_OPERAND (arg0, 1);
7826 return fold_build_pointer_plus_loc
7827 (loc, fold_convert_loc (loc, type, arg00), arg01);
7830 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7831 of the same precision, and X is an integer type not narrower than
7832 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7833 if (INTEGRAL_TYPE_P (type)
7834 && TREE_CODE (op0) == BIT_NOT_EXPR
7835 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7836 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7837 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7839 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7840 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7841 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7842 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7843 fold_convert_loc (loc, type, tem));
7846 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7847 type of X and Y (integer types only). */
7848 if (INTEGRAL_TYPE_P (type)
7849 && TREE_CODE (op0) == MULT_EXPR
7850 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7851 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7853 /* Be careful not to introduce new overflows. */
7854 tree mult_type;
7855 if (TYPE_OVERFLOW_WRAPS (type))
7856 mult_type = type;
7857 else
7858 mult_type = unsigned_type_for (type);
7860 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7862 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7863 fold_convert_loc (loc, mult_type,
7864 TREE_OPERAND (op0, 0)),
7865 fold_convert_loc (loc, mult_type,
7866 TREE_OPERAND (op0, 1)));
7867 return fold_convert_loc (loc, type, tem);
7871 return NULL_TREE;
7873 case VIEW_CONVERT_EXPR:
7874 if (TREE_CODE (op0) == MEM_REF)
7876 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
7877 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
7878 tem = fold_build2_loc (loc, MEM_REF, type,
7879 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7880 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7881 return tem;
7884 return NULL_TREE;
7886 case NEGATE_EXPR:
7887 tem = fold_negate_expr (loc, arg0);
7888 if (tem)
7889 return fold_convert_loc (loc, type, tem);
7890 return NULL_TREE;
7892 case ABS_EXPR:
7893 /* Convert fabs((double)float) into (double)fabsf(float). */
7894 if (TREE_CODE (arg0) == NOP_EXPR
7895 && TREE_CODE (type) == REAL_TYPE)
7897 tree targ0 = strip_float_extensions (arg0);
7898 if (targ0 != arg0)
7899 return fold_convert_loc (loc, type,
7900 fold_build1_loc (loc, ABS_EXPR,
7901 TREE_TYPE (targ0),
7902 targ0));
7904 return NULL_TREE;
7906 case BIT_NOT_EXPR:
7907 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7908 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7909 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7910 fold_convert_loc (loc, type,
7911 TREE_OPERAND (arg0, 0)))))
7912 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7913 fold_convert_loc (loc, type,
7914 TREE_OPERAND (arg0, 1)));
7915 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7916 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7917 fold_convert_loc (loc, type,
7918 TREE_OPERAND (arg0, 1)))))
7919 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7920 fold_convert_loc (loc, type,
7921 TREE_OPERAND (arg0, 0)), tem);
7923 return NULL_TREE;
7925 case TRUTH_NOT_EXPR:
7926 /* Note that the operand of this must be an int
7927 and its values must be 0 or 1.
7928 ("true" is a fixed value perhaps depending on the language,
7929 but we don't handle values other than 1 correctly yet.) */
7930 tem = fold_truth_not_expr (loc, arg0);
7931 if (!tem)
7932 return NULL_TREE;
7933 return fold_convert_loc (loc, type, tem);
7935 case INDIRECT_REF:
7936 /* Fold *&X to X if X is an lvalue. */
7937 if (TREE_CODE (op0) == ADDR_EXPR)
7939 tree op00 = TREE_OPERAND (op0, 0);
7940 if ((VAR_P (op00)
7941 || TREE_CODE (op00) == PARM_DECL
7942 || TREE_CODE (op00) == RESULT_DECL)
7943 && !TREE_READONLY (op00))
7944 return op00;
7946 return NULL_TREE;
7948 default:
7949 return NULL_TREE;
7950 } /* switch (code) */
7954 /* If the operation was a conversion do _not_ mark a resulting constant
7955 with TREE_OVERFLOW if the original constant was not. These conversions
7956 have implementation-defined behavior and retaining the TREE_OVERFLOW
7957 flag here would confuse later passes such as VRP. */
7958 tree
7959 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7960 tree type, tree op0)
7962 tree res = fold_unary_loc (loc, code, type, op0);
7963 if (res
7964 && TREE_CODE (res) == INTEGER_CST
7965 && TREE_CODE (op0) == INTEGER_CST
7966 && CONVERT_EXPR_CODE_P (code))
7967 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7969 return res;
7972 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7973 operands OP0 and OP1. LOC is the location of the resulting expression.
7974 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
7975 Return the folded expression if folding is successful. Otherwise,
7976 return NULL_TREE. */
7977 static tree
7978 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7979 tree arg0, tree arg1, tree op0, tree op1)
7981 tree tem;
7983 /* We only do these simplifications if we are optimizing. */
7984 if (!optimize)
7985 return NULL_TREE;
7987 /* Check for things like (A || B) && (A || C). We can convert this
7988 to A || (B && C). Note that either operator can be any of the four
7989 truth and/or operations and the transformation will still be
7990 valid. Also note that we only care about order for the
7991 ANDIF and ORIF operators. If B contains side effects, this
7992 might change the truth-value of A. */
7993 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7994 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7995 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7996 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7997 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7998 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8000 tree a00 = TREE_OPERAND (arg0, 0);
8001 tree a01 = TREE_OPERAND (arg0, 1);
8002 tree a10 = TREE_OPERAND (arg1, 0);
8003 tree a11 = TREE_OPERAND (arg1, 1);
8004 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8005 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8006 && (code == TRUTH_AND_EXPR
8007 || code == TRUTH_OR_EXPR));
8009 if (operand_equal_p (a00, a10, 0))
8010 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8011 fold_build2_loc (loc, code, type, a01, a11));
8012 else if (commutative && operand_equal_p (a00, a11, 0))
8013 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8014 fold_build2_loc (loc, code, type, a01, a10));
8015 else if (commutative && operand_equal_p (a01, a10, 0))
8016 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8017 fold_build2_loc (loc, code, type, a00, a11));
8019 /* This case is tricky because we must either have commutative
8020 operators or else A10 must not have side-effects. */
8022 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8023 && operand_equal_p (a01, a11, 0))
8024 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8025 fold_build2_loc (loc, code, type, a00, a10),
8026 a01);
8029 /* See if we can build a range comparison. */
8030 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8031 return tem;
8033 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8034 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8036 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8037 if (tem)
8038 return fold_build2_loc (loc, code, type, tem, arg1);
8041 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8042 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8044 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8045 if (tem)
8046 return fold_build2_loc (loc, code, type, arg0, tem);
8049 /* Check for the possibility of merging component references. If our
8050 lhs is another similar operation, try to merge its rhs with our
8051 rhs. Then try to merge our lhs and rhs. */
8052 if (TREE_CODE (arg0) == code
8053 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8054 TREE_OPERAND (arg0, 1), arg1)))
8055 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8057 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8058 return tem;
8060 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8061 && !flag_sanitize_coverage
8062 && (code == TRUTH_AND_EXPR
8063 || code == TRUTH_ANDIF_EXPR
8064 || code == TRUTH_OR_EXPR
8065 || code == TRUTH_ORIF_EXPR))
8067 enum tree_code ncode, icode;
8069 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8070 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8071 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8073 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8074 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8075 We don't want to pack more than two leaves to a non-IF AND/OR
8076 expression.
8077 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8078 equal to IF-CODE, then we don't want to add right-hand operand.
8079 If the inner right-hand side of left-hand operand has
8080 side-effects, or isn't simple, then we can't add to it,
8081 as otherwise we might destroy the if-sequence. */
8082 if (TREE_CODE (arg0) == icode
8083 && simple_operand_p_2 (arg1)
8084 /* Needed for sequence points to handle trappings, and
8085 side-effects. */
8086 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8088 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8089 arg1);
8090 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8091 tem);
8093 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8094 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8095 else if (TREE_CODE (arg1) == icode
8096 && simple_operand_p_2 (arg0)
8097 /* Needed for sequence points to handle trappings, and
8098 side-effects. */
8099 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8101 tem = fold_build2_loc (loc, ncode, type,
8102 arg0, TREE_OPERAND (arg1, 0));
8103 return fold_build2_loc (loc, icode, type, tem,
8104 TREE_OPERAND (arg1, 1));
8106 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8107 into (A OR B).
8108 For sequence point consistency, we need to check for trapping,
8109 and side-effects. */
8110 else if (code == icode && simple_operand_p_2 (arg0)
8111 && simple_operand_p_2 (arg1))
8112 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8115 return NULL_TREE;
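/* Worked examples (added commentary): the distribution above turns

     (a || b) && (a || c)   into   a || (b && c)

   provided b has no side effects, and, when
   LOGICAL_OP_NON_SHORT_CIRCUIT is set and the operands pass
   simple_operand_p_2, the packing turns

     (a ANDIF b) AND[-IF] c   into   a ANDIF (b AND c)

   where the inner AND is the non-short-circuit TRUTH_AND_EXPR,
   evaluating b and c together without an extra branch; this is only
   valid because both were checked to be trap-free and free of side
   effects.  */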
8118 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8119 by changing CODE to reduce the magnitude of constants involved in
8120 ARG0 of the comparison.
8121 Returns a canonicalized comparison tree if a simplification was
8122 possible, otherwise returns NULL_TREE.
8123 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8124 valid if signed overflow is undefined. */
8126 static tree
8127 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8128 tree arg0, tree arg1,
8129 bool *strict_overflow_p)
8131 enum tree_code code0 = TREE_CODE (arg0);
8132 tree t, cst0 = NULL_TREE;
8133 int sgn0;
8135 /* Match A +- CST code arg1. We can change this only if overflow
8136 is undefined. */
8137 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8138 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8139 /* In principle pointers also have undefined overflow behavior,
8140 but that causes problems elsewhere. */
8141 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8142 && (code0 == MINUS_EXPR
8143 || code0 == PLUS_EXPR)
8144 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8145 return NULL_TREE;
8147 /* Identify the constant in arg0 and its sign. */
8148 cst0 = TREE_OPERAND (arg0, 1);
8149 sgn0 = tree_int_cst_sgn (cst0);
8151 /* Overflowed constants and zero will cause problems. */
8152 if (integer_zerop (cst0)
8153 || TREE_OVERFLOW (cst0))
8154 return NULL_TREE;
8156 /* See if we can reduce the magnitude of the constant in
8157 arg0 by changing the comparison code. */
8158 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8159 if (code == LT_EXPR
8160 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8161 code = LE_EXPR;
8162 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8163 else if (code == GT_EXPR
8164 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8165 code = GE_EXPR;
8166 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8167 else if (code == LE_EXPR
8168 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8169 code = LT_EXPR;
8170 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8171 else if (code == GE_EXPR
8172 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8173 code = GT_EXPR;
8174 else
8175 return NULL_TREE;
8176 *strict_overflow_p = true;
8178 /* Now build the constant reduced in magnitude. But not if that
8179 would produce one outside of its type's range. */
8180 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8181 && ((sgn0 == 1
8182 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8183 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8184 || (sgn0 == -1
8185 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8186 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8187 return NULL_TREE;
8189 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8190 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8191 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8192 t = fold_convert (TREE_TYPE (arg1), t);
8194 return fold_build2_loc (loc, code, type, t, arg1);
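/* Worked example (added): assuming signed overflow is undefined,
   x - 10 < y canonicalizes to x - 9 <= y, shrinking the magnitude of
   the constant by one; the guard above refuses the step whenever the
   reduced constant would fall outside the type's range.  */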
8197 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8198 overflow further. Try to decrease the magnitude of constants involved
8199 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8200 and put sole constants at the second argument position.
8201 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8203 static tree
8204 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8205 tree arg0, tree arg1)
8207 tree t;
8208 bool strict_overflow_p;
8209 const char * const warnmsg = G_("assuming signed overflow does not occur "
8210 "when reducing constant in comparison");
8212 /* Try canonicalization by simplifying arg0. */
8213 strict_overflow_p = false;
8214 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8215 &strict_overflow_p);
8216 if (t)
8218 if (strict_overflow_p)
8219 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8220 return t;
8223 /* Try canonicalization by simplifying arg1 using the swapped
8224 comparison. */
8225 code = swap_tree_comparison (code);
8226 strict_overflow_p = false;
8227 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8228 &strict_overflow_p);
8229 if (t && strict_overflow_p)
8230 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8231 return t;
8234 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8235 space. This is used to avoid issuing overflow warnings for
8236 expressions like &p->x which cannot wrap. */
8238 static bool
8239 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8241 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8242 return true;
8244 if (bitpos < 0)
8245 return true;
8247 wide_int wi_offset;
8248 int precision = TYPE_PRECISION (TREE_TYPE (base));
8249 if (offset == NULL_TREE)
8250 wi_offset = wi::zero (precision);
8251 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8252 return true;
8253 else
8254 wi_offset = wi::to_wide (offset);
8256 bool overflow;
8257 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8258 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8259 if (overflow)
8260 return true;
8262 if (!wi::fits_uhwi_p (total))
8263 return true;
8265 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8266 if (size <= 0)
8267 return true;
8269 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8270 array. */
8271 if (TREE_CODE (base) == ADDR_EXPR)
8273 HOST_WIDE_INT base_size;
8275 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8276 if (base_size > 0 && size < base_size)
8277 size = base_size;
8280 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
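/* Worked example (added): for char a[16], the address &a[4] has
   BASE == &a, OFFSET == NULL_TREE and BITPOS == 32, so TOTAL is 4,
   which does not exceed the 16-byte object size: the address cannot
   wrap and the function returns false.  A symbolic or overflowed
   offset instead makes it conservatively return true.  */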
8283 /* Return a positive integer when the symbol DECL is known to have
8284 a nonzero address, zero when it's known not to (e.g., it's a weak
8285 symbol), and a negative integer when the symbol is not yet in the
8286 symbol table and so whether or not its address is zero is unknown.
8287 For function-local objects, always return a positive integer. */
8288 static int
8289 maybe_nonzero_address (tree decl)
8291 if (DECL_P (decl) && decl_in_symtab_p (decl))
8292 if (struct symtab_node *symbol = symtab_node::get_create (decl))
8293 return symbol->nonzero_address ();
8295 /* Function local objects are never NULL. */
8296 if (DECL_P (decl)
8297 && (DECL_CONTEXT (decl)
8298 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8299 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8300 return 1;
8302 return -1;
8305 /* Subroutine of fold_binary. This routine performs all of the
8306 transformations that are common to the equality/inequality
8307 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8308 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8309 fold_binary should use fold_binary itself, not this routine. Fold a comparison with
8310 tree code CODE and type TYPE with operands OP0 and OP1. Return
8311 the folded comparison or NULL_TREE. */
8313 static tree
8314 fold_comparison (location_t loc, enum tree_code code, tree type,
8315 tree op0, tree op1)
8317 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8318 tree arg0, arg1, tem;
8320 arg0 = op0;
8321 arg1 = op1;
8323 STRIP_SIGN_NOPS (arg0);
8324 STRIP_SIGN_NOPS (arg1);
8326 /* For comparisons of pointers we can decompose it to a compile time
8327 comparison of the base objects and the offsets into the object.
8328 This requires at least one operand being an ADDR_EXPR or a
8329 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8330 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8331 && (TREE_CODE (arg0) == ADDR_EXPR
8332 || TREE_CODE (arg1) == ADDR_EXPR
8333 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8334 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8336 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8337 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8338 machine_mode mode;
8339 int volatilep, reversep, unsignedp;
8340 bool indirect_base0 = false, indirect_base1 = false;
8342 /* Get base and offset for the access. Strip ADDR_EXPR for
8343 get_inner_reference, but put it back by stripping INDIRECT_REF
8344 off the base object if possible. indirect_baseN will be true
8345 if baseN is not an address but refers to the object itself. */
8346 base0 = arg0;
8347 if (TREE_CODE (arg0) == ADDR_EXPR)
8349 base0
8350 = get_inner_reference (TREE_OPERAND (arg0, 0),
8351 &bitsize, &bitpos0, &offset0, &mode,
8352 &unsignedp, &reversep, &volatilep);
8353 if (TREE_CODE (base0) == INDIRECT_REF)
8354 base0 = TREE_OPERAND (base0, 0);
8355 else
8356 indirect_base0 = true;
8358 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8360 base0 = TREE_OPERAND (arg0, 0);
8361 STRIP_SIGN_NOPS (base0);
8362 if (TREE_CODE (base0) == ADDR_EXPR)
8364 base0
8365 = get_inner_reference (TREE_OPERAND (base0, 0),
8366 &bitsize, &bitpos0, &offset0, &mode,
8367 &unsignedp, &reversep, &volatilep);
8368 if (TREE_CODE (base0) == INDIRECT_REF)
8369 base0 = TREE_OPERAND (base0, 0);
8370 else
8371 indirect_base0 = true;
8373 if (offset0 == NULL_TREE || integer_zerop (offset0))
8374 offset0 = TREE_OPERAND (arg0, 1);
8375 else
8376 offset0 = size_binop (PLUS_EXPR, offset0,
8377 TREE_OPERAND (arg0, 1));
8378 if (TREE_CODE (offset0) == INTEGER_CST)
8380 offset_int tem = wi::sext (wi::to_offset (offset0),
8381 TYPE_PRECISION (sizetype));
8382 tem <<= LOG2_BITS_PER_UNIT;
8383 tem += bitpos0;
8384 if (wi::fits_shwi_p (tem))
8386 bitpos0 = tem.to_shwi ();
8387 offset0 = NULL_TREE;
8392 base1 = arg1;
8393 if (TREE_CODE (arg1) == ADDR_EXPR)
8395 base1
8396 = get_inner_reference (TREE_OPERAND (arg1, 0),
8397 &bitsize, &bitpos1, &offset1, &mode,
8398 &unsignedp, &reversep, &volatilep);
8399 if (TREE_CODE (base1) == INDIRECT_REF)
8400 base1 = TREE_OPERAND (base1, 0);
8401 else
8402 indirect_base1 = true;
8404 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8406 base1 = TREE_OPERAND (arg1, 0);
8407 STRIP_SIGN_NOPS (base1);
8408 if (TREE_CODE (base1) == ADDR_EXPR)
8410 base1
8411 = get_inner_reference (TREE_OPERAND (base1, 0),
8412 &bitsize, &bitpos1, &offset1, &mode,
8413 &unsignedp, &reversep, &volatilep);
8414 if (TREE_CODE (base1) == INDIRECT_REF)
8415 base1 = TREE_OPERAND (base1, 0);
8416 else
8417 indirect_base1 = true;
8419 if (offset1 == NULL_TREE || integer_zerop (offset1))
8420 offset1 = TREE_OPERAND (arg1, 1);
8421 else
8422 offset1 = size_binop (PLUS_EXPR, offset1,
8423 TREE_OPERAND (arg1, 1));
8424 if (TREE_CODE (offset1) == INTEGER_CST)
8426 offset_int tem = wi::sext (wi::to_offset (offset1),
8427 TYPE_PRECISION (sizetype));
8428 tem <<= LOG2_BITS_PER_UNIT;
8429 tem += bitpos1;
8430 if (wi::fits_shwi_p (tem))
8432 bitpos1 = tem.to_shwi ();
8433 offset1 = NULL_TREE;
8438 /* If we have equivalent bases we might be able to simplify. */
8439 if (indirect_base0 == indirect_base1
8440 && operand_equal_p (base0, base1,
8441 indirect_base0 ? OEP_ADDRESS_OF : 0))
8443 /* We can fold this expression to a constant if the non-constant
8444 offset parts are equal. */
8445 if (offset0 == offset1
8446 || (offset0 && offset1
8447 && operand_equal_p (offset0, offset1, 0)))
8449 if (!equality_code
8450 && bitpos0 != bitpos1
8451 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8452 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8453 fold_overflow_warning (("assuming pointer wraparound does not "
8454 "occur when comparing P +- C1 with "
8455 "P +- C2"),
8456 WARN_STRICT_OVERFLOW_CONDITIONAL);
8458 switch (code)
8460 case EQ_EXPR:
8461 return constant_boolean_node (bitpos0 == bitpos1, type);
8462 case NE_EXPR:
8463 return constant_boolean_node (bitpos0 != bitpos1, type);
8464 case LT_EXPR:
8465 return constant_boolean_node (bitpos0 < bitpos1, type);
8466 case LE_EXPR:
8467 return constant_boolean_node (bitpos0 <= bitpos1, type);
8468 case GE_EXPR:
8469 return constant_boolean_node (bitpos0 >= bitpos1, type);
8470 case GT_EXPR:
8471 return constant_boolean_node (bitpos0 > bitpos1, type);
8472 default:;
8475 /* We can simplify the comparison to a comparison of the variable
8476 offset parts if the constant offset parts are equal.
8477 Be careful to use signed sizetype here because otherwise we
8478 mess with array offsets in the wrong way. This is possible
8479 because pointer arithmetic is restricted to remain within an
8480 object and overflow on pointer differences is undefined as of
8481 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8482 else if (bitpos0 == bitpos1)
8484 /* By converting to signed sizetype we cover middle-end pointer
8485 arithmetic which operates on unsigned pointer types of size
8486 type size and ARRAY_REF offsets which are properly sign or
8487 zero extended from their type in case it is narrower than
8488 sizetype. */
8489 if (offset0 == NULL_TREE)
8490 offset0 = build_int_cst (ssizetype, 0);
8491 else
8492 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8493 if (offset1 == NULL_TREE)
8494 offset1 = build_int_cst (ssizetype, 0);
8495 else
8496 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8498 if (!equality_code
8499 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8500 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8501 fold_overflow_warning (("assuming pointer wraparound does not "
8502 "occur when comparing P +- C1 with "
8503 "P +- C2"),
8504 WARN_STRICT_OVERFLOW_COMPARISON);
8506 return fold_build2_loc (loc, code, type, offset0, offset1);
8509 /* For equal offsets we can simplify to a comparison of the
8510 base addresses. */
8511 else if (bitpos0 == bitpos1
8512 && (indirect_base0
8513 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8514 && (indirect_base1
8515 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8516 && ((offset0 == offset1)
8517 || (offset0 && offset1
8518 && operand_equal_p (offset0, offset1, 0))))
8520 if (indirect_base0)
8521 base0 = build_fold_addr_expr_loc (loc, base0);
8522 if (indirect_base1)
8523 base1 = build_fold_addr_expr_loc (loc, base1);
8524 return fold_build2_loc (loc, code, type, base0, base1);
8526 /* Comparison between an ordinary (non-weak) symbol and a null
8527 pointer can be eliminated since such symbols must have a non
8528 null address. In C, relational expressions between pointers
8529 to objects and null pointers are undefined. The results
8530 below follow the C++ rules with the additional property that
8531 every object pointer compares greater than a null pointer.
8532 */
8533 else if (((DECL_P (base0)
8534 && maybe_nonzero_address (base0) > 0
8535 /* Avoid folding references to struct members at offset 0 to
8536 prevent tests like '&ptr->firstmember == 0' from getting
8537 eliminated. When ptr is null, although the -> expression
8538 is strictly speaking invalid, GCC retains it as a matter
8539 of QoI. See PR c/44555. */
8540 && (offset0 == NULL_TREE && bitpos0 != 0))
8541 || CONSTANT_CLASS_P (base0))
8542 && indirect_base0
8543 /* The caller guarantees that when one of the arguments is
8544 constant (i.e., null in this case) it is second. */
8545 && integer_zerop (arg1))
8547 switch (code)
8549 case EQ_EXPR:
8550 case LE_EXPR:
8551 case LT_EXPR:
8552 return constant_boolean_node (false, type);
8553 case GE_EXPR:
8554 case GT_EXPR:
8555 case NE_EXPR:
8556 return constant_boolean_node (true, type);
8557 default:
8558 gcc_unreachable ();
8563 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8564 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8565 the resulting offset is smaller in absolute value than the
8566 original one and has the same sign. */
8567 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8568 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8569 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8570 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8571 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8572 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8573 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8574 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8576 tree const1 = TREE_OPERAND (arg0, 1);
8577 tree const2 = TREE_OPERAND (arg1, 1);
8578 tree variable1 = TREE_OPERAND (arg0, 0);
8579 tree variable2 = TREE_OPERAND (arg1, 0);
8580 tree cst;
8581 const char * const warnmsg = G_("assuming signed overflow does not "
8582 "occur when combining constants around "
8583 "a comparison");
8585 /* Put the constant on the side where it doesn't overflow and is
8586 of lower absolute value and of the same sign as before. */
8587 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8588 ? MINUS_EXPR : PLUS_EXPR,
8589 const2, const1);
8590 if (!TREE_OVERFLOW (cst)
8591 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8592 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8594 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8595 return fold_build2_loc (loc, code, type,
8596 variable1,
8597 fold_build2_loc (loc, TREE_CODE (arg1),
8598 TREE_TYPE (arg1),
8599 variable2, cst));
8602 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8603 ? MINUS_EXPR : PLUS_EXPR,
8604 const1, const2);
8605 if (!TREE_OVERFLOW (cst)
8606 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8607 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8609 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8610 return fold_build2_loc (loc, code, type,
8611 fold_build2_loc (loc, TREE_CODE (arg0),
8612 TREE_TYPE (arg0),
8613 variable1, cst),
8614 variable2);
8618 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8619 if (tem)
8620 return tem;
8622 /* If we are comparing an expression that just has comparisons
8623 of two integer values, arithmetic expressions of those comparisons,
8624 and constants, we can simplify it. There are only three cases
8625 to check: the two values can either be equal, the first can be
8626 greater, or the second can be greater. Fold the expression for
8627 those three values. Since each value must be 0 or 1, we have
8628 eight possibilities, each of which corresponds to the constant 0
8629 or 1 or one of the six possible comparisons.
8631 This handles common cases like (a > b) == 0 but also handles
8632 expressions like ((x > y) - (y > x)) > 0, which supposedly
8633 occur in macroized code. */
8635 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8637 tree cval1 = 0, cval2 = 0;
8638 int save_p = 0;
8640 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8641 /* Don't handle degenerate cases here; they should already
8642 have been handled anyway. */
8643 && cval1 != 0 && cval2 != 0
8644 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8645 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8646 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8647 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8648 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8649 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8650 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8652 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8653 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8655 /* We can't just pass T to eval_subst in case cval1 or cval2
8656 was the same as ARG1. */
8658 tree high_result
8659 = fold_build2_loc (loc, code, type,
8660 eval_subst (loc, arg0, cval1, maxval,
8661 cval2, minval),
8662 arg1);
8663 tree equal_result
8664 = fold_build2_loc (loc, code, type,
8665 eval_subst (loc, arg0, cval1, maxval,
8666 cval2, maxval),
8667 arg1);
8668 tree low_result
8669 = fold_build2_loc (loc, code, type,
8670 eval_subst (loc, arg0, cval1, minval,
8671 cval2, maxval),
8672 arg1);
8674 /* All three of these results should be 0 or 1. Confirm they are.
8675 Then use those values to select the proper code to use. */
8677 if (TREE_CODE (high_result) == INTEGER_CST
8678 && TREE_CODE (equal_result) == INTEGER_CST
8679 && TREE_CODE (low_result) == INTEGER_CST)
8681 /* Make a 3-bit mask with the high-order bit being the
8682 value for `>', the next for '=', and the low for '<'. */
8683 switch ((integer_onep (high_result) * 4)
8684 + (integer_onep (equal_result) * 2)
8685 + integer_onep (low_result))
8687 case 0:
8688 /* Always false. */
8689 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8690 case 1:
8691 code = LT_EXPR;
8692 break;
8693 case 2:
8694 code = EQ_EXPR;
8695 break;
8696 case 3:
8697 code = LE_EXPR;
8698 break;
8699 case 4:
8700 code = GT_EXPR;
8701 break;
8702 case 5:
8703 code = NE_EXPR;
8704 break;
8705 case 6:
8706 code = GE_EXPR;
8707 break;
8708 case 7:
8709 /* Always true. */
8710 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8713 if (save_p)
8715 tem = save_expr (build2 (code, type, cval1, cval2));
8716 protected_set_expr_location (tem, loc);
8717 return tem;
8719 return fold_build2_loc (loc, code, type, cval1, cval2);
8724 return NULL_TREE;
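/* Worked example (added): for ((x > y) - (y > x)) > 0 the eval_subst
   machinery above evaluates the left-hand side at x > y, x == y and
   x < y, obtaining high/equal/low results 1, 0 and 0.  That is the
   3-bit mask 4, so the whole expression folds to x > y.  */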
8728 /* Subroutine of fold_binary. Optimize complex multiplications of the
8729 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8730 argument EXPR represents the expression "z" of type TYPE. */
8732 static tree
8733 fold_mult_zconjz (location_t loc, tree type, tree expr)
8735 tree itype = TREE_TYPE (type);
8736 tree rpart, ipart, tem;
8738 if (TREE_CODE (expr) == COMPLEX_EXPR)
8740 rpart = TREE_OPERAND (expr, 0);
8741 ipart = TREE_OPERAND (expr, 1);
8743 else if (TREE_CODE (expr) == COMPLEX_CST)
8745 rpart = TREE_REALPART (expr);
8746 ipart = TREE_IMAGPART (expr);
8748 else
8750 expr = save_expr (expr);
8751 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8752 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8755 rpart = save_expr (rpart);
8756 ipart = save_expr (ipart);
8757 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8758 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8759 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8760 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8761 build_zero_cst (itype));
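/* Worked example (added): for z = 3 + 4i,

     z * conj (z) = (3 + 4i) (3 - 4i) = 9 + 16 = 25 + 0i,

   i.e. realpart^2 + imagpart^2 with a zero imaginary part, which is
   exactly the COMPLEX_EXPR built above.  */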
8765 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8766 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
8767 true if successful. */
8769 static bool
8770 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
8772 unsigned int i;
8774 if (TREE_CODE (arg) == VECTOR_CST)
8776 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8777 elts[i] = VECTOR_CST_ELT (arg, i);
8779 else if (TREE_CODE (arg) == CONSTRUCTOR)
8781 constructor_elt *elt;
8783 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8784 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8785 return false;
8786 else
8787 elts[i] = elt->value;
8789 else
8790 return false;
8791 for (; i < nelts; i++)
8792 elts[i]
8793 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8794 return true;
8797 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8798 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8799 NULL_TREE otherwise. */
8801 static tree
8802 fold_vec_perm (tree type, tree arg0, tree arg1, vec_perm_indices sel)
8804 unsigned int i;
8805 bool need_ctor = false;
8807 unsigned int nelts = sel.length ();
8808 gcc_assert (TYPE_VECTOR_SUBPARTS (type) == nelts
8809 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8810 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8811 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8812 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8813 return NULL_TREE;
8815 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
8816 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
8817 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
8818 return NULL_TREE;
8820 tree_vector_builder out_elts (type, nelts, 1);
8821 for (i = 0; i < nelts; i++)
8823 if (!CONSTANT_CLASS_P (in_elts[sel[i]]))
8824 need_ctor = true;
8825 out_elts.quick_push (unshare_expr (in_elts[sel[i]]));
8828 if (need_ctor)
8830 vec<constructor_elt, va_gc> *v;
8831 vec_alloc (v, nelts);
8832 for (i = 0; i < nelts; i++)
8833 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
8834 return build_constructor (type, v);
8836 else
8837 return out_elts.build ();
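/* Worked example (added): with ARG0 = { 10, 11, 12, 13 } and
   ARG1 = { 20, 21, 22, 23 }, the selector { 0, 4, 1, 5 } indexes the
   concatenation of both inputs and yields { 10, 20, 11, 21 }.  If
   every selected element is a constant the result is a VECTOR_CST;
   otherwise a CONSTRUCTOR is built.  */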
8840 /* Try to fold a pointer difference of type TYPE between two address expressions of
8841 array references AREF0 and AREF1 using location LOC. Return a
8842 simplified expression for the difference or NULL_TREE. */
8844 static tree
8845 fold_addr_of_array_ref_difference (location_t loc, tree type,
8846 tree aref0, tree aref1,
8847 bool use_pointer_diff)
8849 tree base0 = TREE_OPERAND (aref0, 0);
8850 tree base1 = TREE_OPERAND (aref1, 0);
8851 tree base_offset = build_int_cst (type, 0);
8853 /* If the bases are array references as well, recurse. If the bases
8854 are pointer indirections compute the difference of the pointers.
8855 If the bases are equal, we are set. */
8856 if ((TREE_CODE (base0) == ARRAY_REF
8857 && TREE_CODE (base1) == ARRAY_REF
8858 && (base_offset
8859 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
8860 use_pointer_diff)))
8861 || (INDIRECT_REF_P (base0)
8862 && INDIRECT_REF_P (base1)
8863 && (base_offset
8864 = use_pointer_diff
8865 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
8866 TREE_OPERAND (base0, 0),
8867 TREE_OPERAND (base1, 0))
8868 : fold_binary_loc (loc, MINUS_EXPR, type,
8869 fold_convert (type,
8870 TREE_OPERAND (base0, 0)),
8871 fold_convert (type,
8872 TREE_OPERAND (base1, 0)))))
8873 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8875 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8876 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8877 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8878 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
8879 return fold_build2_loc (loc, PLUS_EXPR, type,
8880 base_offset,
8881 fold_build2_loc (loc, MULT_EXPR, type,
8882 diff, esz));
8884 return NULL_TREE;
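/* Worked example (added): for int a[10], the difference of the
   addresses &a[3] and &a[1] has equal bases, so it folds to
   (3 - 1) * sizeof (int), the byte distance between the two
   addresses; the later division by the element size is what turns
   this into the element count the C-level subtraction yields.  */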
8887 /* If the real or vector real constant CST of type TYPE has an exact
8888 inverse, return it, else return NULL. */
8890 tree
8891 exact_inverse (tree type, tree cst)
8893 REAL_VALUE_TYPE r;
8894 tree unit_type;
8895 machine_mode mode;
8897 switch (TREE_CODE (cst))
8899 case REAL_CST:
8900 r = TREE_REAL_CST (cst);
8902 if (exact_real_inverse (TYPE_MODE (type), &r))
8903 return build_real (type, r);
8905 return NULL_TREE;
8907 case VECTOR_CST:
8909 unit_type = TREE_TYPE (type);
8910 mode = TYPE_MODE (unit_type);
8912 tree_vector_builder elts;
8913 if (!elts.new_unary_operation (type, cst, false))
8914 return NULL_TREE;
8915 unsigned int count = elts.encoded_nelts ();
8916 for (unsigned int i = 0; i < count; ++i)
8918 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8919 if (!exact_real_inverse (mode, &r))
8920 return NULL_TREE;
8921 elts.quick_push (build_real (unit_type, r));
8924 return elts.build ();
8927 default:
8928 return NULL_TREE;
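/* Worked example (added): 0.25 has the exact inverse 4.0, so a
   division by 0.25 can be rewritten as a multiplication by 4.0;
   0.1 has no exact binary inverse, so NULL_TREE is returned and the
   division is left alone.  */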
8932 /* Mask out the tz least significant bits of X of type TYPE where
8933 tz is the number of trailing zeroes in Y. */
8934 static wide_int
8935 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8937 int tz = wi::ctz (y);
8938 if (tz > 0)
8939 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8940 return x;
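/* Host-side sketch of the same idea (added, not part of the original
   source), using a plain unsigned int in place of wide_int:  */

static unsigned int
mask_with_tz_example (unsigned int x, unsigned int y)
{
  if (y == 0)
    return x;                       /* no trailing-zero information */
  int tz = __builtin_ctz (y);       /* number of trailing zeroes in y */
  return x & ~((1u << tz) - 1u);    /* clear the tz low bits of x */
}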
8943 /* Return true when T is an address and is known to be nonzero.
8944 For floating point we further ensure that T is not denormal.
8945 Similar logic is present in nonzero_address in rtlanal.h.
8947 If the return value is based on the assumption that signed overflow
8948 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8949 change *STRICT_OVERFLOW_P. */
8951 static bool
8952 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8954 tree type = TREE_TYPE (t);
8955 enum tree_code code;
8957 /* Doing something useful for floating point would need more work. */
8958 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8959 return false;
8961 code = TREE_CODE (t);
8962 switch (TREE_CODE_CLASS (code))
8964 case tcc_unary:
8965 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8966 strict_overflow_p);
8967 case tcc_binary:
8968 case tcc_comparison:
8969 return tree_binary_nonzero_warnv_p (code, type,
8970 TREE_OPERAND (t, 0),
8971 TREE_OPERAND (t, 1),
8972 strict_overflow_p);
8973 case tcc_constant:
8974 case tcc_declaration:
8975 case tcc_reference:
8976 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8978 default:
8979 break;
8982 switch (code)
8984 case TRUTH_NOT_EXPR:
8985 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8986 strict_overflow_p);
8988 case TRUTH_AND_EXPR:
8989 case TRUTH_OR_EXPR:
8990 case TRUTH_XOR_EXPR:
8991 return tree_binary_nonzero_warnv_p (code, type,
8992 TREE_OPERAND (t, 0),
8993 TREE_OPERAND (t, 1),
8994 strict_overflow_p);
8996 case COND_EXPR:
8997 case CONSTRUCTOR:
8998 case OBJ_TYPE_REF:
8999 case ASSERT_EXPR:
9000 case ADDR_EXPR:
9001 case WITH_SIZE_EXPR:
9002 case SSA_NAME:
9003 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9005 case COMPOUND_EXPR:
9006 case MODIFY_EXPR:
9007 case BIND_EXPR:
9008 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9009 strict_overflow_p);
9011 case SAVE_EXPR:
9012 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9013 strict_overflow_p);
9015 case CALL_EXPR:
9017 tree fndecl = get_callee_fndecl (t);
9018 if (!fndecl) return false;
9019 if (flag_delete_null_pointer_checks && !flag_check_new
9020 && DECL_IS_OPERATOR_NEW (fndecl)
9021 && !TREE_NOTHROW (fndecl))
9022 return true;
9023 if (flag_delete_null_pointer_checks
9024 && lookup_attribute ("returns_nonnull",
9025 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9026 return true;
9027 return alloca_call_p (t);
9030 default:
9031 break;
9033 return false;
9036 /* Return true when T is an address and is known to be nonzero.
9037 Handle warnings about undefined signed overflow. */
9039 bool
9040 tree_expr_nonzero_p (tree t)
9042 bool ret, strict_overflow_p;
9044 strict_overflow_p = false;
9045 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9046 if (strict_overflow_p)
9047 fold_overflow_warning (("assuming signed overflow does not occur when "
9048 "determining that expression is always "
9049 "non-zero"),
9050 WARN_STRICT_OVERFLOW_MISC);
9051 return ret;
9054 /* Return true if T is known not to be equal to an integer W. */
9056 bool
9057 expr_not_equal_to (tree t, const wide_int &w)
9059 wide_int min, max, nz;
9060 value_range_type rtype;
9061 switch (TREE_CODE (t))
9063 case INTEGER_CST:
9064 return wi::to_wide (t) != w;
9066 case SSA_NAME:
9067 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9068 return false;
9069 rtype = get_range_info (t, &min, &max);
9070 if (rtype == VR_RANGE)
9072 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9073 return true;
9074 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9075 return true;
9077 else if (rtype == VR_ANTI_RANGE
9078 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9079 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9080 return true;
9081 /* If T has some known zero bits and W has any of those bits set,
9082 then T is known not to be equal to W. */
9083 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9084 TYPE_PRECISION (TREE_TYPE (t))), 0))
9085 return true;
9086 return false;
9088 default:
9089 return false;
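/* [Editorial sketch, not part of fold-const.c.]  The VR_RANGE and
   known-zero-bits disproofs above, restated on a toy 32-bit signed
   range; the VR_ANTI_RANGE case is omitted, and all names here are
   hypothetical stand-ins for the tree/wide_int API.  */
#include <stdbool.h>
#include <stdint.h>

struct toy_range { int32_t min, max; uint32_t nonzero_bits; };

static bool
toy_not_equal_to (struct toy_range r, int32_t w)
{
  if (w < r.min || w > r.max)		/* W lies outside [min, max].  */
    return true;
  if ((uint32_t) w & ~r.nonzero_bits)	/* W sets a bit known to be zero.  */
    return true;
  return false;				/* Otherwise we cannot tell.  */
}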
9093 /* Fold a binary expression of code CODE and type TYPE with operands
9094 OP0 and OP1. LOC is the location of the resulting expression.
9095 Return the folded expression if folding is successful. Otherwise,
9096 return NULL_TREE. */
9098 tree
9099 fold_binary_loc (location_t loc,
9100 enum tree_code code, tree type, tree op0, tree op1)
9102 enum tree_code_class kind = TREE_CODE_CLASS (code);
9103 tree arg0, arg1, tem;
9104 tree t1 = NULL_TREE;
9105 bool strict_overflow_p;
9106 unsigned int prec;
9108 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9109 && TREE_CODE_LENGTH (code) == 2
9110 && op0 != NULL_TREE
9111 && op1 != NULL_TREE);
9113 arg0 = op0;
9114 arg1 = op1;
9116 /* Strip any conversions that don't change the mode. This is
9117 safe for every expression, except for a comparison expression
9118 because its signedness is derived from its operands. So, in
9119 the latter case, only strip conversions that don't change the
9120 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9121 preserved.
9123 Note that this is done as an internal manipulation within the
9124 constant folder, in order to find the simplest representation
9125 of the arguments so that their form can be studied. In any
9126 case, the appropriate type conversions should be put back in
9127 the tree that will get out of the constant folder. */
9129 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9131 STRIP_SIGN_NOPS (arg0);
9132 STRIP_SIGN_NOPS (arg1);
9134 else
9136 STRIP_NOPS (arg0);
9137 STRIP_NOPS (arg1);
9140 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9141 constant but we can't do arithmetic on them. */
9142 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9144 tem = const_binop (code, type, arg0, arg1);
9145 if (tem != NULL_TREE)
9147 if (TREE_TYPE (tem) != type)
9148 tem = fold_convert_loc (loc, type, tem);
9149 return tem;
9153 /* If this is a commutative operation, and ARG0 is a constant, move it
9154 to ARG1 to reduce the number of tests below. */
9155 if (commutative_tree_code (code)
9156 && tree_swap_operands_p (arg0, arg1))
9157 return fold_build2_loc (loc, code, type, op1, op0);
9159 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9160 to ARG1 to reduce the number of tests below. */
9161 if (kind == tcc_comparison
9162 && tree_swap_operands_p (arg0, arg1))
9163 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9165 tem = generic_simplify (loc, code, type, op0, op1);
9166 if (tem)
9167 return tem;
9169 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9171 First check for cases where an arithmetic operation is applied to a
9172 compound, conditional, or comparison operation. Push the arithmetic
9173 operation inside the compound or conditional to see if any folding
9174 can then be done. Convert comparison to conditional for this purpose.
9175 This also optimizes non-constant cases that used to be done in
9176 expand_expr.
9178 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9179 one of the operands is a truth value and the other is a truth value or a
9180 BIT_AND_EXPR with the constant 1. In that case, the
9181 code below would make the expression more complex. Change it to a
9182 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9183 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9185 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9186 || code == EQ_EXPR || code == NE_EXPR)
9187 && TREE_CODE (type) != VECTOR_TYPE
9188 && ((truth_value_p (TREE_CODE (arg0))
9189 && (truth_value_p (TREE_CODE (arg1))
9190 || (TREE_CODE (arg1) == BIT_AND_EXPR
9191 && integer_onep (TREE_OPERAND (arg1, 1)))))
9192 || (truth_value_p (TREE_CODE (arg1))
9193 && (truth_value_p (TREE_CODE (arg0))
9194 || (TREE_CODE (arg0) == BIT_AND_EXPR
9195 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9197 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9198 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9199 : TRUTH_XOR_EXPR,
9200 boolean_type_node,
9201 fold_convert_loc (loc, boolean_type_node, arg0),
9202 fold_convert_loc (loc, boolean_type_node, arg1));
9204 if (code == EQ_EXPR)
9205 tem = invert_truthvalue_loc (loc, tem);
9207 return fold_convert_loc (loc, type, tem);
9210 if (TREE_CODE_CLASS (code) == tcc_binary
9211 || TREE_CODE_CLASS (code) == tcc_comparison)
9213 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9215 tem = fold_build2_loc (loc, code, type,
9216 fold_convert_loc (loc, TREE_TYPE (op0),
9217 TREE_OPERAND (arg0, 1)), op1);
9218 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9219 tem);
9221 if (TREE_CODE (arg1) == COMPOUND_EXPR)
9223 tem = fold_build2_loc (loc, code, type, op0,
9224 fold_convert_loc (loc, TREE_TYPE (op1),
9225 TREE_OPERAND (arg1, 1)));
9226 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9227 tem);
9230 if (TREE_CODE (arg0) == COND_EXPR
9231 || TREE_CODE (arg0) == VEC_COND_EXPR
9232 || COMPARISON_CLASS_P (arg0))
9234 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9235 arg0, arg1,
9236 /*cond_first_p=*/1);
9237 if (tem != NULL_TREE)
9238 return tem;
9241 if (TREE_CODE (arg1) == COND_EXPR
9242 || TREE_CODE (arg1) == VEC_COND_EXPR
9243 || COMPARISON_CLASS_P (arg1))
9245 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9246 arg1, arg0,
9247 /*cond_first_p=*/0);
9248 if (tem != NULL_TREE)
9249 return tem;
9253 switch (code)
9255 case MEM_REF:
9256 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9257 if (TREE_CODE (arg0) == ADDR_EXPR
9258 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9260 tree iref = TREE_OPERAND (arg0, 0);
9261 return fold_build2 (MEM_REF, type,
9262 TREE_OPERAND (iref, 0),
9263 int_const_binop (PLUS_EXPR, arg1,
9264 TREE_OPERAND (iref, 1)));
9267 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9268 if (TREE_CODE (arg0) == ADDR_EXPR
9269 && handled_component_p (TREE_OPERAND (arg0, 0)))
9271 tree base;
9272 HOST_WIDE_INT coffset;
9273 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9274 &coffset);
9275 if (!base)
9276 return NULL_TREE;
9277 return fold_build2 (MEM_REF, type,
9278 build_fold_addr_expr (base),
9279 int_const_binop (PLUS_EXPR, arg1,
9280 size_int (coffset)));
9283 return NULL_TREE;
9285 case POINTER_PLUS_EXPR:
9286 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9287 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9288 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9289 return fold_convert_loc (loc, type,
9290 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9291 fold_convert_loc (loc, sizetype,
9292 arg1),
9293 fold_convert_loc (loc, sizetype,
9294 arg0)));
9296 return NULL_TREE;
9298 case PLUS_EXPR:
9299 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9301 /* X + (X / CST) * -CST is X % CST. */
9302 if (TREE_CODE (arg1) == MULT_EXPR
9303 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9304 && operand_equal_p (arg0,
9305 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9307 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9308 tree cst1 = TREE_OPERAND (arg1, 1);
9309 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9310 cst1, cst0);
9311 if (sum && integer_zerop (sum))
9312 return fold_convert_loc (loc, type,
9313 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9314 TREE_TYPE (arg0), arg0,
9315 cst0));
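/* [Editorial sketch, not part of fold-const.c.]  Why the fold is exact
   for C's truncating division: x - (x / c) * c == x % c by definition,
   so x + (x / c) * -c is x % c, e.g. 17 + (17 / 5) * -5 == 2 == 17 % 5.  */
#include <assert.h>
#include <limits.h>

static void
demo_div_plus_mult (int x, int c)
{
  if (c != 0 && x != INT_MIN)		/* keep every step well-defined */
    assert (x + (x / c) * -c == x % c);
}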
9319 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same, or
9320 one of them being 1. Make sure the type is not saturating and has the signedness of
9321 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9322 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9323 if ((TREE_CODE (arg0) == MULT_EXPR
9324 || TREE_CODE (arg1) == MULT_EXPR)
9325 && !TYPE_SATURATING (type)
9326 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9327 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9328 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9330 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9331 if (tem)
9332 return tem;
9335 if (! FLOAT_TYPE_P (type))
9337 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9338 (plus (plus (mult) (mult)) (foo)) so that we can
9339 take advantage of the factoring cases below. */
9340 if (ANY_INTEGRAL_TYPE_P (type)
9341 && TYPE_OVERFLOW_WRAPS (type)
9342 && (((TREE_CODE (arg0) == PLUS_EXPR
9343 || TREE_CODE (arg0) == MINUS_EXPR)
9344 && TREE_CODE (arg1) == MULT_EXPR)
9345 || ((TREE_CODE (arg1) == PLUS_EXPR
9346 || TREE_CODE (arg1) == MINUS_EXPR)
9347 && TREE_CODE (arg0) == MULT_EXPR)))
9349 tree parg0, parg1, parg, marg;
9350 enum tree_code pcode;
9352 if (TREE_CODE (arg1) == MULT_EXPR)
9353 parg = arg0, marg = arg1;
9354 else
9355 parg = arg1, marg = arg0;
9356 pcode = TREE_CODE (parg);
9357 parg0 = TREE_OPERAND (parg, 0);
9358 parg1 = TREE_OPERAND (parg, 1);
9359 STRIP_NOPS (parg0);
9360 STRIP_NOPS (parg1);
9362 if (TREE_CODE (parg0) == MULT_EXPR
9363 && TREE_CODE (parg1) != MULT_EXPR)
9364 return fold_build2_loc (loc, pcode, type,
9365 fold_build2_loc (loc, PLUS_EXPR, type,
9366 fold_convert_loc (loc, type,
9367 parg0),
9368 fold_convert_loc (loc, type,
9369 marg)),
9370 fold_convert_loc (loc, type, parg1));
9371 if (TREE_CODE (parg0) != MULT_EXPR
9372 && TREE_CODE (parg1) == MULT_EXPR)
9373 return
9374 fold_build2_loc (loc, PLUS_EXPR, type,
9375 fold_convert_loc (loc, type, parg0),
9376 fold_build2_loc (loc, pcode, type,
9377 fold_convert_loc (loc, type, marg),
9378 fold_convert_loc (loc, type,
9379 parg1)));
9382 else
9384 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9385 to __complex__ ( x, y ). This is not the same for SNaNs or
9386 if signed zeros are involved. */
9387 if (!HONOR_SNANS (element_mode (arg0))
9388 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9389 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9391 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9392 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9393 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9394 bool arg0rz = false, arg0iz = false;
9395 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9396 || (arg0i && (arg0iz = real_zerop (arg0i))))
9398 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9399 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9400 if (arg0rz && arg1i && real_zerop (arg1i))
9402 tree rp = arg1r ? arg1r
9403 : build1 (REALPART_EXPR, rtype, arg1);
9404 tree ip = arg0i ? arg0i
9405 : build1 (IMAGPART_EXPR, rtype, arg0);
9406 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9408 else if (arg0iz && arg1r && real_zerop (arg1r))
9410 tree rp = arg0r ? arg0r
9411 : build1 (REALPART_EXPR, rtype, arg0);
9412 tree ip = arg1i ? arg1i
9413 : build1 (IMAGPART_EXPR, rtype, arg1);
9414 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9419 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9420 We associate floats only if the user has specified
9421 -fassociative-math. */
9422 if (flag_associative_math
9423 && TREE_CODE (arg1) == PLUS_EXPR
9424 && TREE_CODE (arg0) != MULT_EXPR)
9426 tree tree10 = TREE_OPERAND (arg1, 0);
9427 tree tree11 = TREE_OPERAND (arg1, 1);
9428 if (TREE_CODE (tree11) == MULT_EXPR
9429 && TREE_CODE (tree10) == MULT_EXPR)
9431 tree tree0;
9432 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9433 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9436 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9437 We associate floats only if the user has specified
9438 -fassociative-math. */
9439 if (flag_associative_math
9440 && TREE_CODE (arg0) == PLUS_EXPR
9441 && TREE_CODE (arg1) != MULT_EXPR)
9443 tree tree00 = TREE_OPERAND (arg0, 0);
9444 tree tree01 = TREE_OPERAND (arg0, 1);
9445 if (TREE_CODE (tree01) == MULT_EXPR
9446 && TREE_CODE (tree00) == MULT_EXPR)
9448 tree tree0;
9449 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9450 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9455 bit_rotate:
9456 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9457 is a rotate of A by C1 bits. */
9458 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9459 is a rotate of A by B bits.
9460 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
9461 though in this case CODE must be | and not + or ^, otherwise
9462 it doesn't return A when B is 0. */
9464 enum tree_code code0, code1;
9465 tree rtype;
9466 code0 = TREE_CODE (arg0);
9467 code1 = TREE_CODE (arg1);
9468 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9469 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9470 && operand_equal_p (TREE_OPERAND (arg0, 0),
9471 TREE_OPERAND (arg1, 0), 0)
9472 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9473 TYPE_UNSIGNED (rtype))
9474 /* Only create rotates in complete modes. Other cases are not
9475 expanded properly. */
9476 && (element_precision (rtype)
9477 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9479 tree tree01, tree11;
9480 tree orig_tree01, orig_tree11;
9481 enum tree_code code01, code11;
9483 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
9484 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
9485 STRIP_NOPS (tree01);
9486 STRIP_NOPS (tree11);
9487 code01 = TREE_CODE (tree01);
9488 code11 = TREE_CODE (tree11);
9489 if (code11 != MINUS_EXPR
9490 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
9492 std::swap (code0, code1);
9493 std::swap (code01, code11);
9494 std::swap (tree01, tree11);
9495 std::swap (orig_tree01, orig_tree11);
9497 if (code01 == INTEGER_CST
9498 && code11 == INTEGER_CST
9499 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9500 == element_precision (rtype)))
9502 tem = build2_loc (loc, LROTATE_EXPR,
9503 rtype, TREE_OPERAND (arg0, 0),
9504 code0 == LSHIFT_EXPR
9505 ? orig_tree01 : orig_tree11);
9506 return fold_convert_loc (loc, type, tem);
9508 else if (code11 == MINUS_EXPR)
9510 tree tree110, tree111;
9511 tree110 = TREE_OPERAND (tree11, 0);
9512 tree111 = TREE_OPERAND (tree11, 1);
9513 STRIP_NOPS (tree110);
9514 STRIP_NOPS (tree111);
9515 if (TREE_CODE (tree110) == INTEGER_CST
9516 && 0 == compare_tree_int (tree110,
9517 element_precision (rtype))
9518 && operand_equal_p (tree01, tree111, 0))
9520 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9521 ? LROTATE_EXPR : RROTATE_EXPR),
9522 rtype, TREE_OPERAND (arg0, 0),
9523 orig_tree01);
9524 return fold_convert_loc (loc, type, tem);
9527 else if (code == BIT_IOR_EXPR
9528 && code11 == BIT_AND_EXPR
9529 && pow2p_hwi (element_precision (rtype)))
9531 tree tree110, tree111;
9532 tree110 = TREE_OPERAND (tree11, 0);
9533 tree111 = TREE_OPERAND (tree11, 1);
9534 STRIP_NOPS (tree110);
9535 STRIP_NOPS (tree111);
9536 if (TREE_CODE (tree110) == NEGATE_EXPR
9537 && TREE_CODE (tree111) == INTEGER_CST
9538 && 0 == compare_tree_int (tree111,
9539 element_precision (rtype) - 1)
9540 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
9542 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9543 ? LROTATE_EXPR : RROTATE_EXPR),
9544 rtype, TREE_OPERAND (arg0, 0),
9545 orig_tree01);
9546 return fold_convert_loc (loc, type, tem);
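/* [Editorial sketch, not part of fold-const.c.]  The three idioms
   recognized above, written out for uint32_t with b < 32; each one is
   a rotate of A.  */
#include <stdint.h>

static uint32_t
rot_cc (uint32_t a)			/* C1 + C2 == 32 */
{ return (a << 3) + (a >> 29); }

static uint32_t
rot_sub (uint32_t a, unsigned b)	/* Z - B form; shifts by 32 if b == 0 */
{ return (a << b) | (a >> (32 - b)); }

static uint32_t
rot_neg (uint32_t a, unsigned b)	/* -B & (Z-1) form; needs '|', safe for b == 0 */
{ return (a << b) | (a >> (-b & 31)); }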
9552 associate:
9553 /* In most languages, we can't associate operations on floats through
9554 parentheses. Rather than remember where the parentheses were, we
9555 don't associate floats at all, unless the user has specified
9556 -fassociative-math.
9557 And, we need to make sure type is not saturating. */
9559 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9560 && !TYPE_SATURATING (type))
9562 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
9563 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
9564 tree atype = type;
9565 bool ok = true;
9567 /* Split both trees into variables, constants, and literals. Then
9568 associate each group together, the constants with literals,
9569 then the result with variables. This increases the chances of
9570 literals being recombined later and of generating relocatable
9571 expressions for the sum of a constant and literal. */
9572 var0 = split_tree (arg0, type, code,
9573 &minus_var0, &con0, &minus_con0,
9574 &lit0, &minus_lit0, 0);
9575 var1 = split_tree (arg1, type, code,
9576 &minus_var1, &con1, &minus_con1,
9577 &lit1, &minus_lit1, code == MINUS_EXPR);
9579 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9580 if (code == MINUS_EXPR)
9581 code = PLUS_EXPR;
9583 /* With undefined overflow prefer doing association in a type
9584 which wraps on overflow, if that is one of the operand types. */
9585 if (POINTER_TYPE_P (type)
9586 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9588 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9589 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9590 atype = TREE_TYPE (arg0);
9591 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9592 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9593 atype = TREE_TYPE (arg1);
9594 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9597 /* With undefined overflow we can only associate constants with one
9598 variable, and constants whose association doesn't overflow. */
9599 if (POINTER_TYPE_P (atype)
9600 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9602 if ((var0 && var1) || (minus_var0 && minus_var1))
9604 /* ??? If split_tree would handle NEGATE_EXPR we could
9605 simply reject these cases and the allowed cases would
9606 be the var0/minus_var1 ones. */
9607 tree tmp0 = var0 ? var0 : minus_var0;
9608 tree tmp1 = var1 ? var1 : minus_var1;
9609 bool one_neg = false;
9611 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9613 tmp0 = TREE_OPERAND (tmp0, 0);
9614 one_neg = !one_neg;
9616 if (CONVERT_EXPR_P (tmp0)
9617 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9618 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9619 <= TYPE_PRECISION (atype)))
9620 tmp0 = TREE_OPERAND (tmp0, 0);
9621 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9623 tmp1 = TREE_OPERAND (tmp1, 0);
9624 one_neg = !one_neg;
9626 if (CONVERT_EXPR_P (tmp1)
9627 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9628 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9629 <= TYPE_PRECISION (atype)))
9630 tmp1 = TREE_OPERAND (tmp1, 0);
9631 /* The only case we can still associate with two variables
9632 is if they cancel out. */
9633 if (!one_neg
9634 || !operand_equal_p (tmp0, tmp1, 0))
9635 ok = false;
9637 else if ((var0 && minus_var1
9638 && ! operand_equal_p (var0, minus_var1, 0))
9639 || (minus_var0 && var1
9640 && ! operand_equal_p (minus_var0, var1, 0)))
9641 ok = false;
9644 /* Only do something if we found more than two objects. Otherwise,
9645 nothing has changed and we risk infinite recursion. */
9646 if (ok
9647 && (2 < ((var0 != 0) + (var1 != 0)
9648 + (minus_var0 != 0) + (minus_var1 != 0)
9649 + (con0 != 0) + (con1 != 0)
9650 + (minus_con0 != 0) + (minus_con1 != 0)
9651 + (lit0 != 0) + (lit1 != 0)
9652 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9654 var0 = associate_trees (loc, var0, var1, code, atype);
9655 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
9656 code, atype);
9657 con0 = associate_trees (loc, con0, con1, code, atype);
9658 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
9659 code, atype);
9660 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9661 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9662 code, atype);
9664 if (minus_var0 && var0)
9666 var0 = associate_trees (loc, var0, minus_var0,
9667 MINUS_EXPR, atype);
9668 minus_var0 = 0;
9670 if (minus_con0 && con0)
9672 con0 = associate_trees (loc, con0, minus_con0,
9673 MINUS_EXPR, atype);
9674 minus_con0 = 0;
9677 /* Preserve the MINUS_EXPR if the negative part of the literal is
9678 greater than the positive part. Otherwise, the multiplicative
9679 folding code (i.e. extract_muldiv) may be fooled when
9680 unsigned constants are subtracted, as in the following
9681 example: ((X*2 + 4) - 8U)/2. */
9682 if (minus_lit0 && lit0)
9684 if (TREE_CODE (lit0) == INTEGER_CST
9685 && TREE_CODE (minus_lit0) == INTEGER_CST
9686 && tree_int_cst_lt (lit0, minus_lit0)
9687 /* But avoid ending up with only negated parts. */
9688 && (var0 || con0))
9690 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9691 MINUS_EXPR, atype);
9692 lit0 = 0;
9694 else
9696 lit0 = associate_trees (loc, lit0, minus_lit0,
9697 MINUS_EXPR, atype);
9698 minus_lit0 = 0;
9702 /* Don't introduce overflows through reassociation. */
9703 if ((lit0 && TREE_OVERFLOW_P (lit0))
9704 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
9705 return NULL_TREE;
9707 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
9708 con0 = associate_trees (loc, con0, lit0, code, atype);
9709 lit0 = 0;
9710 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
9711 code, atype);
9712 minus_lit0 = 0;
9714 /* Eliminate minus_con0. */
9715 if (minus_con0)
9717 if (con0)
9718 con0 = associate_trees (loc, con0, minus_con0,
9719 MINUS_EXPR, atype);
9720 else if (var0)
9721 var0 = associate_trees (loc, var0, minus_con0,
9722 MINUS_EXPR, atype);
9723 else
9724 gcc_unreachable ();
9725 minus_con0 = 0;
9728 /* Eliminate minus_var0. */
9729 if (minus_var0)
9731 if (con0)
9732 con0 = associate_trees (loc, con0, minus_var0,
9733 MINUS_EXPR, atype);
9734 else
9735 gcc_unreachable ();
9736 minus_var0 = 0;
9739 return
9740 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9741 code, atype));
9745 return NULL_TREE;
9747 case POINTER_DIFF_EXPR:
9748 case MINUS_EXPR:
9749 /* Fold &a[i] - &a[j] to i-j. */
9750 if (TREE_CODE (arg0) == ADDR_EXPR
9751 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9752 && TREE_CODE (arg1) == ADDR_EXPR
9753 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9755 tree tem = fold_addr_of_array_ref_difference (loc, type,
9756 TREE_OPERAND (arg0, 0),
9757 TREE_OPERAND (arg1, 0),
9758 code
9759 == POINTER_DIFF_EXPR);
9760 if (tem)
9761 return tem;
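/* [Editorial sketch, not part of fold-const.c.]  Pointer subtraction
   within one array is exactly the index difference, so no address needs
   to be formed.  Valid here for 0 <= i, j <= 10.  */
#include <assert.h>
#include <stddef.h>

static void
demo_array_ref_difference (ptrdiff_t i, ptrdiff_t j)
{
  int a[10];
  assert (&a[i] - &a[j] == i - j);
}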
9764 /* Further transformations do not apply to pointers. */
9765 if (code == POINTER_DIFF_EXPR)
9766 return NULL_TREE;
9768 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9769 if (TREE_CODE (arg0) == NEGATE_EXPR
9770 && negate_expr_p (op1))
9771 return fold_build2_loc (loc, MINUS_EXPR, type,
9772 negate_expr (op1),
9773 fold_convert_loc (loc, type,
9774 TREE_OPERAND (arg0, 0)));
9776 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9777 __complex__ ( x, -y ). This is not the same for SNaNs or if
9778 signed zeros are involved. */
9779 if (!HONOR_SNANS (element_mode (arg0))
9780 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9781 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9783 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9784 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9785 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9786 bool arg0rz = false, arg0iz = false;
9787 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9788 || (arg0i && (arg0iz = real_zerop (arg0i))))
9790 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9791 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9792 if (arg0rz && arg1i && real_zerop (arg1i))
9794 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9795 arg1r ? arg1r
9796 : build1 (REALPART_EXPR, rtype, arg1));
9797 tree ip = arg0i ? arg0i
9798 : build1 (IMAGPART_EXPR, rtype, arg0);
9799 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9801 else if (arg0iz && arg1r && real_zerop (arg1r))
9803 tree rp = arg0r ? arg0r
9804 : build1 (REALPART_EXPR, rtype, arg0);
9805 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9806 arg1i ? arg1i
9807 : build1 (IMAGPART_EXPR, rtype, arg1));
9808 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9813 /* A - B -> A + (-B) if B is easily negatable. */
9814 if (negate_expr_p (op1)
9815 && ! TYPE_OVERFLOW_SANITIZED (type)
9816 && ((FLOAT_TYPE_P (type)
9817 /* Avoid this transformation if B is a positive REAL_CST. */
9818 && (TREE_CODE (op1) != REAL_CST
9819 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9820 || INTEGRAL_TYPE_P (type)))
9821 return fold_build2_loc (loc, PLUS_EXPR, type,
9822 fold_convert_loc (loc, type, arg0),
9823 negate_expr (op1));
9825 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9826 one. Make sure the type is not saturating and has the signedness of
9827 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9828 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9829 if ((TREE_CODE (arg0) == MULT_EXPR
9830 || TREE_CODE (arg1) == MULT_EXPR)
9831 && !TYPE_SATURATING (type)
9832 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9833 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9834 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9836 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9837 if (tem)
9838 return tem;
9841 goto associate;
9843 case MULT_EXPR:
9844 if (! FLOAT_TYPE_P (type))
9846 /* Transform x * -C into -x * C if x is easily negatable. */
9847 if (TREE_CODE (op1) == INTEGER_CST
9848 && tree_int_cst_sgn (op1) == -1
9849 && negate_expr_p (op0)
9850 && negate_expr_p (op1)
9851 && (tem = negate_expr (op1)) != op1
9852 && ! TREE_OVERFLOW (tem))
9853 return fold_build2_loc (loc, MULT_EXPR, type,
9854 fold_convert_loc (loc, type,
9855 negate_expr (op0)), tem);
9857 strict_overflow_p = false;
9858 if (TREE_CODE (arg1) == INTEGER_CST
9859 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9860 &strict_overflow_p)))
9862 if (strict_overflow_p)
9863 fold_overflow_warning (("assuming signed overflow does not "
9864 "occur when simplifying "
9865 "multiplication"),
9866 WARN_STRICT_OVERFLOW_MISC);
9867 return fold_convert_loc (loc, type, tem);
9870 /* Optimize z * conj(z) for integer complex numbers. */
9871 if (TREE_CODE (arg0) == CONJ_EXPR
9872 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9873 return fold_mult_zconjz (loc, type, arg1);
9874 if (TREE_CODE (arg1) == CONJ_EXPR
9875 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9876 return fold_mult_zconjz (loc, type, arg0);
9878 else
9880 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9881 This is not the same for NaNs or if signed zeros are
9882 involved. */
9883 if (!HONOR_NANS (arg0)
9884 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9885 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9886 && TREE_CODE (arg1) == COMPLEX_CST
9887 && real_zerop (TREE_REALPART (arg1)))
9889 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9890 if (real_onep (TREE_IMAGPART (arg1)))
9891 return
9892 fold_build2_loc (loc, COMPLEX_EXPR, type,
9893 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9894 rtype, arg0)),
9895 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9896 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9897 return
9898 fold_build2_loc (loc, COMPLEX_EXPR, type,
9899 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9900 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9901 rtype, arg0)));
9904 /* Optimize z * conj(z) for floating point complex numbers.
9905 Guarded by flag_unsafe_math_optimizations as non-finite
9906 imaginary components don't produce scalar results. */
9907 if (flag_unsafe_math_optimizations
9908 && TREE_CODE (arg0) == CONJ_EXPR
9909 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9910 return fold_mult_zconjz (loc, type, arg1);
9911 if (flag_unsafe_math_optimizations
9912 && TREE_CODE (arg1) == CONJ_EXPR
9913 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9914 return fold_mult_zconjz (loc, type, arg0);
9916 goto associate;
9918 case BIT_IOR_EXPR:
9919 /* Canonicalize (X & C1) | C2. */
9920 if (TREE_CODE (arg0) == BIT_AND_EXPR
9921 && TREE_CODE (arg1) == INTEGER_CST
9922 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9924 int width = TYPE_PRECISION (type), w;
9925 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
9926 wide_int c2 = wi::to_wide (arg1);
9928 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9929 if ((c1 & c2) == c1)
9930 return omit_one_operand_loc (loc, type, arg1,
9931 TREE_OPERAND (arg0, 0));
9933 wide_int msk = wi::mask (width, false,
9934 TYPE_PRECISION (TREE_TYPE (arg1)));
9936 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9937 if (wi::bit_and_not (msk, c1 | c2) == 0)
9939 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9940 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9943 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9944 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9945 mode which allows further optimizations. */
9946 c1 &= msk;
9947 c2 &= msk;
9948 wide_int c3 = wi::bit_and_not (c1, c2);
9949 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9951 wide_int mask = wi::mask (w, false,
9952 TYPE_PRECISION (type));
9953 if (((c1 | c2) & mask) == mask
9954 && wi::bit_and_not (c1, mask) == 0)
9956 c3 = mask;
9957 break;
9961 if (c3 != c1)
9963 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9964 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
9965 wide_int_to_tree (type, c3));
9966 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
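/* [Editorial sketch, not part of fold-const.c.]  The three
   (X & C1) | C2 rewrites above, with concrete uint8_t constants.  */
#include <stdint.h>

static uint8_t
ior_absorbed (uint8_t x)	/* (C1 & C2) == C1: folds to the constant 0xff */
{ return (x & 0x0f) | 0xff; }

static uint8_t
ior_drops_and (uint8_t x)	/* (C1 | C2) == ~0: folds to x | 0x0c */
{ return (x & 0xf3) | 0x0c; }

static uint8_t
ior_shrinks_c1 (uint8_t x)	/* C1 &= ~C2: folds to (x & 0x30) | 0x0c */
{ return (x & 0x3c) | 0x0c; }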
9970 /* See if this can be simplified into a rotate first. If that
9971 is unsuccessful, continue in the association code. */
9972 goto bit_rotate;
9974 case BIT_XOR_EXPR:
9975 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9976 if (TREE_CODE (arg0) == BIT_AND_EXPR
9977 && INTEGRAL_TYPE_P (type)
9978 && integer_onep (TREE_OPERAND (arg0, 1))
9979 && integer_onep (arg1))
9980 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9981 build_zero_cst (TREE_TYPE (arg0)));
9983 /* See if this can be simplified into a rotate first. If that
9984 is unsuccessful, continue in the association code. */
9985 goto bit_rotate;
9987 case BIT_AND_EXPR:
9988 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9989 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9990 && INTEGRAL_TYPE_P (type)
9991 && integer_onep (TREE_OPERAND (arg0, 1))
9992 && integer_onep (arg1))
9994 tree tem2;
9995 tem = TREE_OPERAND (arg0, 0);
9996 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9997 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9998 tem, tem2);
9999 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10000 build_zero_cst (TREE_TYPE (tem)));
10002 /* Fold ~X & 1 as (X & 1) == 0. */
10003 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10004 && INTEGRAL_TYPE_P (type)
10005 && integer_onep (arg1))
10007 tree tem2;
10008 tem = TREE_OPERAND (arg0, 0);
10009 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10010 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10011 tem, tem2);
10012 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10013 build_zero_cst (TREE_TYPE (tem)));
10015 /* Fold !X & 1 as X == 0. */
10016 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10017 && integer_onep (arg1))
10019 tem = TREE_OPERAND (arg0, 0);
10020 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10021 build_zero_cst (TREE_TYPE (tem)));
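/* [Editorial sketch, not part of fold-const.c.]  The low-bit folds
   above (including the BIT_XOR_EXPR one) as executable identities.  */
#include <assert.h>

static void
demo_low_bit_folds (unsigned x, _Bool b)
{
  assert (((x & 1) ^ 1) == ((x & 1) == 0));	/* (X & 1) ^ 1 */
  assert (((x ^ 1) & 1) == ((x & 1) == 0));	/* (X ^ 1) & 1 */
  assert ((~x & 1) == ((x & 1) == 0));		/* ~X & 1 */
  assert ((!b & 1) == (b == 0));		/* !X & 1 */
}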
10024 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10025 multiple of 1 << CST. */
10026 if (TREE_CODE (arg1) == INTEGER_CST)
10028 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10029 wide_int ncst1 = -cst1;
10030 if ((cst1 & ncst1) == ncst1
10031 && multiple_of_p (type, arg0,
10032 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10033 return fold_convert_loc (loc, type, arg0);
10036 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10037 bits from CST2. */
10038 if (TREE_CODE (arg1) == INTEGER_CST
10039 && TREE_CODE (arg0) == MULT_EXPR
10040 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10042 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
10043 wide_int masked
10044 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
10046 if (masked == 0)
10047 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10048 arg0, arg1);
10049 else if (masked != warg1)
10051 /* Avoid the transform if arg1 is a mask of some
10052 mode which allows further optimizations. */
10053 int pop = wi::popcount (warg1);
10054 if (!(pop >= BITS_PER_UNIT
10055 && pow2p_hwi (pop)
10056 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10057 return fold_build2_loc (loc, code, type, op0,
10058 wide_int_to_tree (type, masked));
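/* [Editorial sketch, not part of fold-const.c.]  Both multiply-mask
   folds above for Y == 8, i.e. three trailing zeroes.  */
#include <assert.h>
#include <stdint.h>

static void
demo_mult_mask (uint32_t x)
{
  /* x * 8 is a multiple of 8 even modulo 2^32, so & -8 is a no-op.  */
  assert (((x * 8) & (uint32_t) -8) == x * 8);
  /* The three low bits of CST2 are known zero in the product.  */
  assert (((x * 8) & 0xffu) == ((x * 8) & 0xf8u));
}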
10062 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10063 ((A & N) + B) & M -> (A + B) & M
10064 Similarly if (N & M) == 0,
10065 ((A | N) + B) & M -> (A + B) & M
10066 and for - instead of + (or unary - instead of +)
10067 and/or ^ instead of |.
10068 If B is constant and (B & M) == 0, fold into A & M. */
10069 if (TREE_CODE (arg1) == INTEGER_CST)
10071 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10072 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10073 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10074 && (TREE_CODE (arg0) == PLUS_EXPR
10075 || TREE_CODE (arg0) == MINUS_EXPR
10076 || TREE_CODE (arg0) == NEGATE_EXPR)
10077 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10078 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10080 tree pmop[2];
10081 int which = 0;
10082 wide_int cst0;
10084 /* Now we know that arg0 is (C + D) or (C - D) or
10085 -C and arg1 (M) is == (1LL << cst) - 1.
10086 Store C into PMOP[0] and D into PMOP[1]. */
10087 pmop[0] = TREE_OPERAND (arg0, 0);
10088 pmop[1] = NULL;
10089 if (TREE_CODE (arg0) != NEGATE_EXPR)
10091 pmop[1] = TREE_OPERAND (arg0, 1);
10092 which = 1;
10095 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10096 which = -1;
10098 for (; which >= 0; which--)
10099 switch (TREE_CODE (pmop[which]))
10101 case BIT_AND_EXPR:
10102 case BIT_IOR_EXPR:
10103 case BIT_XOR_EXPR:
10104 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10105 != INTEGER_CST)
10106 break;
10107 cst0 = wi::to_wide (TREE_OPERAND (pmop[which], 1)) & cst1;
10108 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10110 if (cst0 != cst1)
10111 break;
10113 else if (cst0 != 0)
10114 break;
10115 /* If C or D is of the form (A & N) where
10116 (N & M) == M, or of the form (A | N) or
10117 (A ^ N) where (N & M) == 0, replace it with A. */
10118 pmop[which] = TREE_OPERAND (pmop[which], 0);
10119 break;
10120 case INTEGER_CST:
10121 /* If C or D is a N where (N & M) == 0, it can be
10122 omitted (assumed 0). */
10123 if ((TREE_CODE (arg0) == PLUS_EXPR
10124 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10125 && (cst1 & wi::to_wide (pmop[which])) == 0)
10126 pmop[which] = NULL;
10127 break;
10128 default:
10129 break;
10132 /* Only build anything new if we optimized one or both arguments
10133 above. */
10134 if (pmop[0] != TREE_OPERAND (arg0, 0)
10135 || (TREE_CODE (arg0) != NEGATE_EXPR
10136 && pmop[1] != TREE_OPERAND (arg0, 1)))
10138 tree utype = TREE_TYPE (arg0);
10139 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10141 /* Perform the operations in a type that has defined
10142 overflow behavior. */
10143 utype = unsigned_type_for (TREE_TYPE (arg0));
10144 if (pmop[0] != NULL)
10145 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10146 if (pmop[1] != NULL)
10147 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10150 if (TREE_CODE (arg0) == NEGATE_EXPR)
10151 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10152 else if (TREE_CODE (arg0) == PLUS_EXPR)
10154 if (pmop[0] != NULL && pmop[1] != NULL)
10155 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10156 pmop[0], pmop[1]);
10157 else if (pmop[0] != NULL)
10158 tem = pmop[0];
10159 else if (pmop[1] != NULL)
10160 tem = pmop[1];
10161 else
10162 return build_int_cst (type, 0);
10164 else if (pmop[0] == NULL)
10165 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10166 else
10167 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10168 pmop[0], pmop[1]);
10169 /* TEM is now the new binary +, - or unary - replacement. */
10170 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10171 fold_convert_loc (loc, utype, arg1));
10172 return fold_convert_loc (loc, type, tem);
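/* [Editorial sketch, not part of fold-const.c.]  The same rewrites with
   M == 15: bits of N outside M can never reach the masked result.  */
#include <assert.h>
#include <stdint.h>

static void
demo_masked_plus (uint32_t a, uint32_t b)
{
  assert ((((a & 0x1f) + b) & 15) == ((a + b) & 15));	/* (N & M) == M */
  assert ((((a | 0x30) + b) & 15) == ((a + b) & 15));	/* (N & M) == 0 */
  assert ((((a ^ 0x30) - 16) & 15) == (a & 15));	/* (B & M) == 0 too */
}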
10177 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10178 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10179 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10181 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10183 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
10184 if (mask == -1)
10185 return
10186 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10189 goto associate;
10191 case RDIV_EXPR:
10192 /* Don't touch a floating-point divide by zero unless the mode
10193 of the constant can represent infinity. */
10194 if (TREE_CODE (arg1) == REAL_CST
10195 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10196 && real_zerop (arg1))
10197 return NULL_TREE;
10199 /* (-A) / (-B) -> A / B */
10200 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10201 return fold_build2_loc (loc, RDIV_EXPR, type,
10202 TREE_OPERAND (arg0, 0),
10203 negate_expr (arg1));
10204 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10205 return fold_build2_loc (loc, RDIV_EXPR, type,
10206 negate_expr (arg0),
10207 TREE_OPERAND (arg1, 0));
10208 return NULL_TREE;
10210 case TRUNC_DIV_EXPR:
10211 /* Fall through */
10213 case FLOOR_DIV_EXPR:
10214 /* Simplify A / (B << N) where A and B are positive and B is
10215 a power of 2, to A >> (N + log2(B)). */
10216 strict_overflow_p = false;
10217 if (TREE_CODE (arg1) == LSHIFT_EXPR
10218 && (TYPE_UNSIGNED (type)
10219 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10221 tree sval = TREE_OPERAND (arg1, 0);
10222 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10224 tree sh_cnt = TREE_OPERAND (arg1, 1);
10225 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10226 wi::exact_log2 (wi::to_wide (sval)));
10228 if (strict_overflow_p)
10229 fold_overflow_warning (("assuming signed overflow does not "
10230 "occur when simplifying A / (B << N)"),
10231 WARN_STRICT_OVERFLOW_MISC);
10233 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10234 sh_cnt, pow2);
10235 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10236 fold_convert_loc (loc, type, arg0), sh_cnt);
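/* [Editorial sketch, not part of fold-const.c.]  For unsigned A and
   B == 4, A / (B << N) is the right shift by N + log2 (B).  */
#include <assert.h>
#include <stdint.h>

static void
demo_div_by_shifted_pow2 (uint32_t a, unsigned n)
{
  if (n <= 29)				/* keep 4u << n representable */
    assert (a / (4u << n) == a >> (n + 2));
}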
10240 /* Fall through */
10242 case ROUND_DIV_EXPR:
10243 case CEIL_DIV_EXPR:
10244 case EXACT_DIV_EXPR:
10245 if (integer_zerop (arg1))
10246 return NULL_TREE;
10248 /* Convert -A / -B to A / B when the type is signed and overflow is
10249 undefined. */
10250 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10251 && TREE_CODE (op0) == NEGATE_EXPR
10252 && negate_expr_p (op1))
10254 if (INTEGRAL_TYPE_P (type))
10255 fold_overflow_warning (("assuming signed overflow does not occur "
10256 "when distributing negation across "
10257 "division"),
10258 WARN_STRICT_OVERFLOW_MISC);
10259 return fold_build2_loc (loc, code, type,
10260 fold_convert_loc (loc, type,
10261 TREE_OPERAND (arg0, 0)),
10262 negate_expr (op1));
10264 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10265 && TREE_CODE (arg1) == NEGATE_EXPR
10266 && negate_expr_p (op0))
10268 if (INTEGRAL_TYPE_P (type))
10269 fold_overflow_warning (("assuming signed overflow does not occur "
10270 "when distributing negation across "
10271 "division"),
10272 WARN_STRICT_OVERFLOW_MISC);
10273 return fold_build2_loc (loc, code, type,
10274 negate_expr (op0),
10275 fold_convert_loc (loc, type,
10276 TREE_OPERAND (arg1, 0)));
10279 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10280 operation, EXACT_DIV_EXPR.
10282 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10283 At one time others generated faster code; it's not clear if they still do
10284 after the last round of changes to the DIV code in expmed.c. */
10285 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10286 && multiple_of_p (type, arg0, arg1))
10287 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10288 fold_convert (type, arg0),
10289 fold_convert (type, arg1));
10291 strict_overflow_p = false;
10292 if (TREE_CODE (arg1) == INTEGER_CST
10293 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10294 &strict_overflow_p)))
10296 if (strict_overflow_p)
10297 fold_overflow_warning (("assuming signed overflow does not occur "
10298 "when simplifying division"),
10299 WARN_STRICT_OVERFLOW_MISC);
10300 return fold_convert_loc (loc, type, tem);
10303 return NULL_TREE;
10305 case CEIL_MOD_EXPR:
10306 case FLOOR_MOD_EXPR:
10307 case ROUND_MOD_EXPR:
10308 case TRUNC_MOD_EXPR:
10309 strict_overflow_p = false;
10310 if (TREE_CODE (arg1) == INTEGER_CST
10311 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10312 &strict_overflow_p)))
10314 if (strict_overflow_p)
10315 fold_overflow_warning (("assuming signed overflow does not occur "
10316 "when simplifying modulus"),
10317 WARN_STRICT_OVERFLOW_MISC);
10318 return fold_convert_loc (loc, type, tem);
10321 return NULL_TREE;
10323 case LROTATE_EXPR:
10324 case RROTATE_EXPR:
10325 case RSHIFT_EXPR:
10326 case LSHIFT_EXPR:
10327 /* Since a negative shift count is not well-defined,
10328 don't try to compute it in the compiler. */
10329 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10330 return NULL_TREE;
10332 prec = element_precision (type);
10334 /* If we have a rotate of a bit operation with the rotate count and
10335 the second operand of the bit operation both constant,
10336 permute the two operations. */
10337 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10338 && (TREE_CODE (arg0) == BIT_AND_EXPR
10339 || TREE_CODE (arg0) == BIT_IOR_EXPR
10340 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10341 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10343 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10344 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10345 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10346 fold_build2_loc (loc, code, type,
10347 arg00, arg1),
10348 fold_build2_loc (loc, code, type,
10349 arg01, arg1));
10352 /* Two consecutive rotates adding up to some integer
10353 multiple of the precision of the type can be ignored. */
10354 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10355 && TREE_CODE (arg0) == RROTATE_EXPR
10356 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10357 && wi::umod_trunc (wi::to_wide (arg1)
10358 + wi::to_wide (TREE_OPERAND (arg0, 1)),
10359 prec) == 0)
10360 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
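/* [Editorial sketch, not part of fold-const.c.]  Both rotate folds
   above on uint32_t; ror32 mirrors RROTATE_EXPR with counts mod 32.  */
#include <assert.h>
#include <stdint.h>

static uint32_t
ror32 (uint32_t x, unsigned n)
{
  return (x >> (n & 31)) | (x << (-n & 31));
}

static void
demo_rotate_folds (uint32_t x)
{
  /* A rotate of a bit-op with constant operand commutes with it.  */
  assert (ror32 (x & 0xff00u, 8) == (ror32 (x, 8) & ror32 (0xff00u, 8)));
  /* Rotate counts summing to a multiple of the precision cancel.  */
  assert (ror32 (ror32 (x, 12), 20) == x);
}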
10362 return NULL_TREE;
10364 case MIN_EXPR:
10365 case MAX_EXPR:
10366 goto associate;
10368 case TRUTH_ANDIF_EXPR:
10369 /* Note that the operands of this must be ints
10370 and their values must be 0 or 1.
10371 ("true" is a fixed value perhaps depending on the language.) */
10372 /* If first arg is constant zero, return it. */
10373 if (integer_zerop (arg0))
10374 return fold_convert_loc (loc, type, arg0);
10375 /* FALLTHRU */
10376 case TRUTH_AND_EXPR:
10377 /* If either arg is constant true, drop it. */
10378 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10379 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10380 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10381 /* Preserve sequence points. */
10382 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10383 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10384 /* If second arg is constant zero, result is zero, but first arg
10385 must be evaluated. */
10386 if (integer_zerop (arg1))
10387 return omit_one_operand_loc (loc, type, arg1, arg0);
10388 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10389 case will be handled here. */
10390 if (integer_zerop (arg0))
10391 return omit_one_operand_loc (loc, type, arg0, arg1);
10393 /* !X && X is always false. */
10394 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10395 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10396 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10397 /* X && !X is always false. */
10398 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10399 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10400 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10402 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10403 means A >= Y && A != MAX, but in this case we know that
10404 A < X <= MAX. */
10406 if (!TREE_SIDE_EFFECTS (arg0)
10407 && !TREE_SIDE_EFFECTS (arg1))
10409 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10410 if (tem && !operand_equal_p (tem, arg0, 0))
10411 return fold_build2_loc (loc, code, type, tem, arg1);
10413 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10414 if (tem && !operand_equal_p (tem, arg1, 0))
10415 return fold_build2_loc (loc, code, type, arg0, tem);
10418 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10419 != NULL_TREE)
10420 return tem;
10422 return NULL_TREE;
10424 case TRUTH_ORIF_EXPR:
10425 /* Note that the operands of this must be ints
10426 and their values must be 0 or true.
10427 ("true" is a fixed value perhaps depending on the language.) */
10428 /* If first arg is constant true, return it. */
10429 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10430 return fold_convert_loc (loc, type, arg0);
10431 /* FALLTHRU */
10432 case TRUTH_OR_EXPR:
10433 /* If either arg is constant zero, drop it. */
10434 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10435 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10436 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10437 /* Preserve sequence points. */
10438 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10439 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10440 /* If second arg is constant true, result is true, but we must
10441 evaluate first arg. */
10442 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10443 return omit_one_operand_loc (loc, type, arg1, arg0);
10444 /* Likewise for first arg, but note this only occurs here for
10445 TRUTH_OR_EXPR. */
10446 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10447 return omit_one_operand_loc (loc, type, arg0, arg1);
10449 /* !X || X is always true. */
10450 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10451 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10452 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10453 /* X || !X is always true. */
10454 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10455 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10456 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10458 /* (X && !Y) || (!X && Y) is X ^ Y */
10459 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10460 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10462 tree a0, a1, l0, l1, n0, n1;
10464 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10465 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10467 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10468 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10470 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10471 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10473 if ((operand_equal_p (n0, a0, 0)
10474 && operand_equal_p (n1, a1, 0))
10475 || (operand_equal_p (n0, a1, 0)
10476 && operand_equal_p (n1, a0, 0)))
10477 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
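/* [Editorial sketch, not part of fold-const.c.]  The truth table behind
   the fold above.  */
#include <assert.h>

static void
demo_andor_is_xor (_Bool x, _Bool y)
{
  assert (((x && !y) || (!x && y)) == (x ^ y));
}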
10480 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10481 != NULL_TREE)
10482 return tem;
10484 return NULL_TREE;
10486 case TRUTH_XOR_EXPR:
10487 /* If the second arg is constant zero, drop it. */
10488 if (integer_zerop (arg1))
10489 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10490 /* If the second arg is constant true, this is a logical inversion. */
10491 if (integer_onep (arg1))
10493 tem = invert_truthvalue_loc (loc, arg0);
10494 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10496 /* Identical arguments cancel to zero. */
10497 if (operand_equal_p (arg0, arg1, 0))
10498 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10500 /* !X ^ X is always true. */
10501 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10502 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10503 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10505 /* X ^ !X is always true. */
10506 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10507 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10508 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10510 return NULL_TREE;
10512 case EQ_EXPR:
10513 case NE_EXPR:
10514 STRIP_NOPS (arg0);
10515 STRIP_NOPS (arg1);
10517 tem = fold_comparison (loc, code, type, op0, op1);
10518 if (tem != NULL_TREE)
10519 return tem;
10521 /* bool_var != 1 becomes !bool_var. */
10522 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10523 && code == NE_EXPR)
10524 return fold_convert_loc (loc, type,
10525 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10526 TREE_TYPE (arg0), arg0));
10528 /* bool_var == 0 becomes !bool_var. */
10529 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10530 && code == EQ_EXPR)
10531 return fold_convert_loc (loc, type,
10532 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10533 TREE_TYPE (arg0), arg0));
10535 /* !exp != 0 becomes !exp */
10536 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10537 && code == NE_EXPR)
10538 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10540 /* If this is an EQ or NE comparison with zero and ARG0 is
10541 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10542 two operations, but the latter can be done in one less insn
10543 on machines that have only two-operand insns or on which a
10544 constant cannot be the first operand. */
10545 if (TREE_CODE (arg0) == BIT_AND_EXPR
10546 && integer_zerop (arg1))
10548 tree arg00 = TREE_OPERAND (arg0, 0);
10549 tree arg01 = TREE_OPERAND (arg0, 1);
10550 if (TREE_CODE (arg00) == LSHIFT_EXPR
10551 && integer_onep (TREE_OPERAND (arg00, 0)))
10553 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10554 arg01, TREE_OPERAND (arg00, 1));
10555 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10556 build_int_cst (TREE_TYPE (arg0), 1));
10557 return fold_build2_loc (loc, code, type,
10558 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10559 arg1);
10561 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10562 && integer_onep (TREE_OPERAND (arg01, 0)))
10564 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10565 arg00, TREE_OPERAND (arg01, 1));
10566 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10567 build_int_cst (TREE_TYPE (arg0), 1));
10568 return fold_build2_loc (loc, code, type,
10569 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10570 arg1);
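/* [Editorial sketch, not part of fold-const.c.]  Both forms test the
   same single bit of BAR, assuming foo < 32.  */
#include <assert.h>

static void
demo_single_bit_test (unsigned bar, unsigned foo)
{
  if (foo < 32)
    assert ((((1u << foo) & bar) == 0) == (((bar >> foo) & 1u) == 0));
}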
10574 /* If this is an NE or EQ comparison of zero against the result of a
10575 signed MOD operation whose second operand is a power of 2, make
10576 the MOD operation unsigned since it is simpler and equivalent. */
10577 if (integer_zerop (arg1)
10578 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10579 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10580 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10581 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10582 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10583 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10585 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10586 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10587 fold_convert_loc (loc, newtype,
10588 TREE_OPERAND (arg0, 0)),
10589 fold_convert_loc (loc, newtype,
10590 TREE_OPERAND (arg0, 1)));
10592 return fold_build2_loc (loc, code, type, newmod,
10593 fold_convert_loc (loc, newtype, arg1));
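/* [Editorial sketch, not part of fold-const.c.]  Zero-ness of a
   power-of-two modulus is sign-independent, because 2^32 is itself a
   multiple of the modulus (C99 truncating %, two's complement cast).  */
#include <assert.h>
#include <stdint.h>

static void
demo_mod_sign (int32_t x)
{
  assert ((x % 8 == 0) == ((uint32_t) x % 8u == 0));
}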
10596 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10597 C1 is a valid shift constant, and C2 is a power of two, i.e.
10598 a single bit. */
10599 if (TREE_CODE (arg0) == BIT_AND_EXPR
10600 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10601 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10602 == INTEGER_CST
10603 && integer_pow2p (TREE_OPERAND (arg0, 1))
10604 && integer_zerop (arg1))
10606 tree itype = TREE_TYPE (arg0);
10607 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10608 prec = TYPE_PRECISION (itype);
10610 /* Check for a valid shift count. */
10611 if (wi::ltu_p (wi::to_wide (arg001), prec))
10613 tree arg01 = TREE_OPERAND (arg0, 1);
10614 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10615 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10616 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10617 can be rewritten as (X & (C2 << C1)) != 0. */
10618 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10620 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10621 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10622 return fold_build2_loc (loc, code, type, tem,
10623 fold_convert_loc (loc, itype, arg1));
10625 /* Otherwise, for signed (arithmetic) shifts,
10626 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10627 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10628 else if (!TYPE_UNSIGNED (itype))
10629 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10630 arg000, build_int_cst (itype, 0));
10631 /* Otherwise, for unsigned (logical) shifts,
10632 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10633 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10634 else
10635 return omit_one_operand_loc (loc, type,
10636 code == EQ_EXPR ? integer_one_node
10637 : integer_zero_node,
10638 arg000);
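/* [Editorial sketch, not part of fold-const.c.]  The two rewrites above
   for int32_t, assuming GCC's arithmetic right shift of signed values.  */
#include <assert.h>
#include <stdint.h>

static void
demo_shift_and_bit (int32_t x)
{
  /* C2 << C1 fits in the precision: test the bit in place.  */
  assert ((((x >> 4) & 2) != 0) == ((x & (2 << 4)) != 0));
  /* C2 << C1 would overflow: only the sign can satisfy the test.  */
  assert ((((x >> 28) & 0x10) != 0) == (x < 0));
}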
10642 /* If this is a comparison of a field, we may be able to simplify it. */
10643 if ((TREE_CODE (arg0) == COMPONENT_REF
10644 || TREE_CODE (arg0) == BIT_FIELD_REF)
10645 /* Handle the constant case even without -O
10646 to make sure the warnings are given. */
10647 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10649 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10650 if (t1)
10651 return t1;
10654 /* Optimize comparisons of strlen vs zero to a compare of the
10655 first character of the string vs zero. To wit,
10656 strlen(ptr) == 0 => *ptr == 0
10657 strlen(ptr) != 0 => *ptr != 0
10658 Other cases should reduce to one of these two (or a constant)
10659 due to the return value of strlen being unsigned. */
10660 if (TREE_CODE (arg0) == CALL_EXPR
10661 && integer_zerop (arg1))
10663 tree fndecl = get_callee_fndecl (arg0);
10665 if (fndecl
10666 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10667 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10668 && call_expr_nargs (arg0) == 1
10669 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10671 tree iref = build_fold_indirect_ref_loc (loc,
10672 CALL_EXPR_ARG (arg0, 0));
10673 return fold_build2_loc (loc, code, type, iref,
10674 build_int_cst (TREE_TYPE (iref), 0));
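/* [Editorial sketch, not part of fold-const.c.]  strlen (p) == 0 for a
   valid NUL-terminated P depends only on the first byte.  */
#include <assert.h>
#include <string.h>

static void
demo_strlen_zero (const char *p)
{
  assert ((strlen (p) == 0) == (*p == '\0'));
}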
10678 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10679 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10680 if (TREE_CODE (arg0) == RSHIFT_EXPR
10681 && integer_zerop (arg1)
10682 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10684 tree arg00 = TREE_OPERAND (arg0, 0);
10685 tree arg01 = TREE_OPERAND (arg0, 1);
10686 tree itype = TREE_TYPE (arg00);
10687 if (wi::to_wide (arg01) == element_precision (itype) - 1)
10689 if (TYPE_UNSIGNED (itype))
10691 itype = signed_type_for (itype);
10692 arg00 = fold_convert_loc (loc, itype, arg00);
10694 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10695 type, arg00, build_zero_cst (itype));
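/* [Editorial sketch, not part of fold-const.c.]  Shifting by width - 1
   leaves only the sign; the unsigned case is recast as signed first
   (two's complement assumed, as on GCC targets).  */
#include <assert.h>
#include <stdint.h>

static void
demo_top_bit_shift (int32_t x, uint32_t u)
{
  assert (((x >> 31) != 0) == (x < 0));
  assert (((u >> 31) != 0) == ((int32_t) u < 0));
}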
10699 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10700 (X & C) == 0 when C is a single bit. */
10701 if (TREE_CODE (arg0) == BIT_AND_EXPR
10702 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10703 && integer_zerop (arg1)
10704 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10706 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10707 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10708 TREE_OPERAND (arg0, 1));
10709 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10710 type, tem,
10711 fold_convert_loc (loc, TREE_TYPE (arg0),
10712 arg1));
10715 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10716 constant C is a power of two, i.e. a single bit. */
10717 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10718 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10719 && integer_zerop (arg1)
10720 && integer_pow2p (TREE_OPERAND (arg0, 1))
10721 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10722 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10724 tree arg00 = TREE_OPERAND (arg0, 0);
10725 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10726 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10729 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10730 when C is a power of two, i.e. a single bit. */
10731 if (TREE_CODE (arg0) == BIT_AND_EXPR
10732 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10733 && integer_zerop (arg1)
10734 && integer_pow2p (TREE_OPERAND (arg0, 1))
10735 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10736 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10738 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10739 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10740 arg000, TREE_OPERAND (arg0, 1));
10741 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10742 tem, build_int_cst (TREE_TYPE (tem), 0));
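/* The three single-bit identities above as one standalone check,
   with C == 0x10 standing in for an arbitrary power-of-two constant;
   assuming 32-bit unsigned int.  */
#include <assert.h>

static void
check_single_bit (unsigned x)
{
  const unsigned c = 0x10;
  /* (~X & C) == 0  <->  (X & C) != 0.  */
  assert (((~x & c) == 0) == ((x & c) != 0));
  /* ((X & C) ^ C) == 0  <->  (X & C) != 0.  */
  assert ((((x & c) ^ c) == 0) == ((x & c) != 0));
  /* ((X ^ C) & C) == 0  <->  (X & C) != 0.  */
  assert ((((x ^ c) & c) == 0) == ((x & c) != 0));
}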
10745 if (integer_zerop (arg1)
10746 && tree_expr_nonzero_p (arg0))
10748 tree res = constant_boolean_node (code == NE_EXPR, type);
10749 return omit_one_operand_loc (loc, type, res, arg0);
10752 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
10753 if (TREE_CODE (arg0) == BIT_AND_EXPR
10754 && TREE_CODE (arg1) == BIT_AND_EXPR)
10756 tree arg00 = TREE_OPERAND (arg0, 0);
10757 tree arg01 = TREE_OPERAND (arg0, 1);
10758 tree arg10 = TREE_OPERAND (arg1, 0);
10759 tree arg11 = TREE_OPERAND (arg1, 1);
10760 tree itype = TREE_TYPE (arg0);
10762 if (operand_equal_p (arg01, arg11, 0))
10764 tem = fold_convert_loc (loc, itype, arg10);
10765 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10766 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10767 return fold_build2_loc (loc, code, type, tem,
10768 build_zero_cst (itype));
10770 if (operand_equal_p (arg01, arg10, 0))
10772 tem = fold_convert_loc (loc, itype, arg11);
10773 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10774 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10775 return fold_build2_loc (loc, code, type, tem,
10776 build_zero_cst (itype));
10778 if (operand_equal_p (arg00, arg11, 0))
10780 tem = fold_convert_loc (loc, itype, arg10);
10781 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10782 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10783 return fold_build2_loc (loc, code, type, tem,
10784 build_zero_cst (itype));
10786 if (operand_equal_p (arg00, arg10, 0))
10788 tem = fold_convert_loc (loc, itype, arg11);
10789 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10790 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10791 return fold_build2_loc (loc, code, type, tem,
10792 build_zero_cst (itype));
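/* The masked-equality rewrite above in standalone form, assuming
   32-bit unsigned int: XOR cancels the bits on which X and Y agree,
   so masking the XOR compares the two masked values at once.  */
#include <assert.h>

static void
check_masked_eq (unsigned x, unsigned y, unsigned c)
{
  assert (((x & c) == (y & c)) == (((x ^ y) & c) == 0));
}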
10796 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10797 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10799 tree arg00 = TREE_OPERAND (arg0, 0);
10800 tree arg01 = TREE_OPERAND (arg0, 1);
10801 tree arg10 = TREE_OPERAND (arg1, 0);
10802 tree arg11 = TREE_OPERAND (arg1, 1);
10803 tree itype = TREE_TYPE (arg0);
10805 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10806 operand_equal_p guarantees no side-effects so we don't need
10807 to use omit_one_operand on Z. */
10808 if (operand_equal_p (arg01, arg11, 0))
10809 return fold_build2_loc (loc, code, type, arg00,
10810 fold_convert_loc (loc, TREE_TYPE (arg00),
10811 arg10));
10812 if (operand_equal_p (arg01, arg10, 0))
10813 return fold_build2_loc (loc, code, type, arg00,
10814 fold_convert_loc (loc, TREE_TYPE (arg00),
10815 arg11));
10816 if (operand_equal_p (arg00, arg11, 0))
10817 return fold_build2_loc (loc, code, type, arg01,
10818 fold_convert_loc (loc, TREE_TYPE (arg01),
10819 arg10));
10820 if (operand_equal_p (arg00, arg10, 0))
10821 return fold_build2_loc (loc, code, type, arg01,
10822 fold_convert_loc (loc, TREE_TYPE (arg01),
10823 arg11));
10825 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10826 if (TREE_CODE (arg01) == INTEGER_CST
10827 && TREE_CODE (arg11) == INTEGER_CST)
10829 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10830 fold_convert_loc (loc, itype, arg11));
10831 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10832 return fold_build2_loc (loc, code, type, tem,
10833 fold_convert_loc (loc, itype, arg10));
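/* The XOR cancellation rewrites above as standalone checks: XOR with
   a fixed value is a bijection, so it can be dropped from both sides
   of an equality, and two XORed constants can be merged onto one
   side.  */
#include <assert.h>

static void
check_xor_cancel (unsigned x, unsigned y, unsigned z,
                  unsigned c1, unsigned c2)
{
  /* (X ^ Z) == (Y ^ Z)  <->  X == Y.  */
  assert (((x ^ z) == (y ^ z)) == (x == y));
  /* (X ^ C1) == (Y ^ C2)  <->  (X ^ (C1 ^ C2)) == Y.  */
  assert (((x ^ c1) == (y ^ c2)) == ((x ^ (c1 ^ c2)) == y));
}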
10837 /* Attempt to simplify equality/inequality comparisons of complex
10838 values. Only lower the comparison if the result is known or
10839 can be simplified to a single scalar comparison. */
10840 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10841 || TREE_CODE (arg0) == COMPLEX_CST)
10842 && (TREE_CODE (arg1) == COMPLEX_EXPR
10843 || TREE_CODE (arg1) == COMPLEX_CST))
10845 tree real0, imag0, real1, imag1;
10846 tree rcond, icond;
10848 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10850 real0 = TREE_OPERAND (arg0, 0);
10851 imag0 = TREE_OPERAND (arg0, 1);
10853 else
10855 real0 = TREE_REALPART (arg0);
10856 imag0 = TREE_IMAGPART (arg0);
10859 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10861 real1 = TREE_OPERAND (arg1, 0);
10862 imag1 = TREE_OPERAND (arg1, 1);
10864 else
10866 real1 = TREE_REALPART (arg1);
10867 imag1 = TREE_IMAGPART (arg1);
10870 rcond = fold_binary_loc (loc, code, type, real0, real1);
10871 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10873 if (integer_zerop (rcond))
10875 if (code == EQ_EXPR)
10876 return omit_two_operands_loc (loc, type, boolean_false_node,
10877 imag0, imag1);
10878 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10880 else
10882 if (code == NE_EXPR)
10883 return omit_two_operands_loc (loc, type, boolean_true_node,
10884 imag0, imag1);
10885 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10889 icond = fold_binary_loc (loc, code, type, imag0, imag1);
10890 if (icond && TREE_CODE (icond) == INTEGER_CST)
10892 if (integer_zerop (icond))
10894 if (code == EQ_EXPR)
10895 return omit_two_operands_loc (loc, type, boolean_false_node,
10896 real0, real1);
10897 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10899 else
10901 if (code == NE_EXPR)
10902 return omit_two_operands_loc (loc, type, boolean_true_node,
10903 real0, real1);
10904 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
10909 return NULL_TREE;
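/* A sketch of the complex lowering above using C99 _Complex (the
   function itself manipulates GCC's internal trees, not this code):
   complex equality is equality of both parts, so once one part has a
   known result the whole comparison collapses to the other part or
   to a constant.  */
#include <complex.h>
#include <assert.h>

static void
check_complex_eq (double complex a, double complex b)
{
  assert ((a == b) == (creal (a) == creal (b) && cimag (a) == cimag (b)));
}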
10911 case LT_EXPR:
10912 case GT_EXPR:
10913 case LE_EXPR:
10914 case GE_EXPR:
10915 tem = fold_comparison (loc, code, type, op0, op1);
10916 if (tem != NULL_TREE)
10917 return tem;
10919 /* Transform comparisons of the form X +- C CMP X. */
10920 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10921 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10922 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10923 && !HONOR_SNANS (arg0))
10925 tree arg01 = TREE_OPERAND (arg0, 1);
10926 enum tree_code code0 = TREE_CODE (arg0);
10927 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10929 /* (X - c) > X becomes false. */
10930 if (code == GT_EXPR
10931 && ((code0 == MINUS_EXPR && is_positive >= 0)
10932 || (code0 == PLUS_EXPR && is_positive <= 0)))
10933 return constant_boolean_node (0, type);
10935 /* Likewise (X + c) < X becomes false. */
10936 if (code == LT_EXPR
10937 && ((code0 == PLUS_EXPR && is_positive >= 0)
10938 || (code0 == MINUS_EXPR && is_positive <= 0)))
10939 return constant_boolean_node (0, type);
10941 /* Convert (X - c) <= X to true. */
10942 if (!HONOR_NANS (arg1)
10943 && code == LE_EXPR
10944 && ((code0 == MINUS_EXPR && is_positive >= 0)
10945 || (code0 == PLUS_EXPR && is_positive <= 0)))
10946 return constant_boolean_node (1, type);
10948 /* Convert (X + c) >= X to true. */
10949 if (!HONOR_NANS (arg1)
10950 && code == GE_EXPR
10951 && ((code0 == PLUS_EXPR && is_positive >= 0)
10952 || (code0 == MINUS_EXPR && is_positive <= 0)))
10953 return constant_boolean_node (1, type);
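/* Standalone checks for two of the cases above, assuming IEEE double
   with default rounding: (X - 1) > X can never hold (it is false even
   for NaN, since every comparison with NaN is false, which is why GT
   needs no HONOR_NANS test), while (X - 1) <= X holds for everything
   except NaN, hence the !HONOR_NANS guard.  */
#include <assert.h>

static void
check_offset_cmp (double x)
{
  assert (!((x - 1.0) > x));
  if (x == x)                    /* x is not a NaN */
    assert ((x - 1.0) <= x);
}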
10956 /* If we are comparing an ABS_EXPR with a constant, we can
10957 convert all the cases into explicit comparisons, but they may
10958 well not be faster than doing the ABS and one comparison.
10959 But ABS (X) <= C is a range comparison, which becomes a subtraction
10960 and a comparison, and is probably faster. */
10961 if (code == LE_EXPR
10962 && TREE_CODE (arg1) == INTEGER_CST
10963 && TREE_CODE (arg0) == ABS_EXPR
10964 && ! TREE_SIDE_EFFECTS (arg0)
10965 && (0 != (tem = negate_expr (arg1)))
10966 && TREE_CODE (tem) == INTEGER_CST
10967 && !TREE_OVERFLOW (tem))
10968 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
10969 build2 (GE_EXPR, type,
10970 TREE_OPERAND (arg0, 0), tem),
10971 build2 (LE_EXPR, type,
10972 TREE_OPERAND (arg0, 0), arg1));
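/* The range form of the ABS comparison above as a standalone check,
   guarded so that abs () and the negation stay well-defined:
   |X| <= C is exactly -C <= X && X <= C.  */
#include <assert.h>
#include <limits.h>
#include <stdlib.h>

static void
check_abs_range (int x, int c)
{
  if (x > INT_MIN && c > INT_MIN)
    assert ((abs (x) <= c) == (-c <= x && x <= c));
}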
10974 /* Convert ABS_EXPR<x> >= 0 to true. */
10975 strict_overflow_p = false;
10976 if (code == GE_EXPR
10977 && (integer_zerop (arg1)
10978 || (! HONOR_NANS (arg0)
10979 && real_zerop (arg1)))
10980 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
10982 if (strict_overflow_p)
10983 fold_overflow_warning (("assuming signed overflow does not occur "
10984 "when simplifying comparison of "
10985 "absolute value and zero"),
10986 WARN_STRICT_OVERFLOW_CONDITIONAL);
10987 return omit_one_operand_loc (loc, type,
10988 constant_boolean_node (true, type),
10989 arg0);
10992 /* Convert ABS_EXPR<x> < 0 to false. */
10993 strict_overflow_p = false;
10994 if (code == LT_EXPR
10995 && (integer_zerop (arg1) || real_zerop (arg1))
10996 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
10998 if (strict_overflow_p)
10999 fold_overflow_warning (("assuming signed overflow does not occur "
11000 "when simplifying comparison of "
11001 "absolute value and zero"),
11002 WARN_STRICT_OVERFLOW_CONDITIONAL);
11003 return omit_one_operand_loc (loc, type,
11004 constant_boolean_node (false, type),
11005 arg0);
11008 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11009 and similarly for >= into !=. */
11010 if ((code == LT_EXPR || code == GE_EXPR)
11011 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11012 && TREE_CODE (arg1) == LSHIFT_EXPR
11013 && integer_onep (TREE_OPERAND (arg1, 0)))
11014 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11015 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11016 TREE_OPERAND (arg1, 1)),
11017 build_zero_cst (TREE_TYPE (arg0)));
11019 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
11020 otherwise Y might be >= # of bits in X's type and thus e.g.
11021 (unsigned char) (1 << Y) for Y == 15 might be 0.
11022 If the cast is widening, then 1 << Y should have unsigned type,
11023 otherwise if Y is number of bits in the signed shift type minus 1,
11024 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
11025 Y == 31 might be 0xffffffff80000000. */
11026 if ((code == LT_EXPR || code == GE_EXPR)
11027 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11028 && CONVERT_EXPR_P (arg1)
11029 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11030 && (element_precision (TREE_TYPE (arg1))
11031 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11032 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11033 || (element_precision (TREE_TYPE (arg1))
11034 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11035 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11037 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11038 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11039 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11040 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11041 build_zero_cst (TREE_TYPE (arg0)));
11044 return NULL_TREE;
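/* A standalone check of the shift rewrite above, assuming 32-bit
   unsigned int and 0 <= Y < 32 so both shifts are well-defined:
   X < 2**Y holds exactly when X has no set bits at position Y or
   above, i.e. when X >> Y is zero.  */
#include <assert.h>

static void
check_ult_pow2 (unsigned x, unsigned y)
{
  if (y < 32)
    {
      assert ((x < (1u << y)) == ((x >> y) == 0));
      assert ((x >= (1u << y)) == ((x >> y) != 0));
    }
}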
11046 case UNORDERED_EXPR:
11047 case ORDERED_EXPR:
11048 case UNLT_EXPR:
11049 case UNLE_EXPR:
11050 case UNGT_EXPR:
11051 case UNGE_EXPR:
11052 case UNEQ_EXPR:
11053 case LTGT_EXPR:
11054 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11056 tree targ0 = strip_float_extensions (arg0);
11057 tree targ1 = strip_float_extensions (arg1);
11058 tree newtype = TREE_TYPE (targ0);
11060 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11061 newtype = TREE_TYPE (targ1);
11063 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11064 return fold_build2_loc (loc, code, type,
11065 fold_convert_loc (loc, newtype, targ0),
11066 fold_convert_loc (loc, newtype, targ1));
11069 return NULL_TREE;
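/* A standalone check of the extension stripping above: widening a
   float to double is exact, so the comparison gives the same answer
   in the narrower type (and likewise for the unordered variants).  */
#include <assert.h>

static void
check_float_ext_cmp (float a, float b)
{
  assert (((double) a < (double) b) == (a < b));
  assert (((double) a == (double) b) == (a == b));
}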
11071 case COMPOUND_EXPR:
11072 /* When pedantic, a compound expression can be neither an lvalue
11073 nor an integer constant expression. */
11074 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11075 return NULL_TREE;
11076 /* Don't let (0, 0) be a null pointer constant. */
11077 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11078 : fold_convert_loc (loc, type, arg1);
11079 return pedantic_non_lvalue_loc (loc, tem);
11081 case ASSERT_EXPR:
11082 /* An ASSERT_EXPR should never be passed to fold_binary. */
11083 gcc_unreachable ();
11085 default:
11086 return NULL_TREE;
11087 } /* switch (code) */
11090 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11091 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11092 of GOTO_EXPR. */
11094 static tree
11095 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11097 switch (TREE_CODE (*tp))
11099 case LABEL_EXPR:
11100 return *tp;
11102 case GOTO_EXPR:
11103 *walk_subtrees = 0;
11105 /* fall through */
11107 default:
11108 return NULL_TREE;
11112 /* Return whether the sub-tree ST contains a label which is accessible from
11113 outside the sub-tree. */
11115 static bool
11116 contains_label_p (tree st)
11118 return
11119 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
11122 /* Fold a ternary expression of code CODE and type TYPE with operands
11123 OP0, OP1, and OP2. Return the folded expression if folding is
11124 successful. Otherwise, return NULL_TREE. */
11126 tree
11127 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11128 tree op0, tree op1, tree op2)
11130 tree tem;
11131 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11132 enum tree_code_class kind = TREE_CODE_CLASS (code);
11134 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11135 && TREE_CODE_LENGTH (code) == 3);
11137 /* If this is a commutative operation, and OP0 is a constant, move it
11138 to OP1 to reduce the number of tests below. */
11139 if (commutative_ternary_tree_code (code)
11140 && tree_swap_operands_p (op0, op1))
11141 return fold_build3_loc (loc, code, type, op1, op0, op2);
11143 tem = generic_simplify (loc, code, type, op0, op1, op2);
11144 if (tem)
11145 return tem;
11147 /* Strip any conversions that don't change the mode. This is safe
11148 for every expression, except for a comparison expression because
11149 its signedness is derived from its operands. So, in the latter
11150 case, only strip conversions that don't change the signedness.
11152 Note that this is done as an internal manipulation within the
11153 constant folder, in order to find the simplest representation of
11154 the arguments so that their form can be studied. In any cases,
11155 the appropriate type conversions should be put back in the tree
11156 that will get out of the constant folder. */
11157 if (op0)
11159 arg0 = op0;
11160 STRIP_NOPS (arg0);
11163 if (op1)
11165 arg1 = op1;
11166 STRIP_NOPS (arg1);
11169 if (op2)
11171 arg2 = op2;
11172 STRIP_NOPS (arg2);
11175 switch (code)
11177 case COMPONENT_REF:
11178 if (TREE_CODE (arg0) == CONSTRUCTOR
11179 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11181 unsigned HOST_WIDE_INT idx;
11182 tree field, value;
11183 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11184 if (field == arg1)
11185 return value;
11187 return NULL_TREE;
11189 case COND_EXPR:
11190 case VEC_COND_EXPR:
11191 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11192 so all simple results must be passed through pedantic_non_lvalue. */
11193 if (TREE_CODE (arg0) == INTEGER_CST)
11195 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11196 tem = integer_zerop (arg0) ? op2 : op1;
11197 /* Only optimize constant conditions when the selected branch
11198 has the same type as the COND_EXPR. This avoids optimizing
11199 away "c ? x : throw", where the throw has a void type.
11200 Avoid throwing away an operand that contains a label. */
11201 if ((!TREE_SIDE_EFFECTS (unused_op)
11202 || !contains_label_p (unused_op))
11203 && (! VOID_TYPE_P (TREE_TYPE (tem))
11204 || VOID_TYPE_P (type)))
11205 return pedantic_non_lvalue_loc (loc, tem);
11206 return NULL_TREE;
11208 else if (TREE_CODE (arg0) == VECTOR_CST)
11210 if ((TREE_CODE (arg1) == VECTOR_CST
11211 || TREE_CODE (arg1) == CONSTRUCTOR)
11212 && (TREE_CODE (arg2) == VECTOR_CST
11213 || TREE_CODE (arg2) == CONSTRUCTOR))
11215 unsigned int nelts = VECTOR_CST_NELTS (arg0), i;
11216 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
11217 auto_vec_perm_indices sel (nelts);
11218 for (i = 0; i < nelts; i++)
11220 tree val = VECTOR_CST_ELT (arg0, i);
11221 if (integer_all_onesp (val))
11222 sel.quick_push (i);
11223 else if (integer_zerop (val))
11224 sel.quick_push (nelts + i);
11225 else /* Currently unreachable. */
11226 return NULL_TREE;
11228 tree t = fold_vec_perm (type, arg1, arg2, sel);
11229 if (t != NULL_TREE)
11230 return t;
11234 /* If we have A op B ? A : C, we may be able to convert this to a
11235 simpler expression, depending on the operation and the values
11236 of B and C. Signed zeros prevent all of these transformations,
11237 for reasons given above each one.
11239 Also try swapping the arguments and inverting the conditional. */
11240 if (COMPARISON_CLASS_P (arg0)
11241 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
11242 && !HONOR_SIGNED_ZEROS (element_mode (op1)))
11244 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11245 if (tem)
11246 return tem;
11249 if (COMPARISON_CLASS_P (arg0)
11250 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
11251 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11253 location_t loc0 = expr_location_or (arg0, loc);
11254 tem = fold_invert_truthvalue (loc0, arg0);
11255 if (tem && COMPARISON_CLASS_P (tem))
11257 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11258 if (tem)
11259 return tem;
11263 /* If the second operand is simpler than the third, swap them
11264 since that produces better jump optimization results. */
11265 if (truth_value_p (TREE_CODE (arg0))
11266 && tree_swap_operands_p (op1, op2))
11268 location_t loc0 = expr_location_or (arg0, loc);
11269 /* See if this can be inverted. If it can't, possibly because
11270 it was a floating-point inequality comparison, don't do
11271 anything. */
11272 tem = fold_invert_truthvalue (loc0, arg0);
11273 if (tem)
11274 return fold_build3_loc (loc, code, type, tem, op2, op1);
11277 /* Convert A ? 1 : 0 to simply A. */
11278 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11279 : (integer_onep (op1)
11280 && !VECTOR_TYPE_P (type)))
11281 && integer_zerop (op2)
11282 /* If we try to convert OP0 to our type, the
11283 call to fold will try to move the conversion inside
11284 a COND, which will recurse. In that case, the COND_EXPR
11285 is probably the best choice, so leave it alone. */
11286 && type == TREE_TYPE (arg0))
11287 return pedantic_non_lvalue_loc (loc, arg0);
11289 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11290 over COND_EXPR in cases such as floating point comparisons. */
11291 if (integer_zerop (op1)
11292 && code == COND_EXPR
11293 && integer_onep (op2)
11294 && !VECTOR_TYPE_P (type)
11295 && truth_value_p (TREE_CODE (arg0)))
11296 return pedantic_non_lvalue_loc (loc,
11297 fold_convert_loc (loc, type,
11298 invert_truthvalue_loc (loc,
11299 arg0)));
11301 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11302 if (TREE_CODE (arg0) == LT_EXPR
11303 && integer_zerop (TREE_OPERAND (arg0, 1))
11304 && integer_zerop (op2)
11305 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11307 /* sign_bit_p looks through both zero and sign extensions,
11308 but for this optimization only sign extensions are
11309 usable. */
11310 tree tem2 = TREE_OPERAND (arg0, 0);
11311 while (tem != tem2)
11313 if (TREE_CODE (tem2) != NOP_EXPR
11314 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11316 tem = NULL_TREE;
11317 break;
11319 tem2 = TREE_OPERAND (tem2, 0);
11321 /* sign_bit_p only checks ARG1 bits within A's precision.
11322 If <sign bit of A> has wider type than A, bits outside
11323 of A's precision in <sign bit of A> need to be checked.
11324 If they are all 0, this optimization needs to be done
11325 in unsigned A's type, if they are all 1 in signed A's type,
11326 otherwise this can't be done. */
11327 if (tem
11328 && TYPE_PRECISION (TREE_TYPE (tem))
11329 < TYPE_PRECISION (TREE_TYPE (arg1))
11330 && TYPE_PRECISION (TREE_TYPE (tem))
11331 < TYPE_PRECISION (type))
11333 int inner_width, outer_width;
11334 tree tem_type;
11336 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11337 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11338 if (outer_width > TYPE_PRECISION (type))
11339 outer_width = TYPE_PRECISION (type);
11341 wide_int mask = wi::shifted_mask
11342 (inner_width, outer_width - inner_width, false,
11343 TYPE_PRECISION (TREE_TYPE (arg1)));
11345 wide_int common = mask & wi::to_wide (arg1);
11346 if (common == mask)
11348 tem_type = signed_type_for (TREE_TYPE (tem));
11349 tem = fold_convert_loc (loc, tem_type, tem);
11351 else if (common == 0)
11353 tem_type = unsigned_type_for (TREE_TYPE (tem));
11354 tem = fold_convert_loc (loc, tem_type, tem);
11356 else
11357 tem = NULL;
11360 if (tem)
11361 return
11362 fold_convert_loc (loc, type,
11363 fold_build2_loc (loc, BIT_AND_EXPR,
11364 TREE_TYPE (tem), tem,
11365 fold_convert_loc (loc,
11366 TREE_TYPE (tem),
11367 arg1)));
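/* The sign-bit selection above as a standalone check, assuming
   32-bit two's complement int: A < 0 holds exactly when the sign
   bit of A is set, so the conditional is a single AND.  */
#include <assert.h>

static void
check_sign_bit_select (int a)
{
  const unsigned sign = 0x80000000u;
  assert ((a < 0 ? sign : 0u) == ((unsigned) a & sign));
}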
11370 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11371 already handled above. */
11372 if (TREE_CODE (arg0) == BIT_AND_EXPR
11373 && integer_onep (TREE_OPERAND (arg0, 1))
11374 && integer_zerop (op2)
11375 && integer_pow2p (arg1))
11377 tree tem = TREE_OPERAND (arg0, 0);
11378 STRIP_NOPS (tem);
11379 if (TREE_CODE (tem) == RSHIFT_EXPR
11380 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11381 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11382 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11383 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11384 fold_convert_loc (loc, type,
11385 TREE_OPERAND (tem, 0)),
11386 op1);
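/* The bit-test selection above as a standalone check, assuming
   32-bit unsigned int and N < 32: if bit N of A is set the
   conditional yields 1 << N, otherwise 0, which is just
   A & (1 << N).  */
#include <assert.h>

static void
check_bit_test_select (unsigned a, unsigned n)
{
  if (n < 32)
    assert (((((a >> n) & 1) != 0) ? (1u << n) : 0u) == (a & (1u << n)));
}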
11389 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11390 is probably obsolete because the first operand should be a
11391 truth value (that's why we have the two cases above), but let's
11392 leave it in until we can confirm this for all front-ends. */
11393 if (integer_zerop (op2)
11394 && TREE_CODE (arg0) == NE_EXPR
11395 && integer_zerop (TREE_OPERAND (arg0, 1))
11396 && integer_pow2p (arg1)
11397 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11398 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11399 arg1, OEP_ONLY_CONST))
11400 return pedantic_non_lvalue_loc (loc,
11401 fold_convert_loc (loc, type,
11402 TREE_OPERAND (arg0, 0)));
11404 /* Disable the transformations below for vectors, since
11405 fold_binary_op_with_conditional_arg may undo them immediately,
11406 yielding an infinite loop. */
11407 if (code == VEC_COND_EXPR)
11408 return NULL_TREE;
11410 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11411 if (integer_zerop (op2)
11412 && truth_value_p (TREE_CODE (arg0))
11413 && truth_value_p (TREE_CODE (arg1))
11414 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11415 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11416 : TRUTH_ANDIF_EXPR,
11417 type, fold_convert_loc (loc, type, arg0), op1);
11419 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11420 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11421 && truth_value_p (TREE_CODE (arg0))
11422 && truth_value_p (TREE_CODE (arg1))
11423 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11425 location_t loc0 = expr_location_or (arg0, loc);
11426 /* Only perform transformation if ARG0 is easily inverted. */
11427 tem = fold_invert_truthvalue (loc0, arg0);
11428 if (tem)
11429 return fold_build2_loc (loc, code == VEC_COND_EXPR
11430 ? BIT_IOR_EXPR
11431 : TRUTH_ORIF_EXPR,
11432 type, fold_convert_loc (loc, type, tem),
11433 op1);
11436 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11437 if (integer_zerop (arg1)
11438 && truth_value_p (TREE_CODE (arg0))
11439 && truth_value_p (TREE_CODE (op2))
11440 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11442 location_t loc0 = expr_location_or (arg0, loc);
11443 /* Only perform transformation if ARG0 is easily inverted. */
11444 tem = fold_invert_truthvalue (loc0, arg0);
11445 if (tem)
11446 return fold_build2_loc (loc, code == VEC_COND_EXPR
11447 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11448 type, fold_convert_loc (loc, type, tem),
11449 op2);
11452 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11453 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11454 && truth_value_p (TREE_CODE (arg0))
11455 && truth_value_p (TREE_CODE (op2))
11456 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11457 return fold_build2_loc (loc, code == VEC_COND_EXPR
11458 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11459 type, fold_convert_loc (loc, type, arg0), op2);
11461 return NULL_TREE;
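/* The four truth-value conditional rewrites above, checked
   exhaustively over 0/1 operands in a standalone sketch.  */
#include <assert.h>

static void
check_cond_truth (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      {
        assert ((a ? b : 0) == (a && b));   /* A ? B : 0 -> A && B */
        assert ((a ? b : 1) == (!a || b));  /* A ? B : 1 -> !A || B */
        assert ((a ? 0 : b) == (!a && b));  /* A ? 0 : B -> !A && B */
        assert ((a ? 1 : b) == (a || b));   /* A ? 1 : B -> A || B */
      }
}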
11463 case CALL_EXPR:
11464 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11465 of fold_ternary on them. */
11466 gcc_unreachable ();
11468 case BIT_FIELD_REF:
11469 if (TREE_CODE (arg0) == VECTOR_CST
11470 && (type == TREE_TYPE (TREE_TYPE (arg0))
11471 || (TREE_CODE (type) == VECTOR_TYPE
11472 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11474 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11475 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11476 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11477 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11479 if (n != 0
11480 && (idx % width) == 0
11481 && (n % width) == 0
11482 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11484 idx = idx / width;
11485 n = n / width;
11487 if (TREE_CODE (arg0) == VECTOR_CST)
11489 if (n == 1)
11490 return VECTOR_CST_ELT (arg0, idx);
11492 tree_vector_builder vals (type, n, 1);
11493 for (unsigned i = 0; i < n; ++i)
11494 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
11495 return vals.build ();
11500 /* On constants we can use native encode/interpret to constant
11501 fold (nearly) all BIT_FIELD_REFs. */
11502 if (CONSTANT_CLASS_P (arg0)
11503 && can_native_interpret_type_p (type)
11504 && BITS_PER_UNIT == 8)
11506 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11507 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11508 /* Limit us to a reasonable amount of work. To relax the
11509 other limitations we need bit-shifting of the buffer
11510 and rounding up the size. */
11511 if (bitpos % BITS_PER_UNIT == 0
11512 && bitsize % BITS_PER_UNIT == 0
11513 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11515 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11516 unsigned HOST_WIDE_INT len
11517 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11518 bitpos / BITS_PER_UNIT);
11519 if (len > 0
11520 && len * BITS_PER_UNIT >= bitsize)
11522 tree v = native_interpret_expr (type, b,
11523 bitsize / BITS_PER_UNIT);
11524 if (v)
11525 return v;
11530 return NULL_TREE;
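/* A sketch of the native encode/interpret route above: for
   byte-aligned positions and sizes, extracting a field from a
   constant is a byte copy out of the constant's object
   representation.  Host and target byte order coincide here; the
   real code works on the target's representation.  */
#include <assert.h>
#include <stdint.h>
#include <string.h>

static void
check_byte_extract (void)
{
  uint32_t c = 0x11223344;
  uint8_t buf[sizeof c], b2;
  memcpy (buf, &c, sizeof c);        /* "encode" the constant */
  memcpy (&b2, buf + 2, sizeof b2);  /* "interpret" byte 2 alone */
  assert (b2 == ((uint8_t *) &c)[2]);
}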
11532 case FMA_EXPR:
11533 /* For integers we can decompose the FMA if possible. */
11534 if (TREE_CODE (arg0) == INTEGER_CST
11535 && TREE_CODE (arg1) == INTEGER_CST)
11536 return fold_build2_loc (loc, PLUS_EXPR, type,
11537 const_binop (MULT_EXPR, arg0, arg1), arg2);
11538 if (integer_zerop (arg2))
11539 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11541 return fold_fma (loc, type, arg0, arg1, arg2);
11543 case VEC_PERM_EXPR:
11544 if (TREE_CODE (arg2) == VECTOR_CST)
11546 unsigned int nelts = VECTOR_CST_NELTS (arg2), i, mask, mask2;
11547 bool need_mask_canon = false;
11548 bool need_mask_canon2 = false;
11549 bool all_in_vec0 = true;
11550 bool all_in_vec1 = true;
11551 bool maybe_identity = true;
11552 bool single_arg = (op0 == op1);
11553 bool changed = false;
11555 mask2 = 2 * nelts - 1;
11556 mask = single_arg ? (nelts - 1) : mask2;
11557 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
11558 auto_vec_perm_indices sel (nelts);
11559 auto_vec_perm_indices sel2 (nelts);
11560 for (i = 0; i < nelts; i++)
11562 tree val = VECTOR_CST_ELT (arg2, i);
11563 if (TREE_CODE (val) != INTEGER_CST)
11564 return NULL_TREE;
11566 /* Make sure that the perm value is in an acceptable
11567 range. */
11568 wi::tree_to_wide_ref t = wi::to_wide (val);
11569 need_mask_canon |= wi::gtu_p (t, mask);
11570 need_mask_canon2 |= wi::gtu_p (t, mask2);
11571 unsigned int elt = t.to_uhwi () & mask;
11572 unsigned int elt2 = t.to_uhwi () & mask2;
11574 if (elt < nelts)
11575 all_in_vec1 = false;
11576 else
11577 all_in_vec0 = false;
11579 if ((elt & (nelts - 1)) != i)
11580 maybe_identity = false;
11582 sel.quick_push (elt);
11583 sel2.quick_push (elt2);
11586 if (maybe_identity)
11588 if (all_in_vec0)
11589 return op0;
11590 if (all_in_vec1)
11591 return op1;
11594 if (all_in_vec0)
11595 op1 = op0;
11596 else if (all_in_vec1)
11598 op0 = op1;
11599 for (i = 0; i < nelts; i++)
11600 sel[i] -= nelts;
11601 need_mask_canon = true;
11604 if ((TREE_CODE (op0) == VECTOR_CST
11605 || TREE_CODE (op0) == CONSTRUCTOR)
11606 && (TREE_CODE (op1) == VECTOR_CST
11607 || TREE_CODE (op1) == CONSTRUCTOR))
11609 tree t = fold_vec_perm (type, op0, op1, sel);
11610 if (t != NULL_TREE)
11611 return t;
11614 if (op0 == op1 && !single_arg)
11615 changed = true;
11617 /* Some targets are deficient and fail to expand a single
11618 argument permutation while still allowing an equivalent
11619 2-argument version. */
11620 if (need_mask_canon && arg2 == op2
11621 && !can_vec_perm_p (TYPE_MODE (type), false, &sel)
11622 && can_vec_perm_p (TYPE_MODE (type), false, &sel2))
11624 need_mask_canon = need_mask_canon2;
11625 sel = sel2;
11628 if (need_mask_canon && arg2 == op2)
11630 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11631 tree_vector_builder tsel (TREE_TYPE (arg2), nelts, 1);
11632 for (i = 0; i < nelts; i++)
11633 tsel.quick_push (build_int_cst (eltype, sel[i]));
11634 op2 = tsel.build ();
11635 changed = true;
11638 if (changed)
11639 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11641 return NULL_TREE;
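/* A scalar model of the VEC_PERM_EXPR selector semantics used above:
   element I of the result is (OP0 ++ OP1)[SEL[I] & (2*N - 1)], and a
   selector whose entries all land in one operand is a single-input
   shuffle, which is what the canonicalization code detects.  */
#include <assert.h>

static void
check_vec_perm (void)
{
  enum { N = 4 };
  const int op0[N] = { 0, 1, 2, 3 }, op1[N] = { 10, 11, 12, 13 };
  const unsigned sel[N] = { 5, 0, 7, 2 };  /* mixes both inputs */
  int res[N];
  for (unsigned i = 0; i < N; i++)
    {
      unsigned e = sel[i] & (2 * N - 1);   /* canonicalized index */
      res[i] = e < N ? op0[e] : op1[e - N];
    }
  assert (res[0] == 11 && res[1] == 0 && res[2] == 13 && res[3] == 2);
}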
11643 case BIT_INSERT_EXPR:
11644 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
11645 if (TREE_CODE (arg0) == INTEGER_CST
11646 && TREE_CODE (arg1) == INTEGER_CST)
11648 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11649 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11650 wide_int tem = (wi::to_wide (arg0)
11651 & wi::shifted_mask (bitpos, bitsize, true,
11652 TYPE_PRECISION (type)));
11653 wide_int tem2
11654 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11655 bitsize), bitpos);
11656 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
11658 else if (TREE_CODE (arg0) == VECTOR_CST
11659 && CONSTANT_CLASS_P (arg1)
11660 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11661 TREE_TYPE (arg1)))
11663 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11664 unsigned HOST_WIDE_INT elsize
11665 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11666 if (bitpos % elsize == 0)
11668 unsigned k = bitpos / elsize;
11669 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11670 return arg0;
11671 else
11673 unsigned int nelts = VECTOR_CST_NELTS (arg0);
11674 tree_vector_builder elts (type, nelts, 1);
11675 elts.quick_grow (nelts);
11676 for (unsigned int i = 0; i < nelts; ++i)
11677 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
11678 return elts.build ();
11682 return NULL_TREE;
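/* A scalar model of the integer BIT_INSERT_EXPR folding above,
   assuming a 64-bit container and 0 < BITSIZE < 64 with
   BITPOS + BITSIZE <= 64: clear the field with a shifted mask, then
   OR in the zero-extended replacement value.  */
#include <stdint.h>

static uint64_t
bit_insert (uint64_t word, uint64_t val, unsigned bitpos,
            unsigned bitsize)
{
  uint64_t mask = ((UINT64_C (1) << bitsize) - 1) << bitpos;
  return (word & ~mask) | ((val << bitpos) & mask);
}

/* E.g. bit_insert (0xffffffffffffffff, 0x0, 8, 8)
   == 0xffffffffffff00ff.  */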
11684 default:
11685 return NULL_TREE;
11686 } /* switch (code) */
11689 /* Get the element at index ACCESS_INDEX from CTOR, which must be a
11690 CONSTRUCTOR of an array (or vector). */
11692 tree
11693 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11695 tree index_type = NULL_TREE;
11696 offset_int low_bound = 0;
11698 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11700 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11701 if (domain_type && TYPE_MIN_VALUE (domain_type))
11703 /* Static constructors for variably sized objects make no sense. */
11704 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11705 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11706 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11710 if (index_type)
11711 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11712 TYPE_SIGN (index_type));
11714 offset_int index = low_bound - 1;
11715 if (index_type)
11716 index = wi::ext (index, TYPE_PRECISION (index_type),
11717 TYPE_SIGN (index_type));
11719 offset_int max_index;
11720 unsigned HOST_WIDE_INT cnt;
11721 tree cfield, cval;
11723 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11725 /* An array constructor might explicitly set the index, specify a range,
11726 or leave the index NULL, meaning that it is the next index after the
11727 previous one. */
11728 if (cfield)
11730 if (TREE_CODE (cfield) == INTEGER_CST)
11731 max_index = index = wi::to_offset (cfield);
11732 else
11734 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11735 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11736 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11739 else
11741 index += 1;
11742 if (index_type)
11743 index = wi::ext (index, TYPE_PRECISION (index_type),
11744 TYPE_SIGN (index_type));
11745 max_index = index;
11748 /* Do we have a match? */
11749 if (wi::cmpu (access_index, index) >= 0
11750 && wi::cmpu (access_index, max_index) <= 0)
11751 return cval;
11753 return NULL_TREE;
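/* The index conventions handled above mirror C initializer syntax,
   as in the sketch below (the [lo ... hi] range form is a GNU
   extension): an element may designate its index, give a range, or
   omit the designator and take the next index.  */
static const int ctor_demo[8] = { [2] = 5, 6, [4 ... 6] = 7 };
/* ctor_demo is { 0, 0, 5, 6, 7, 7, 7, 0 }.  */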
11756 /* Perform constant folding and related simplification of EXPR.
11757 The related simplifications include x*1 => x, x*0 => 0, etc.,
11758 and application of the associative law.
11759 NOP_EXPR conversions may be removed freely (as long as we
11760 are careful not to change the type of the overall expression).
11761 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11762 but we can constant-fold them if they have constant operands. */
11764 #ifdef ENABLE_FOLD_CHECKING
11765 # define fold(x) fold_1 (x)
11766 static tree fold_1 (tree);
11767 static
11768 #endif
11769 tree
11770 fold (tree expr)
11772 const tree t = expr;
11773 enum tree_code code = TREE_CODE (t);
11774 enum tree_code_class kind = TREE_CODE_CLASS (code);
11775 tree tem;
11776 location_t loc = EXPR_LOCATION (expr);
11778 /* Return right away if a constant. */
11779 if (kind == tcc_constant)
11780 return t;
11782 /* CALL_EXPR-like objects with variable numbers of operands are
11783 treated specially. */
11784 if (kind == tcc_vl_exp)
11786 if (code == CALL_EXPR)
11788 tem = fold_call_expr (loc, expr, false);
11789 return tem ? tem : expr;
11791 return expr;
11794 if (IS_EXPR_CODE_CLASS (kind))
11796 tree type = TREE_TYPE (t);
11797 tree op0, op1, op2;
11799 switch (TREE_CODE_LENGTH (code))
11801 case 1:
11802 op0 = TREE_OPERAND (t, 0);
11803 tem = fold_unary_loc (loc, code, type, op0);
11804 return tem ? tem : expr;
11805 case 2:
11806 op0 = TREE_OPERAND (t, 0);
11807 op1 = TREE_OPERAND (t, 1);
11808 tem = fold_binary_loc (loc, code, type, op0, op1);
11809 return tem ? tem : expr;
11810 case 3:
11811 op0 = TREE_OPERAND (t, 0);
11812 op1 = TREE_OPERAND (t, 1);
11813 op2 = TREE_OPERAND (t, 2);
11814 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11815 return tem ? tem : expr;
11816 default:
11817 break;
11821 switch (code)
11823 case ARRAY_REF:
11825 tree op0 = TREE_OPERAND (t, 0);
11826 tree op1 = TREE_OPERAND (t, 1);
11828 if (TREE_CODE (op1) == INTEGER_CST
11829 && TREE_CODE (op0) == CONSTRUCTOR
11830 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11832 tree val = get_array_ctor_element_at_index (op0,
11833 wi::to_offset (op1));
11834 if (val)
11835 return val;
11838 return t;
11841 /* Return a VECTOR_CST if possible. */
11842 case CONSTRUCTOR:
11844 tree type = TREE_TYPE (t);
11845 if (TREE_CODE (type) != VECTOR_TYPE)
11846 return t;
11848 unsigned i;
11849 tree val;
11850 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
11851 if (! CONSTANT_CLASS_P (val))
11852 return t;
11854 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
11857 case CONST_DECL:
11858 return fold (DECL_INITIAL (t));
11860 default:
11861 return t;
11862 } /* switch (code) */
11865 #ifdef ENABLE_FOLD_CHECKING
11866 #undef fold
11868 static void fold_checksum_tree (const_tree, struct md5_ctx *,
11869 hash_table<nofree_ptr_hash<const tree_node> > *);
11870 static void fold_check_failed (const_tree, const_tree);
11871 void print_fold_checksum (const_tree);
11873 /* When --enable-checking=fold, compute a digest of expr before
11874 and after the actual fold call to verify that fold did not
11875 accidentally change the original expr. */
11877 tree
11878 fold (tree expr)
11880 tree ret;
11881 struct md5_ctx ctx;
11882 unsigned char checksum_before[16], checksum_after[16];
11883 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
11885 md5_init_ctx (&ctx);
11886 fold_checksum_tree (expr, &ctx, &ht);
11887 md5_finish_ctx (&ctx, checksum_before);
11888 ht.empty ();
11890 ret = fold_1 (expr);
11892 md5_init_ctx (&ctx);
11893 fold_checksum_tree (expr, &ctx, &ht);
11894 md5_finish_ctx (&ctx, checksum_after);
11896 if (memcmp (checksum_before, checksum_after, 16))
11897 fold_check_failed (expr, ret);
11899 return ret;
11902 void
11903 print_fold_checksum (const_tree expr)
11905 struct md5_ctx ctx;
11906 unsigned char checksum[16], cnt;
11907 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
11909 md5_init_ctx (&ctx);
11910 fold_checksum_tree (expr, &ctx, &ht);
11911 md5_finish_ctx (&ctx, checksum);
11912 for (cnt = 0; cnt < 16; ++cnt)
11913 fprintf (stderr, "%02x", checksum[cnt]);
11914 putc ('\n', stderr);
11917 static void
11918 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
11920 internal_error ("fold check: original tree changed by fold");
11923 static void
11924 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
11925 hash_table<nofree_ptr_hash <const tree_node> > *ht)
11927 const tree_node **slot;
11928 enum tree_code code;
11929 union tree_node buf;
11930 int i, len;
11932 recursive_label:
11933 if (expr == NULL)
11934 return;
11935 slot = ht->find_slot (expr, INSERT);
11936 if (*slot != NULL)
11937 return;
11938 *slot = expr;
11939 code = TREE_CODE (expr);
11940 if (TREE_CODE_CLASS (code) == tcc_declaration
11941 && HAS_DECL_ASSEMBLER_NAME_P (expr))
11943 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
11944 memcpy ((char *) &buf, expr, tree_size (expr));
11945 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
11946 buf.decl_with_vis.symtab_node = NULL;
11947 expr = (tree) &buf;
11949 else if (TREE_CODE_CLASS (code) == tcc_type
11950 && (TYPE_POINTER_TO (expr)
11951 || TYPE_REFERENCE_TO (expr)
11952 || TYPE_CACHED_VALUES_P (expr)
11953 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
11954 || TYPE_NEXT_VARIANT (expr)
11955 || TYPE_ALIAS_SET_KNOWN_P (expr)))
11957 /* Allow these fields to be modified. */
11958 tree tmp;
11959 memcpy ((char *) &buf, expr, tree_size (expr));
11960 expr = tmp = (tree) &buf;
11961 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
11962 TYPE_POINTER_TO (tmp) = NULL;
11963 TYPE_REFERENCE_TO (tmp) = NULL;
11964 TYPE_NEXT_VARIANT (tmp) = NULL;
11965 TYPE_ALIAS_SET (tmp) = -1;
11966 if (TYPE_CACHED_VALUES_P (tmp))
11968 TYPE_CACHED_VALUES_P (tmp) = 0;
11969 TYPE_CACHED_VALUES (tmp) = NULL;
11972 md5_process_bytes (expr, tree_size (expr), ctx);
11973 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
11974 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11975 if (TREE_CODE_CLASS (code) != tcc_type
11976 && TREE_CODE_CLASS (code) != tcc_declaration
11977 && code != TREE_LIST
11978 && code != SSA_NAME
11979 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
11980 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11981 switch (TREE_CODE_CLASS (code))
11983 case tcc_constant:
11984 switch (code)
11986 case STRING_CST:
11987 md5_process_bytes (TREE_STRING_POINTER (expr),
11988 TREE_STRING_LENGTH (expr), ctx);
11989 break;
11990 case COMPLEX_CST:
11991 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11992 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11993 break;
11994 case VECTOR_CST:
11995 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
11996 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
11997 break;
11998 default:
11999 break;
12001 break;
12002 case tcc_exceptional:
12003 switch (code)
12005 case TREE_LIST:
12006 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12007 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12008 expr = TREE_CHAIN (expr);
12009 goto recursive_label;
12010 break;
12011 case TREE_VEC:
12012 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12013 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12014 break;
12015 default:
12016 break;
12018 break;
12019 case tcc_expression:
12020 case tcc_reference:
12021 case tcc_comparison:
12022 case tcc_unary:
12023 case tcc_binary:
12024 case tcc_statement:
12025 case tcc_vl_exp:
12026 len = TREE_OPERAND_LENGTH (expr);
12027 for (i = 0; i < len; ++i)
12028 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12029 break;
12030 case tcc_declaration:
12031 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12032 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12033 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12035 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12036 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12037 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12038 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12039 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12042 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12044 if (TREE_CODE (expr) == FUNCTION_DECL)
12046 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12047 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12049 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12051 break;
12052 case tcc_type:
12053 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12054 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12055 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12056 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12057 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12058 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12059 if (INTEGRAL_TYPE_P (expr)
12060 || SCALAR_FLOAT_TYPE_P (expr))
12062 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12063 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12065 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12066 if (TREE_CODE (expr) == RECORD_TYPE
12067 || TREE_CODE (expr) == UNION_TYPE
12068 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12069 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12070 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12071 break;
12072 default:
12073 break;
12077 /* Helper function for outputting the checksum of a tree T. When
12078 debugging with gdb, you can "define mynext" to be "next" followed
12079 by "call debug_fold_checksum (op0)", then just trace down till the
12080 outputs differ. */
12082 DEBUG_FUNCTION void
12083 debug_fold_checksum (const_tree t)
12085 int i;
12086 unsigned char checksum[16];
12087 struct md5_ctx ctx;
12088 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12090 md5_init_ctx (&ctx);
12091 fold_checksum_tree (t, &ctx, &ht);
12092 md5_finish_ctx (&ctx, checksum);
12093 ht.empty ();
12095 for (i = 0; i < 16; i++)
12096 fprintf (stderr, "%d ", checksum[i]);
12098 fprintf (stderr, "\n");
12101 #endif
12103 /* Fold a unary tree expression with code CODE of type TYPE with an
12104 operand OP0. LOC is the location of the resulting expression.
12105 Return a folded expression if successful. Otherwise, return a tree
12106 expression with code CODE of type TYPE with an operand OP0. */
12108 tree
12109 fold_build1_loc (location_t loc,
12110 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12112 tree tem;
12113 #ifdef ENABLE_FOLD_CHECKING
12114 unsigned char checksum_before[16], checksum_after[16];
12115 struct md5_ctx ctx;
12116 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12118 md5_init_ctx (&ctx);
12119 fold_checksum_tree (op0, &ctx, &ht);
12120 md5_finish_ctx (&ctx, checksum_before);
12121 ht.empty ();
12122 #endif
12124 tem = fold_unary_loc (loc, code, type, op0);
12125 if (!tem)
12126 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
12128 #ifdef ENABLE_FOLD_CHECKING
12129 md5_init_ctx (&ctx);
12130 fold_checksum_tree (op0, &ctx, &ht);
12131 md5_finish_ctx (&ctx, checksum_after);
12133 if (memcmp (checksum_before, checksum_after, 16))
12134 fold_check_failed (op0, tem);
12135 #endif
12136 return tem;
12139 /* Fold a binary tree expression with code CODE of type TYPE with
12140 operands OP0 and OP1. LOC is the location of the resulting
12141 expression. Return a folded expression if successful. Otherwise,
12142 return a tree expression with code CODE of type TYPE with operands
12143 OP0 and OP1. */
12145 tree
12146 fold_build2_loc (location_t loc,
12147 enum tree_code code, tree type, tree op0, tree op1
12148 MEM_STAT_DECL)
12150 tree tem;
12151 #ifdef ENABLE_FOLD_CHECKING
12152 unsigned char checksum_before_op0[16],
12153 checksum_before_op1[16],
12154 checksum_after_op0[16],
12155 checksum_after_op1[16];
12156 struct md5_ctx ctx;
12157 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12159 md5_init_ctx (&ctx);
12160 fold_checksum_tree (op0, &ctx, &ht);
12161 md5_finish_ctx (&ctx, checksum_before_op0);
12162 ht.empty ();
12164 md5_init_ctx (&ctx);
12165 fold_checksum_tree (op1, &ctx, &ht);
12166 md5_finish_ctx (&ctx, checksum_before_op1);
12167 ht.empty ();
12168 #endif
12170 tem = fold_binary_loc (loc, code, type, op0, op1);
12171 if (!tem)
12172 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12174 #ifdef ENABLE_FOLD_CHECKING
12175 md5_init_ctx (&ctx);
12176 fold_checksum_tree (op0, &ctx, &ht);
12177 md5_finish_ctx (&ctx, checksum_after_op0);
12178 ht.empty ();
12180 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12181 fold_check_failed (op0, tem);
12183 md5_init_ctx (&ctx);
12184 fold_checksum_tree (op1, &ctx, &ht);
12185 md5_finish_ctx (&ctx, checksum_after_op1);
12187 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12188 fold_check_failed (op1, tem);
12189 #endif
12190 return tem;
12193 /* Fold a ternary tree expression with code CODE of type TYPE with
12194 operands OP0, OP1, and OP2. Return a folded expression if
12195 successful. Otherwise, return a tree expression with code CODE of
12196 type TYPE with operands OP0, OP1, and OP2. */
12198 tree
12199 fold_build3_loc (location_t loc, enum tree_code code, tree type,
12200 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12202 tree tem;
12203 #ifdef ENABLE_FOLD_CHECKING
12204 unsigned char checksum_before_op0[16],
12205 checksum_before_op1[16],
12206 checksum_before_op2[16],
12207 checksum_after_op0[16],
12208 checksum_after_op1[16],
12209 checksum_after_op2[16];
12210 struct md5_ctx ctx;
12211 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12213 md5_init_ctx (&ctx);
12214 fold_checksum_tree (op0, &ctx, &ht);
12215 md5_finish_ctx (&ctx, checksum_before_op0);
12216 ht.empty ();
12218 md5_init_ctx (&ctx);
12219 fold_checksum_tree (op1, &ctx, &ht);
12220 md5_finish_ctx (&ctx, checksum_before_op1);
12221 ht.empty ();
12223 md5_init_ctx (&ctx);
12224 fold_checksum_tree (op2, &ctx, &ht);
12225 md5_finish_ctx (&ctx, checksum_before_op2);
12226 ht.empty ();
12227 #endif
12229 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12230 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12231 if (!tem)
12232 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12234 #ifdef ENABLE_FOLD_CHECKING
12235 md5_init_ctx (&ctx);
12236 fold_checksum_tree (op0, &ctx, &ht);
12237 md5_finish_ctx (&ctx, checksum_after_op0);
12238 ht.empty ();
12240 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12241 fold_check_failed (op0, tem);
12243 md5_init_ctx (&ctx);
12244 fold_checksum_tree (op1, &ctx, &ht);
12245 md5_finish_ctx (&ctx, checksum_after_op1);
12246 ht.empty ();
12248 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12249 fold_check_failed (op1, tem);
12251 md5_init_ctx (&ctx);
12252 fold_checksum_tree (op2, &ctx, &ht);
12253 md5_finish_ctx (&ctx, checksum_after_op2);
12255 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12256 fold_check_failed (op2, tem);
12257 #endif
12258 return tem;
12261 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12262 arguments in ARGARRAY, and a null static chain.
12263 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12264 of type TYPE from the given operands as constructed by build_call_array. */
12266 tree
12267 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12268 int nargs, tree *argarray)
12270 tree tem;
12271 #ifdef ENABLE_FOLD_CHECKING
12272 unsigned char checksum_before_fn[16],
12273 checksum_before_arglist[16],
12274 checksum_after_fn[16],
12275 checksum_after_arglist[16];
12276 struct md5_ctx ctx;
12277 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12278 int i;
12280 md5_init_ctx (&ctx);
12281 fold_checksum_tree (fn, &ctx, &ht);
12282 md5_finish_ctx (&ctx, checksum_before_fn);
12283 ht.empty ();
12285 md5_init_ctx (&ctx);
12286 for (i = 0; i < nargs; i++)
12287 fold_checksum_tree (argarray[i], &ctx, &ht);
12288 md5_finish_ctx (&ctx, checksum_before_arglist);
12289 ht.empty ();
12290 #endif
12292 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12293 if (!tem)
12294 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12296 #ifdef ENABLE_FOLD_CHECKING
12297 md5_init_ctx (&ctx);
12298 fold_checksum_tree (fn, &ctx, &ht);
12299 md5_finish_ctx (&ctx, checksum_after_fn);
12300 ht.empty ();
12302 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12303 fold_check_failed (fn, tem);
12305 md5_init_ctx (&ctx);
12306 for (i = 0; i < nargs; i++)
12307 fold_checksum_tree (argarray[i], &ctx, &ht);
12308 md5_finish_ctx (&ctx, checksum_after_arglist);
12310 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12311 fold_check_failed (NULL_TREE, tem);
12312 #endif
12313 return tem;
12316 /* Perform constant folding and related simplification of initializer
12317 expression EXPR. These behave identically to "fold_buildN" but ignore
12318 potential run-time traps and exceptions that fold must preserve. */
12320 #define START_FOLD_INIT \
12321 int saved_signaling_nans = flag_signaling_nans;\
12322 int saved_trapping_math = flag_trapping_math;\
12323 int saved_rounding_math = flag_rounding_math;\
12324 int saved_trapv = flag_trapv;\
12325 int saved_folding_initializer = folding_initializer;\
12326 flag_signaling_nans = 0;\
12327 flag_trapping_math = 0;\
12328 flag_rounding_math = 0;\
12329 flag_trapv = 0;\
12330 folding_initializer = 1;
12332 #define END_FOLD_INIT \
12333 flag_signaling_nans = saved_signaling_nans;\
12334 flag_trapping_math = saved_trapping_math;\
12335 flag_rounding_math = saved_rounding_math;\
12336 flag_trapv = saved_trapv;\
12337 folding_initializer = saved_folding_initializer;
12339 tree
12340 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12341 tree type, tree op)
12343 tree result;
12344 START_FOLD_INIT;
12346 result = fold_build1_loc (loc, code, type, op);
12348 END_FOLD_INIT;
12349 return result;
12352 tree
12353 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12354 tree type, tree op0, tree op1)
12356 tree result;
12357 START_FOLD_INIT;
12359 result = fold_build2_loc (loc, code, type, op0, op1);
12361 END_FOLD_INIT;
12362 return result;
12365 tree
12366 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12367 int nargs, tree *argarray)
12369 tree result;
12370 START_FOLD_INIT;
12372 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12374 END_FOLD_INIT;
12375 return result;
12378 #undef START_FOLD_INIT
12379 #undef END_FOLD_INIT
12381 /* Determine if the first argument is a multiple of the second argument.
12382 Return 0 if it is not, or if we cannot easily determine it to be.
12384 An example of the sort of thing we care about (at this point; this routine
12385 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12386 fold cases do now) is discovering that
12388 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12390 is a multiple of
12392 SAVE_EXPR (J * 8)
12394 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12396 This code also handles discovering that
12398 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12400 is a multiple of 8 so we don't have to worry about dealing with a
12401 possible remainder.
12403 Note that we *look* inside a SAVE_EXPR only to determine how it was
12404 calculated; it is not safe for fold to do much of anything else with the
12405 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12406 at run time. For example, the latter example above *cannot* be implemented
12407 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12408 evaluation time of the original SAVE_EXPR is not necessarily the same at
12409 the time the new expression is evaluated. The only optimization of this
12410 sort that would be valid is changing
12412 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12414 divided by 8 to
12416 SAVE_EXPR (I) * SAVE_EXPR (J)
12418 (where the same SAVE_EXPR (J) is used in the original and the
12419 transformed version). */
12421 int
12422 multiple_of_p (tree type, const_tree top, const_tree bottom)
12424 gimple *stmt;
12425 tree t1, op1, op2;
12427 if (operand_equal_p (top, bottom, 0))
12428 return 1;
12430 if (TREE_CODE (type) != INTEGER_TYPE)
12431 return 0;
12433 switch (TREE_CODE (top))
12435 case BIT_AND_EXPR:
12436 /* Bitwise and provides a power of two multiple. If the mask is
12437 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12438 if (!integer_pow2p (bottom))
12439 return 0;
12440 /* FALLTHRU */
12442 case MULT_EXPR:
12443 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12444 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12446 case MINUS_EXPR:
12447 /* It is impossible to prove if op0 - op1 is multiple of bottom
12448 precisely, so be conservative here checking if both op0 and op1
12449 are multiple of bottom. Note we check the second operand first
12450 since it's usually simpler. */
12451 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12452 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12454 case PLUS_EXPR:
12455 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12456 as op0 - 3 if the expression has unsigned type. For example,
12457 (X / 3) + 0xfffffffd is multiple of 3, but 0xfffffffd is not. */
12458 op1 = TREE_OPERAND (top, 1);
12459 if (TYPE_UNSIGNED (type)
12460 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
12461 op1 = fold_build1 (NEGATE_EXPR, type, op1);
12462 return (multiple_of_p (type, op1, bottom)
12463 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
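/* A concrete standalone instance of the unsigned PLUS_EXPR case
   above, assuming 32-bit unsigned arithmetic: adding 0xfffffff8 is
   subtracting 8 modulo 2**32, and since 8 divides 2**32 an 8-aligned
   value stays 8-aligned across the wraparound.  */
#include <assert.h>

static void
check_unsigned_plus (unsigned x)
{
  unsigned t = (x & ~7u) + 0xfffffff8u;   /* aligned + "-8" */
  assert (t % 8 == 0);
}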
12465 case LSHIFT_EXPR:
12466 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12468 op1 = TREE_OPERAND (top, 1);
12469 /* const_binop may not detect overflow correctly,
12470 so check for it explicitly here. */
12471 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
12472 wi::to_wide (op1))
12473 && 0 != (t1 = fold_convert (type,
12474 const_binop (LSHIFT_EXPR,
12475 size_one_node,
12476 op1)))
12477 && !TREE_OVERFLOW (t1))
12478 return multiple_of_p (type, t1, bottom);
12480 return 0;
12482 case NOP_EXPR:
12483 /* Can't handle conversions from non-integral or wider integral type. */
12484 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12485 || (TYPE_PRECISION (type)
12486 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12487 return 0;
12489 /* fall through */
12491 case SAVE_EXPR:
12492 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12494 case COND_EXPR:
12495 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12496 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12498 case INTEGER_CST:
12499 if (TREE_CODE (bottom) != INTEGER_CST
12500 || integer_zerop (bottom)
12501 || (TYPE_UNSIGNED (type)
12502 && (tree_int_cst_sgn (top) < 0
12503 || tree_int_cst_sgn (bottom) < 0)))
12504 return 0;
12505 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12506 SIGNED);
12508 case SSA_NAME:
12509 if (TREE_CODE (bottom) == INTEGER_CST
12510 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
12511 && gimple_code (stmt) == GIMPLE_ASSIGN)
12513 enum tree_code code = gimple_assign_rhs_code (stmt);
12515 /* Check for special cases to see if top is defined as multiple
12516 of bottom:
12518 top = (X & ~(bottom - 1)) ; bottom is a power of 2
12520 or
12522 Y = X % bottom
12523 top = X - Y. */
12524 if (code == BIT_AND_EXPR
12525 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12526 && TREE_CODE (op2) == INTEGER_CST
12527 && integer_pow2p (bottom)
12528 && wi::multiple_of_p (wi::to_widest (op2),
12529 wi::to_widest (bottom), UNSIGNED))
12530 return 1;
12532 op1 = gimple_assign_rhs1 (stmt);
12533 if (code == MINUS_EXPR
12534 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12535 && TREE_CODE (op2) == SSA_NAME
12536 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
12537 && gimple_code (stmt) == GIMPLE_ASSIGN
12538 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
12539 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
12540 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
12541 return 1;
12544 /* fall through */
12546 default:
12547 return 0;
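/* Illustrative sketch, not part of GCC: the BIT_AND_EXPR rule above in
   ordinary C.  If BOTTOM is a power of two and the mask is a multiple of
   BOTTOM, the low log2(BOTTOM) bits of (x & mask) are zero for every x,
   so the result is a multiple of BOTTOM.  All names here are
   hypothetical.  */
static int
sketch_bit_and_multiple_of (unsigned x, unsigned mask, unsigned bottom)
{
  if (bottom == 0 || (bottom & (bottom - 1)) != 0)
    return 0;			/* BOTTOM is not a power of two.  */
  if (mask % bottom != 0)
    return 0;			/* The mask guarantees nothing.  */
  return ((x & mask) % bottom) == 0;	/* Always 1 at this point.  */
}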
12551 #define tree_expr_nonnegative_warnv_p(X, Y) \
12552 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12554 #define RECURSE(X) \
12555 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12557 /* Return true if CODE or TYPE is known to be non-negative. */
12559 static bool
12560 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12562 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12563 && truth_value_p (code))
12564 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12565 have a signed:1 type (where the values are -1 and 0). */
12566 return true;
12567 return false;
12570 /* Return true if (CODE OP0) is known to be non-negative. If the return
12571 value is based on the assumption that signed overflow is undefined,
12572 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12573 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12575 bool
12576 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12577 bool *strict_overflow_p, int depth)
12579 if (TYPE_UNSIGNED (type))
12580 return true;
12582 switch (code)
12584 case ABS_EXPR:
12585 /* We can't return 1 if flag_wrapv is set because
12586 ABS_EXPR<INT_MIN> = INT_MIN. */
12587 if (!ANY_INTEGRAL_TYPE_P (type))
12588 return true;
12589 if (TYPE_OVERFLOW_UNDEFINED (type))
12591 *strict_overflow_p = true;
12592 return true;
12594 break;
12596 case NON_LVALUE_EXPR:
12597 case FLOAT_EXPR:
12598 case FIX_TRUNC_EXPR:
12599 return RECURSE (op0);
12601 CASE_CONVERT:
12603 tree inner_type = TREE_TYPE (op0);
12604 tree outer_type = type;
12606 if (TREE_CODE (outer_type) == REAL_TYPE)
12608 if (TREE_CODE (inner_type) == REAL_TYPE)
12609 return RECURSE (op0);
12610 if (INTEGRAL_TYPE_P (inner_type))
12612 if (TYPE_UNSIGNED (inner_type))
12613 return true;
12614 return RECURSE (op0);
12617 else if (INTEGRAL_TYPE_P (outer_type))
12619 if (TREE_CODE (inner_type) == REAL_TYPE)
12620 return RECURSE (op0);
12621 if (INTEGRAL_TYPE_P (inner_type))
12622 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12623 && TYPE_UNSIGNED (inner_type);
12626 break;
12628 default:
12629 return tree_simple_nonnegative_warnv_p (code, type);
12632 /* We don't know the sign of the result, so be conservative and return false. */
12633 return false;
12636 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12637 value is based on the assumption that signed overflow is undefined,
12638 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12639 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12641 bool
12642 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12643 tree op1, bool *strict_overflow_p,
12644 int depth)
12646 if (TYPE_UNSIGNED (type))
12647 return true;
12649 switch (code)
12651 case POINTER_PLUS_EXPR:
12652 case PLUS_EXPR:
12653 if (FLOAT_TYPE_P (type))
12654 return RECURSE (op0) && RECURSE (op1);
12656 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12657 both unsigned and at least 2 bits shorter than the result. */
12658 if (TREE_CODE (type) == INTEGER_TYPE
12659 && TREE_CODE (op0) == NOP_EXPR
12660 && TREE_CODE (op1) == NOP_EXPR)
12662 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12663 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12664 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12665 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12667 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12668 TYPE_PRECISION (inner2)) + 1;
12669 return prec < TYPE_PRECISION (type);
12672 break;
12674 case MULT_EXPR:
12675 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12677 /* x * x is always non-negative for floating point x,
12678 or for integer x when overflow is undefined. */
12679 if (operand_equal_p (op0, op1, 0)
12680 || (RECURSE (op0) && RECURSE (op1)))
12682 if (ANY_INTEGRAL_TYPE_P (type)
12683 && TYPE_OVERFLOW_UNDEFINED (type))
12684 *strict_overflow_p = true;
12685 return true;
12689 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12690 both unsigned and the sum of their precisions is less than the result's. */
12691 if (TREE_CODE (type) == INTEGER_TYPE
12692 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12693 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12695 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12696 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12697 : TREE_TYPE (op0);
12698 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12699 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12700 : TREE_TYPE (op1);
12702 bool unsigned0 = TYPE_UNSIGNED (inner0);
12703 bool unsigned1 = TYPE_UNSIGNED (inner1);
12705 if (TREE_CODE (op0) == INTEGER_CST)
12706 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12708 if (TREE_CODE (op1) == INTEGER_CST)
12709 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12711 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12712 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12714 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12715 ? tree_int_cst_min_precision (op0, UNSIGNED)
12716 : TYPE_PRECISION (inner0);
12718 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12719 ? tree_int_cst_min_precision (op1, UNSIGNED)
12720 : TYPE_PRECISION (inner1);
12722 return precision0 + precision1 < TYPE_PRECISION (type);
12725 return false;
12727 case BIT_AND_EXPR:
12728 case MAX_EXPR:
12729 return RECURSE (op0) || RECURSE (op1);
12731 case BIT_IOR_EXPR:
12732 case BIT_XOR_EXPR:
12733 case MIN_EXPR:
12734 case RDIV_EXPR:
12735 case TRUNC_DIV_EXPR:
12736 case CEIL_DIV_EXPR:
12737 case FLOOR_DIV_EXPR:
12738 case ROUND_DIV_EXPR:
12739 return RECURSE (op0) && RECURSE (op1);
12741 case TRUNC_MOD_EXPR:
12742 return RECURSE (op0);
12744 case FLOOR_MOD_EXPR:
12745 return RECURSE (op1);
12747 case CEIL_MOD_EXPR:
12748 case ROUND_MOD_EXPR:
12749 default:
12750 return tree_simple_nonnegative_warnv_p (code, type);
12753 /* We don't know the sign of the result, so be conservative and return false. */
12754 return false;
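/* Illustrative sketch, not part of GCC: the zero-extension arguments
   used above, in plain C.  Two zero-extended 8-bit values summed in a
   32-bit int need at most 9 bits, and multiplied at most 16 bits, so
   neither result can reach the sign bit.  Hypothetical helper, not a
   GCC API.  */
static int
sketch_zext_nonnegative (unsigned char x, unsigned char y)
{
  int sum = (int) x + (int) y;	/* At most 0x1fe: 9 bits.  */
  int prod = (int) x * (int) y;	/* At most 0xfe01: 16 bits.  */
  return sum >= 0 && prod >= 0;	/* Always 1.  */
}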
12757 /* Return true if T is known to be non-negative. If the return
12758 value is based on the assumption that signed overflow is undefined,
12759 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12760 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12762 bool
12763 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12765 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12766 return true;
12768 switch (TREE_CODE (t))
12770 case INTEGER_CST:
12771 return tree_int_cst_sgn (t) >= 0;
12773 case REAL_CST:
12774 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12776 case FIXED_CST:
12777 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12779 case COND_EXPR:
12780 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12782 case SSA_NAME:
12783 /* Limit the depth of recursion to avoid quadratic behavior.
12784 This is expected to catch almost all occurrences in practice.
12785 If this code misses important cases that unbounded recursion
12786 would not, passes that need this information could be revised
12787 to provide it through dataflow propagation. */
12788 return (!name_registered_for_update_p (t)
12789 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12790 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12791 strict_overflow_p, depth));
12793 default:
12794 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12798 /* Return true if T is known to be non-negative. If the return
12799 value is based on the assumption that signed overflow is undefined,
12800 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12801 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12803 bool
12804 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
12805 bool *strict_overflow_p, int depth)
12807 switch (fn)
12809 CASE_CFN_ACOS:
12810 CASE_CFN_ACOSH:
12811 CASE_CFN_CABS:
12812 CASE_CFN_COSH:
12813 CASE_CFN_ERFC:
12814 CASE_CFN_EXP:
12815 CASE_CFN_EXP10:
12816 CASE_CFN_EXP2:
12817 CASE_CFN_FABS:
12818 CASE_CFN_FDIM:
12819 CASE_CFN_HYPOT:
12820 CASE_CFN_POW10:
12821 CASE_CFN_FFS:
12822 CASE_CFN_PARITY:
12823 CASE_CFN_POPCOUNT:
12824 CASE_CFN_CLZ:
12825 CASE_CFN_CLRSB:
12826 case CFN_BUILT_IN_BSWAP32:
12827 case CFN_BUILT_IN_BSWAP64:
12828 /* Always true. */
12829 return true;
12831 CASE_CFN_SQRT:
12832 CASE_CFN_SQRT_FN:
12833 /* sqrt(-0.0) is -0.0. */
12834 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12835 return true;
12836 return RECURSE (arg0);
12838 CASE_CFN_ASINH:
12839 CASE_CFN_ATAN:
12840 CASE_CFN_ATANH:
12841 CASE_CFN_CBRT:
12842 CASE_CFN_CEIL:
12843 CASE_CFN_ERF:
12844 CASE_CFN_EXPM1:
12845 CASE_CFN_FLOOR:
12846 CASE_CFN_FMOD:
12847 CASE_CFN_FREXP:
12848 CASE_CFN_ICEIL:
12849 CASE_CFN_IFLOOR:
12850 CASE_CFN_IRINT:
12851 CASE_CFN_IROUND:
12852 CASE_CFN_LCEIL:
12853 CASE_CFN_LDEXP:
12854 CASE_CFN_LFLOOR:
12855 CASE_CFN_LLCEIL:
12856 CASE_CFN_LLFLOOR:
12857 CASE_CFN_LLRINT:
12858 CASE_CFN_LLROUND:
12859 CASE_CFN_LRINT:
12860 CASE_CFN_LROUND:
12861 CASE_CFN_MODF:
12862 CASE_CFN_NEARBYINT:
12863 CASE_CFN_RINT:
12864 CASE_CFN_ROUND:
12865 CASE_CFN_SCALB:
12866 CASE_CFN_SCALBLN:
12867 CASE_CFN_SCALBN:
12868 CASE_CFN_SIGNBIT:
12869 CASE_CFN_SIGNIFICAND:
12870 CASE_CFN_SINH:
12871 CASE_CFN_TANH:
12872 CASE_CFN_TRUNC:
12873 /* True if the 1st argument is nonnegative. */
12874 return RECURSE (arg0);
12876 CASE_CFN_FMAX:
12877 CASE_CFN_FMAX_FN:
12878 /* True if the 1st OR 2nd arguments are nonnegative. */
12879 return RECURSE (arg0) || RECURSE (arg1);
12881 CASE_CFN_FMIN:
12882 CASE_CFN_FMIN_FN:
12883 /* True if the 1st AND 2nd arguments are nonnegative. */
12884 return RECURSE (arg0) && RECURSE (arg1);
12886 CASE_CFN_COPYSIGN:
12887 CASE_CFN_COPYSIGN_FN:
12888 /* True if the 2nd argument is nonnegative. */
12889 return RECURSE (arg1);
12891 CASE_CFN_POWI:
12892 /* True if the 1st argument is nonnegative or the second
12893 argument is an even integer. */
12894 if (TREE_CODE (arg1) == INTEGER_CST
12895 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
12896 return true;
12897 return RECURSE (arg0);
12899 CASE_CFN_POW:
12900 /* True if the 1st argument is nonnegative or the second
12901 argument is an even integer valued real. */
12902 if (TREE_CODE (arg1) == REAL_CST)
12904 REAL_VALUE_TYPE c;
12905 HOST_WIDE_INT n;
12907 c = TREE_REAL_CST (arg1);
12908 n = real_to_integer (&c);
12909 if ((n & 1) == 0)
12911 REAL_VALUE_TYPE cint;
12912 real_from_integer (&cint, VOIDmode, n, SIGNED);
12913 if (real_identical (&c, &cint))
12914 return true;
12917 return RECURSE (arg0);
12919 default:
12920 break;
12922 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
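/* Illustrative sketch, not part of GCC: why the CASE_CFN_POWI case above
   accepts any even constant exponent.  x**(2k) equals (x**k) * (x**k),
   a square, and squares are non-negative (NaN aside).  sketch_powi is a
   hypothetical stand-in for the powi builtin.  */
static double
sketch_powi (double x, unsigned n)
{
  double r = 1.0;
  while (n--)
    r *= x;
  return r;
}
/* sketch_powi (-1.5, 4) == 5.0625: non-negative for any even N.  */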
12925 /* Return true if T is known to be non-negative. If the return
12926 value is based on the assumption that signed overflow is undefined,
12927 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12928 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12930 static bool
12931 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12933 enum tree_code code = TREE_CODE (t);
12934 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12935 return true;
12937 switch (code)
12939 case TARGET_EXPR:
12941 tree temp = TARGET_EXPR_SLOT (t);
12942 t = TARGET_EXPR_INITIAL (t);
12944 /* If the initializer is non-void, then it's a normal expression
12945 that will be assigned to the slot. */
12946 if (!VOID_TYPE_P (t))
12947 return RECURSE (t);
12949 /* Otherwise, the initializer sets the slot in some way. One common
12950 way is an assignment statement at the end of the initializer. */
12951 while (1)
12953 if (TREE_CODE (t) == BIND_EXPR)
12954 t = expr_last (BIND_EXPR_BODY (t));
12955 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12956 || TREE_CODE (t) == TRY_CATCH_EXPR)
12957 t = expr_last (TREE_OPERAND (t, 0));
12958 else if (TREE_CODE (t) == STATEMENT_LIST)
12959 t = expr_last (t);
12960 else
12961 break;
12963 if (TREE_CODE (t) == MODIFY_EXPR
12964 && TREE_OPERAND (t, 0) == temp)
12965 return RECURSE (TREE_OPERAND (t, 1));
12967 return false;
12970 case CALL_EXPR:
12972 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
12973 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
12975 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
12976 get_call_combined_fn (t),
12977 arg0,
12978 arg1,
12979 strict_overflow_p, depth);
12981 case COMPOUND_EXPR:
12982 case MODIFY_EXPR:
12983 return RECURSE (TREE_OPERAND (t, 1));
12985 case BIND_EXPR:
12986 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
12988 case SAVE_EXPR:
12989 return RECURSE (TREE_OPERAND (t, 0));
12991 default:
12992 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12996 #undef RECURSE
12997 #undef tree_expr_nonnegative_warnv_p
12999 /* Return true if T is known to be non-negative. If the return
13000 value is based on the assumption that signed overflow is undefined,
13001 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13002 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13004 bool
13005 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13007 enum tree_code code;
13008 if (t == error_mark_node)
13009 return false;
13011 code = TREE_CODE (t);
13012 switch (TREE_CODE_CLASS (code))
13014 case tcc_binary:
13015 case tcc_comparison:
13016 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13017 TREE_TYPE (t),
13018 TREE_OPERAND (t, 0),
13019 TREE_OPERAND (t, 1),
13020 strict_overflow_p, depth);
13022 case tcc_unary:
13023 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13024 TREE_TYPE (t),
13025 TREE_OPERAND (t, 0),
13026 strict_overflow_p, depth);
13028 case tcc_constant:
13029 case tcc_declaration:
13030 case tcc_reference:
13031 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13033 default:
13034 break;
13037 switch (code)
13039 case TRUTH_AND_EXPR:
13040 case TRUTH_OR_EXPR:
13041 case TRUTH_XOR_EXPR:
13042 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13043 TREE_TYPE (t),
13044 TREE_OPERAND (t, 0),
13045 TREE_OPERAND (t, 1),
13046 strict_overflow_p, depth);
13047 case TRUTH_NOT_EXPR:
13048 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13049 TREE_TYPE (t),
13050 TREE_OPERAND (t, 0),
13051 strict_overflow_p, depth);
13053 case COND_EXPR:
13054 case CONSTRUCTOR:
13055 case OBJ_TYPE_REF:
13056 case ASSERT_EXPR:
13057 case ADDR_EXPR:
13058 case WITH_SIZE_EXPR:
13059 case SSA_NAME:
13060 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13062 default:
13063 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13067 /* Return true if `t' is known to be non-negative. Handle warnings
13068 about undefined signed overflow. */
13070 bool
13071 tree_expr_nonnegative_p (tree t)
13073 bool ret, strict_overflow_p;
13075 strict_overflow_p = false;
13076 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13077 if (strict_overflow_p)
13078 fold_overflow_warning (("assuming signed overflow does not occur when "
13079 "determining that expression is always "
13080 "non-negative"),
13081 WARN_STRICT_OVERFLOW_MISC);
13082 return ret;
13086 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13087 For floating point we further ensure that T is not denormal.
13088 Similar logic is present in nonzero_address in rtlanal.h.
13090 If the return value is based on the assumption that signed overflow
13091 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13092 change *STRICT_OVERFLOW_P. */
13094 bool
13095 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13096 bool *strict_overflow_p)
13098 switch (code)
13100 case ABS_EXPR:
13101 return tree_expr_nonzero_warnv_p (op0,
13102 strict_overflow_p);
13104 case NOP_EXPR:
13106 tree inner_type = TREE_TYPE (op0);
13107 tree outer_type = type;
13109 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13110 && tree_expr_nonzero_warnv_p (op0,
13111 strict_overflow_p));
13113 break;
13115 case NON_LVALUE_EXPR:
13116 return tree_expr_nonzero_warnv_p (op0,
13117 strict_overflow_p);
13119 default:
13120 break;
13123 return false;
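/* Illustrative sketch, not part of GCC: why the NOP_EXPR case above
   requires the outer type to be at least as wide as the inner one.  A
   narrowing conversion can discard the only set bits and turn a nonzero
   value into zero.  Hypothetical helper.  */
static int
sketch_narrowing_loses_nonzero (void)
{
  unsigned int wide = 0x100;			/* Nonzero.  */
  unsigned char narrow = (unsigned char) wide;	/* Truncates to 0.  */
  return narrow == 0;				/* 1.  */
}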
13126 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13127 For floating point we further ensure that T is not denormal.
13128 Similar logic is present in nonzero_address in rtlanal.h.
13130 If the return value is based on the assumption that signed overflow
13131 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13132 change *STRICT_OVERFLOW_P. */
13134 bool
13135 tree_binary_nonzero_warnv_p (enum tree_code code,
13136 tree type,
13137 tree op0,
13138 tree op1, bool *strict_overflow_p)
13140 bool sub_strict_overflow_p;
13141 switch (code)
13143 case POINTER_PLUS_EXPR:
13144 case PLUS_EXPR:
13145 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13147 /* In the presence of negative values it is hard
13148 to say anything. */
13149 sub_strict_overflow_p = false;
13150 if (!tree_expr_nonnegative_warnv_p (op0,
13151 &sub_strict_overflow_p)
13152 || !tree_expr_nonnegative_warnv_p (op1,
13153 &sub_strict_overflow_p))
13154 return false;
13155 /* One of the operands must be positive and the other non-negative. */
13156 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13157 overflows, on a two's-complement machine the sum of two
13158 nonnegative numbers can never be zero. */
13159 return (tree_expr_nonzero_warnv_p (op0,
13160 strict_overflow_p)
13161 || tree_expr_nonzero_warnv_p (op1,
13162 strict_overflow_p));
13164 break;
13166 case MULT_EXPR:
13167 if (TYPE_OVERFLOW_UNDEFINED (type))
13169 if (tree_expr_nonzero_warnv_p (op0,
13170 strict_overflow_p)
13171 && tree_expr_nonzero_warnv_p (op1,
13172 strict_overflow_p))
13174 *strict_overflow_p = true;
13175 return true;
13178 break;
13180 case MIN_EXPR:
13181 sub_strict_overflow_p = false;
13182 if (tree_expr_nonzero_warnv_p (op0,
13183 &sub_strict_overflow_p)
13184 && tree_expr_nonzero_warnv_p (op1,
13185 &sub_strict_overflow_p))
13187 if (sub_strict_overflow_p)
13188 *strict_overflow_p = true;
13190 break;
13192 case MAX_EXPR:
13193 sub_strict_overflow_p = false;
13194 if (tree_expr_nonzero_warnv_p (op0,
13195 &sub_strict_overflow_p))
13197 if (sub_strict_overflow_p)
13198 *strict_overflow_p = true;
13200 /* When both operands are nonzero, then MAX must be too. */
13201 if (tree_expr_nonzero_warnv_p (op1,
13202 strict_overflow_p))
13203 return true;
13205 /* MAX where operand 0 is positive is positive. */
13206 return tree_expr_nonnegative_warnv_p (op0,
13207 strict_overflow_p);
13209 /* MAX where operand 1 is positive is positive. */
13210 else if (tree_expr_nonzero_warnv_p (op1,
13211 &sub_strict_overflow_p)
13212 && tree_expr_nonnegative_warnv_p (op1,
13213 &sub_strict_overflow_p))
13215 if (sub_strict_overflow_p)
13216 *strict_overflow_p = true;
13217 return true;
13219 break;
13221 case BIT_IOR_EXPR:
13222 return (tree_expr_nonzero_warnv_p (op1,
13223 strict_overflow_p)
13224 || tree_expr_nonzero_warnv_p (op0,
13225 strict_overflow_p));
13227 default:
13228 break;
13231 return false;
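/* Illustrative sketch, not part of GCC: the PLUS_EXPR remark above.
   Two non-negative 32-bit values sum to at most 2**32 - 2, so even a
   wrapped sum can only be zero when both operands are zero.
   Hypothetical helper; assumes 32-bit int.  */
static int
sketch_nonneg_sum_nonzero (int a, int b)
{
  if (a < 0 || b < 0 || (a | b) == 0)
    return 0;					/* Precondition not met.  */
  return ((unsigned) a + (unsigned) b) != 0;	/* Always 1.  */
}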
13234 /* Return true when T is an address and is known to be nonzero.
13235 For floating point we further ensure that T is not denormal.
13236 Similar logic is present in nonzero_address in rtlanal.h.
13238 If the return value is based on the assumption that signed overflow
13239 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13240 change *STRICT_OVERFLOW_P. */
13242 bool
13243 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13245 bool sub_strict_overflow_p;
13246 switch (TREE_CODE (t))
13248 case INTEGER_CST:
13249 return !integer_zerop (t);
13251 case ADDR_EXPR:
13253 tree base = TREE_OPERAND (t, 0);
13255 if (!DECL_P (base))
13256 base = get_base_address (base);
13258 if (base && TREE_CODE (base) == TARGET_EXPR)
13259 base = TARGET_EXPR_SLOT (base);
13261 if (!base)
13262 return false;
13264 /* For objects in symbol table check if we know they are non-zero.
13265 Don't do anything for variables and functions before symtab is built;
13266 it is quite possible that they will be declared weak later. */
13267 int nonzero_addr = maybe_nonzero_address (base);
13268 if (nonzero_addr >= 0)
13269 return nonzero_addr;
13271 /* Constants are never weak. */
13272 if (CONSTANT_CLASS_P (base))
13273 return true;
13275 return false;
13278 case COND_EXPR:
13279 sub_strict_overflow_p = false;
13280 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13281 &sub_strict_overflow_p)
13282 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13283 &sub_strict_overflow_p))
13285 if (sub_strict_overflow_p)
13286 *strict_overflow_p = true;
13287 return true;
13289 break;
13291 case SSA_NAME:
13292 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13293 break;
13294 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
13296 default:
13297 break;
13299 return false;
13302 #define integer_valued_real_p(X) \
13303 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13305 #define RECURSE(X) \
13306 ((integer_valued_real_p) (X, depth + 1))
13308 /* Return true if the floating point result of (CODE OP0) has an
13309 integer value. We also allow +Inf, -Inf and NaN to be considered
13310 integer values. Return false for signaling NaN.
13312 DEPTH is the current nesting depth of the query. */
13314 bool
13315 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13317 switch (code)
13319 case FLOAT_EXPR:
13320 return true;
13322 case ABS_EXPR:
13323 return RECURSE (op0);
13325 CASE_CONVERT:
13327 tree type = TREE_TYPE (op0);
13328 if (TREE_CODE (type) == INTEGER_TYPE)
13329 return true;
13330 if (TREE_CODE (type) == REAL_TYPE)
13331 return RECURSE (op0);
13332 break;
13335 default:
13336 break;
13338 return false;
13341 /* Return true if the floating point result of (CODE OP0 OP1) has an
13342 integer value. We also allow +Inf, -Inf and NaN to be considered
13343 integer values. Return false for signaling NaN.
13345 DEPTH is the current nesting depth of the query. */
13347 bool
13348 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13350 switch (code)
13352 case PLUS_EXPR:
13353 case MINUS_EXPR:
13354 case MULT_EXPR:
13355 case MIN_EXPR:
13356 case MAX_EXPR:
13357 return RECURSE (op0) && RECURSE (op1);
13359 default:
13360 break;
13362 return false;
13365 /* Return true if the floating point result of calling FN with arguments
13366 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13367 considered integer values. Return false for signaling NaN. If FN
13368 takes fewer than 2 arguments, the remaining ARGn are null.
13370 DEPTH is the current nesting depth of the query. */
13372 bool
13373 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13375 switch (fn)
13377 CASE_CFN_CEIL:
13378 CASE_CFN_FLOOR:
13379 CASE_CFN_NEARBYINT:
13380 CASE_CFN_RINT:
13381 CASE_CFN_ROUND:
13382 CASE_CFN_TRUNC:
13383 return true;
13385 CASE_CFN_FMIN:
13386 CASE_CFN_FMIN_FN:
13387 CASE_CFN_FMAX:
13388 CASE_CFN_FMAX_FN:
13389 return RECURSE (arg0) && RECURSE (arg1);
13391 default:
13392 break;
13394 return false;
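/* Illustrative sketch, not part of GCC: the "integer valued" property
   being tracked.  A double is integer valued when it equals its own
   truncation; this hypothetical check shows the idea for values that
   fit in a long long.  */
static int
sketch_integer_valued (double d)
{
  long long i = (long long) d;	/* Assumes |d| fits in long long.  */
  return (double) i == d;
}
/* sketch_integer_valued (3.0) == 1, sketch_integer_valued (3.5) == 0.  */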
13397 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13398 has an integer value. We also allow +Inf, -Inf and NaN to be
13399 considered integer values. Return false for signaling NaN.
13401 DEPTH is the current nesting depth of the query. */
13403 bool
13404 integer_valued_real_single_p (tree t, int depth)
13406 switch (TREE_CODE (t))
13408 case REAL_CST:
13409 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13411 case COND_EXPR:
13412 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13414 case SSA_NAME:
13415 /* Limit the depth of recursion to avoid quadratic behavior.
13416 This is expected to catch almost all occurrences in practice.
13417 If this code misses important cases that unbounded recursion
13418 would not, passes that need this information could be revised
13419 to provide it through dataflow propagation. */
13420 return (!name_registered_for_update_p (t)
13421 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13422 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13423 depth));
13425 default:
13426 break;
13428 return false;
13431 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13432 has an integer value. We also allow +Inf, -Inf and NaN to be
13433 considered integer values. Return false for signaling NaN.
13435 DEPTH is the current nesting depth of the query. */
13437 static bool
13438 integer_valued_real_invalid_p (tree t, int depth)
13440 switch (TREE_CODE (t))
13442 case COMPOUND_EXPR:
13443 case MODIFY_EXPR:
13444 case BIND_EXPR:
13445 return RECURSE (TREE_OPERAND (t, 1));
13447 case SAVE_EXPR:
13448 return RECURSE (TREE_OPERAND (t, 0));
13450 default:
13451 break;
13453 return false;
13456 #undef RECURSE
13457 #undef integer_valued_real_p
13459 /* Return true if the floating point expression T has an integer value.
13460 We also allow +Inf, -Inf and NaN to be considered integer values.
13461 Return false for signaling NaN.
13463 DEPTH is the current nesting depth of the query. */
13465 bool
13466 integer_valued_real_p (tree t, int depth)
13468 if (t == error_mark_node)
13469 return false;
13471 tree_code code = TREE_CODE (t);
13472 switch (TREE_CODE_CLASS (code))
13474 case tcc_binary:
13475 case tcc_comparison:
13476 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13477 TREE_OPERAND (t, 1), depth);
13479 case tcc_unary:
13480 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13482 case tcc_constant:
13483 case tcc_declaration:
13484 case tcc_reference:
13485 return integer_valued_real_single_p (t, depth);
13487 default:
13488 break;
13491 switch (code)
13493 case COND_EXPR:
13494 case SSA_NAME:
13495 return integer_valued_real_single_p (t, depth);
13497 case CALL_EXPR:
13499 tree arg0 = (call_expr_nargs (t) > 0
13500 ? CALL_EXPR_ARG (t, 0)
13501 : NULL_TREE);
13502 tree arg1 = (call_expr_nargs (t) > 1
13503 ? CALL_EXPR_ARG (t, 1)
13504 : NULL_TREE);
13505 return integer_valued_real_call_p (get_call_combined_fn (t),
13506 arg0, arg1, depth);
13509 default:
13510 return integer_valued_real_invalid_p (t, depth);
13514 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13515 attempt to fold the expression to a constant without modifying TYPE,
13516 OP0 or OP1.
13518 If the expression could be simplified to a constant, then return
13519 the constant. If the expression would not be simplified to a
13520 constant, then return NULL_TREE. */
13522 tree
13523 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13525 tree tem = fold_binary (code, type, op0, op1);
13526 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13529 /* Given the components of a unary expression CODE, TYPE and OP0,
13530 attempt to fold the expression to a constant without modifying
13531 TYPE or OP0.
13533 If the expression could be simplified to a constant, then return
13534 the constant. If the expression would not be simplified to a
13535 constant, then return NULL_TREE. */
13537 tree
13538 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13540 tree tem = fold_unary (code, type, op0);
13541 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13544 /* If EXP represents referencing an element in a constant string
13545 (either via pointer arithmetic or array indexing), return the
13546 tree representing the value accessed, otherwise return NULL. */
13548 tree
13549 fold_read_from_constant_string (tree exp)
13551 if ((TREE_CODE (exp) == INDIRECT_REF
13552 || TREE_CODE (exp) == ARRAY_REF)
13553 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13555 tree exp1 = TREE_OPERAND (exp, 0);
13556 tree index;
13557 tree string;
13558 location_t loc = EXPR_LOCATION (exp);
13560 if (TREE_CODE (exp) == INDIRECT_REF)
13561 string = string_constant (exp1, &index);
13562 else
13564 tree low_bound = array_ref_low_bound (exp);
13565 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13567 /* Optimize the special-case of a zero lower bound.
13569 We convert the low_bound to sizetype to avoid some problems
13570 with constant folding. (E.g. suppose the lower bound is 1,
13571 and its mode is QI. Without the conversion, (ARRAY
13572 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13573 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13574 if (! integer_zerop (low_bound))
13575 index = size_diffop_loc (loc, index,
13576 fold_convert_loc (loc, sizetype, low_bound));
13578 string = exp1;
13581 scalar_int_mode char_mode;
13582 if (string
13583 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13584 && TREE_CODE (string) == STRING_CST
13585 && TREE_CODE (index) == INTEGER_CST
13586 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13587 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
13588 &char_mode)
13589 && GET_MODE_SIZE (char_mode) == 1)
13590 return build_int_cst_type (TREE_TYPE (exp),
13591 (TREE_STRING_POINTER (string)
13592 [TREE_INT_CST_LOW (index)]));
13594 return NULL;
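/* Illustrative sketch, not part of GCC: the effect of the fold above in
   source terms.  Indexing a string literal with an in-bounds constant
   (the terminating nul included) can be replaced by the character
   constant itself.  Hypothetical helper.  */
static int
sketch_read_constant_string (void)
{
  static const char str[] = "abc";
  /* A folder may rewrite str[1] as the constant 'b'.  */
  return str[1] == 'b' && str[3] == '\0';	/* 1.  */
}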
13597 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13598 an integer constant, real, or fixed-point constant.
13600 TYPE is the type of the result. */
13602 static tree
13603 fold_negate_const (tree arg0, tree type)
13605 tree t = NULL_TREE;
13607 switch (TREE_CODE (arg0))
13609 case INTEGER_CST:
13611 bool overflow;
13612 wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
13613 t = force_fit_type (type, val, 1,
13614 (overflow && ! TYPE_UNSIGNED (type))
13615 || TREE_OVERFLOW (arg0));
13616 break;
13619 case REAL_CST:
13620 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13621 break;
13623 case FIXED_CST:
13625 FIXED_VALUE_TYPE f;
13626 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13627 &(TREE_FIXED_CST (arg0)), NULL,
13628 TYPE_SATURATING (type));
13629 t = build_fixed (type, f);
13630 /* Propagate overflow flags. */
13631 if (overflow_p | TREE_OVERFLOW (arg0))
13632 TREE_OVERFLOW (t) = 1;
13633 break;
13636 default:
13637 gcc_unreachable ();
13640 return t;
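/* Illustrative sketch, not part of GCC: the overflow the INTEGER_CST
   case above must record.  Negating the most negative two's-complement
   value wraps back to itself; shown here with well-defined unsigned
   arithmetic.  Hypothetical helper; the final cast assumes the usual
   GCC wrapping conversion.  */
static int
sketch_negate_overflows (void)
{
  int int_min = -2147483647 - 1;
  unsigned wrapped = 0u - (unsigned) int_min;	/* 0x80000000 again.  */
  return (int) wrapped == int_min;		/* 1: -INT_MIN == INT_MIN.  */
}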
13643 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13644 an integer constant or real constant.
13646 TYPE is the type of the result. */
13648 tree
13649 fold_abs_const (tree arg0, tree type)
13651 tree t = NULL_TREE;
13653 switch (TREE_CODE (arg0))
13655 case INTEGER_CST:
13657 /* If the value is unsigned or non-negative, then the absolute value
13658 is the same as the ordinary value. */
13659 if (!wi::neg_p (wi::to_wide (arg0), TYPE_SIGN (type)))
13660 t = arg0;
13662 /* If the value is negative, then the absolute value is
13663 its negation. */
13664 else
13666 bool overflow;
13667 wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
13668 t = force_fit_type (type, val, -1,
13669 overflow | TREE_OVERFLOW (arg0));
13672 break;
13674 case REAL_CST:
13675 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13676 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13677 else
13678 t = arg0;
13679 break;
13681 default:
13682 gcc_unreachable ();
13685 return t;
13688 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13689 constant. TYPE is the type of the result. */
13691 static tree
13692 fold_not_const (const_tree arg0, tree type)
13694 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13696 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
13699 /* Given CODE, a relational operator, the target type, TYPE and two
13700 constant operands OP0 and OP1, return the result of the
13701 relational operation. If the result is not a compile time
13702 constant, then return NULL_TREE. */
13704 static tree
13705 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13707 int result, invert;
13709 /* From here on, the only cases we handle are when the result is
13710 known to be a constant. */
13712 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13714 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13715 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13717 /* Handle the cases where either operand is a NaN. */
13718 if (real_isnan (c0) || real_isnan (c1))
13720 switch (code)
13722 case EQ_EXPR:
13723 case ORDERED_EXPR:
13724 result = 0;
13725 break;
13727 case NE_EXPR:
13728 case UNORDERED_EXPR:
13729 case UNLT_EXPR:
13730 case UNLE_EXPR:
13731 case UNGT_EXPR:
13732 case UNGE_EXPR:
13733 case UNEQ_EXPR:
13734 result = 1;
13735 break;
13737 case LT_EXPR:
13738 case LE_EXPR:
13739 case GT_EXPR:
13740 case GE_EXPR:
13741 case LTGT_EXPR:
13742 if (flag_trapping_math)
13743 return NULL_TREE;
13744 result = 0;
13745 break;
13747 default:
13748 gcc_unreachable ();
13751 return constant_boolean_node (result, type);
13754 return constant_boolean_node (real_compare (code, c0, c1), type);
13757 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13759 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13760 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13761 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13764 /* Handle equality/inequality of complex constants. */
13765 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13767 tree rcond = fold_relational_const (code, type,
13768 TREE_REALPART (op0),
13769 TREE_REALPART (op1));
13770 tree icond = fold_relational_const (code, type,
13771 TREE_IMAGPART (op0),
13772 TREE_IMAGPART (op1));
13773 if (code == EQ_EXPR)
13774 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13775 else if (code == NE_EXPR)
13776 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13777 else
13778 return NULL_TREE;
13781 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13783 if (!VECTOR_TYPE_P (type))
13785 /* Have vector comparison with scalar boolean result. */
13786 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
13787 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
13788 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
13790 tree elem0 = VECTOR_CST_ELT (op0, i);
13791 tree elem1 = VECTOR_CST_ELT (op1, i);
13792 tree tmp = fold_relational_const (code, type, elem0, elem1);
13793 if (tmp == NULL_TREE)
13794 return NULL_TREE;
13795 if (integer_zerop (tmp))
13796 return constant_boolean_node (false, type);
13798 return constant_boolean_node (true, type);
13800 tree_vector_builder elts;
13801 if (!elts.new_binary_operation (type, op0, op1, false))
13802 return NULL_TREE;
13803 unsigned int count = elts.encoded_nelts ();
13804 for (unsigned i = 0; i < count; i++)
13806 tree elem_type = TREE_TYPE (type);
13807 tree elem0 = VECTOR_CST_ELT (op0, i);
13808 tree elem1 = VECTOR_CST_ELT (op1, i);
13810 tree tem = fold_relational_const (code, elem_type,
13811 elem0, elem1);
13813 if (tem == NULL_TREE)
13814 return NULL_TREE;
13816 elts.quick_push (build_int_cst (elem_type,
13817 integer_zerop (tem) ? 0 : -1));
13820 return elts.build ();
13823 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13825 To compute GT, swap the arguments and do LT.
13826 To compute GE, do LT and invert the result.
13827 To compute LE, swap the arguments, do LT and invert the result.
13828 To compute NE, do EQ and invert the result.
13830 Therefore, the code below must handle only EQ and LT. */
13832 if (code == LE_EXPR || code == GT_EXPR)
13834 std::swap (op0, op1);
13835 code = swap_tree_comparison (code);
13838 /* Note that it is safe to invert for real values here because we
13839 have already handled the one case where it matters. */
13841 invert = 0;
13842 if (code == NE_EXPR || code == GE_EXPR)
13844 invert = 1;
13845 code = invert_tree_comparison (code, false);
13848 /* Compute a result for LT or EQ if args permit;
13849 otherwise return NULL_TREE. */
13850 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13852 if (code == EQ_EXPR)
13853 result = tree_int_cst_equal (op0, op1);
13854 else
13855 result = tree_int_cst_lt (op0, op1);
13857 else
13858 return NULL_TREE;
13860 if (invert)
13861 result ^= 1;
13862 return constant_boolean_node (result, type);
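/* Illustrative sketch, not part of GCC: the NaN table encoded above.
   With a NaN operand, EQ and the ordered comparisons are false while NE
   and the unordered ones are true, matching the REAL_CST path.  Assumes
   IEEE semantics; hypothetical helper.  */
static int
sketch_nan_compare (void)
{
  double zero = 0.0;
  double nan = zero / zero;	/* Quiet NaN under IEEE arithmetic.  */
  return (nan == nan) == 0	/* EQ_EXPR folds to 0.  */
	 && (nan != nan) == 1	/* NE_EXPR folds to 1.  */
	 && (nan < nan) == 0;	/* LT_EXPR folds to 0 if not trapping.  */
}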
13865 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13866 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13867 itself. */
13869 tree
13870 fold_build_cleanup_point_expr (tree type, tree expr)
13872 /* If the expression does not have side effects then we don't have to wrap
13873 it with a cleanup point expression. */
13874 if (!TREE_SIDE_EFFECTS (expr))
13875 return expr;
13877 /* If the expression is a return, check whether the expression inside the
13878 return, or the right hand side of the modify expression inside the
13879 return, has no side effects. If either one has none, we don't need to
13880 wrap the expression in a cleanup point expression. Note we don't check
13881 the left hand side of the modify because it should always be a return decl. */
13882 if (TREE_CODE (expr) == RETURN_EXPR)
13884 tree op = TREE_OPERAND (expr, 0);
13885 if (!op || !TREE_SIDE_EFFECTS (op))
13886 return expr;
13887 op = TREE_OPERAND (op, 1);
13888 if (!TREE_SIDE_EFFECTS (op))
13889 return expr;
13892 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
13895 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13896 of an indirection through OP0, or NULL_TREE if no simplification is
13897 possible. */
13899 tree
13900 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
13902 tree sub = op0;
13903 tree subtype;
13905 STRIP_NOPS (sub);
13906 subtype = TREE_TYPE (sub);
13907 if (!POINTER_TYPE_P (subtype)
13908 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
13909 return NULL_TREE;
13911 if (TREE_CODE (sub) == ADDR_EXPR)
13913 tree op = TREE_OPERAND (sub, 0);
13914 tree optype = TREE_TYPE (op);
13915 /* *&CONST_DECL -> the value of the const decl. */
13916 if (TREE_CODE (op) == CONST_DECL)
13917 return DECL_INITIAL (op);
13918 /* *&p => p; make sure to handle *&"str"[cst] here. */
13919 if (type == optype)
13921 tree fop = fold_read_from_constant_string (op);
13922 if (fop)
13923 return fop;
13924 else
13925 return op;
13927 /* *(foo *)&fooarray => fooarray[0] */
13928 else if (TREE_CODE (optype) == ARRAY_TYPE
13929 && type == TREE_TYPE (optype)
13930 && (!in_gimple_form
13931 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
13933 tree type_domain = TYPE_DOMAIN (optype);
13934 tree min_val = size_zero_node;
13935 if (type_domain && TYPE_MIN_VALUE (type_domain))
13936 min_val = TYPE_MIN_VALUE (type_domain);
13937 if (in_gimple_form
13938 && TREE_CODE (min_val) != INTEGER_CST)
13939 return NULL_TREE;
13940 return build4_loc (loc, ARRAY_REF, type, op, min_val,
13941 NULL_TREE, NULL_TREE);
13943 /* *(foo *)&complexfoo => __real__ complexfoo */
13944 else if (TREE_CODE (optype) == COMPLEX_TYPE
13945 && type == TREE_TYPE (optype))
13946 return fold_build1_loc (loc, REALPART_EXPR, type, op);
13947 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13948 else if (TREE_CODE (optype) == VECTOR_TYPE
13949 && type == TREE_TYPE (optype))
13951 tree part_width = TYPE_SIZE (type);
13952 tree index = bitsize_int (0);
13953 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
13957 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
13958 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13960 tree op00 = TREE_OPERAND (sub, 0);
13961 tree op01 = TREE_OPERAND (sub, 1);
13963 STRIP_NOPS (op00);
13964 if (TREE_CODE (op00) == ADDR_EXPR)
13966 tree op00type;
13967 op00 = TREE_OPERAND (op00, 0);
13968 op00type = TREE_TYPE (op00);
13970 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
13971 if (TREE_CODE (op00type) == VECTOR_TYPE
13972 && type == TREE_TYPE (op00type))
13974 tree part_width = TYPE_SIZE (type);
13975 unsigned HOST_WIDE_INT max_offset
13976 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
13977 * TYPE_VECTOR_SUBPARTS (op00type));
13978 if (tree_int_cst_sign_bit (op01) == 0
13979 && compare_tree_int (op01, max_offset) == -1)
13981 unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
13982 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
13983 tree index = bitsize_int (indexi);
13984 return fold_build3_loc (loc,
13985 BIT_FIELD_REF, type, op00,
13986 part_width, index);
13989 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13990 else if (TREE_CODE (op00type) == COMPLEX_TYPE
13991 && type == TREE_TYPE (op00type))
13993 tree size = TYPE_SIZE_UNIT (type);
13994 if (tree_int_cst_equal (size, op01))
13995 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
13997 /* ((foo *)&fooarray)[1] => fooarray[1] */
13998 else if (TREE_CODE (op00type) == ARRAY_TYPE
13999 && type == TREE_TYPE (op00type))
14001 tree type_domain = TYPE_DOMAIN (op00type);
14002 tree min = size_zero_node;
14003 if (type_domain && TYPE_MIN_VALUE (type_domain))
14004 min = TYPE_MIN_VALUE (type_domain);
14005 offset_int off = wi::to_offset (op01);
14006 offset_int el_sz = wi::to_offset (TYPE_SIZE_UNIT (type));
14007 offset_int remainder;
14008 off = wi::divmod_trunc (off, el_sz, SIGNED, &remainder);
14009 if (remainder == 0 && TREE_CODE (min) == INTEGER_CST)
14011 off = off + wi::to_offset (min);
14012 op01 = wide_int_to_tree (sizetype, off);
14013 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14014 NULL_TREE, NULL_TREE);
14020 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14021 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14022 && type == TREE_TYPE (TREE_TYPE (subtype))
14023 && (!in_gimple_form
14024 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14026 tree type_domain;
14027 tree min_val = size_zero_node;
14028 sub = build_fold_indirect_ref_loc (loc, sub);
14029 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14030 if (type_domain && TYPE_MIN_VALUE (type_domain))
14031 min_val = TYPE_MIN_VALUE (type_domain);
14032 if (in_gimple_form
14033 && TREE_CODE (min_val) != INTEGER_CST)
14034 return NULL_TREE;
14035 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14036 NULL_TREE);
14039 return NULL_TREE;
14042 /* Builds an expression for an indirection through T, simplifying some
14043 cases. */
14045 tree
14046 build_fold_indirect_ref_loc (location_t loc, tree t)
14048 tree type = TREE_TYPE (TREE_TYPE (t));
14049 tree sub = fold_indirect_ref_1 (loc, type, t);
14051 if (sub)
14052 return sub;
14054 return build1_loc (loc, INDIRECT_REF, type, t);
14057 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14059 tree
14060 fold_indirect_ref_loc (location_t loc, tree t)
14062 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14064 if (sub)
14065 return sub;
14066 else
14067 return t;
14070 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14071 whose result is ignored. The type of the returned tree need not be
14072 the same as the original expression. */
14074 tree
14075 fold_ignored_result (tree t)
14077 if (!TREE_SIDE_EFFECTS (t))
14078 return integer_zero_node;
14080 for (;;)
14081 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14083 case tcc_unary:
14084 t = TREE_OPERAND (t, 0);
14085 break;
14087 case tcc_binary:
14088 case tcc_comparison:
14089 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14090 t = TREE_OPERAND (t, 0);
14091 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14092 t = TREE_OPERAND (t, 1);
14093 else
14094 return t;
14095 break;
14097 case tcc_expression:
14098 switch (TREE_CODE (t))
14100 case COMPOUND_EXPR:
14101 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14102 return t;
14103 t = TREE_OPERAND (t, 0);
14104 break;
14106 case COND_EXPR:
14107 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14108 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14109 return t;
14110 t = TREE_OPERAND (t, 0);
14111 break;
14113 default:
14114 return t;
14116 break;
14118 default:
14119 return t;
14123 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14125 tree
14126 round_up_loc (location_t loc, tree value, unsigned int divisor)
14128 tree div = NULL_TREE;
14130 if (divisor == 1)
14131 return value;
14133 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14134 have to do anything. Only do this check when VALUE is not a
14135 constant, because for a constant the check is more expensive than
14136 just doing the rounding. */
14137 if (TREE_CODE (value) != INTEGER_CST)
14139 div = build_int_cst (TREE_TYPE (value), divisor);
14141 if (multiple_of_p (TREE_TYPE (value), value, div))
14142 return value;
14145 /* If divisor is a power of two, simplify this to bit manipulation. */
14146 if (pow2_or_zerop (divisor))
14148 if (TREE_CODE (value) == INTEGER_CST)
14150 wide_int val = wi::to_wide (value);
14151 bool overflow_p;
14153 if ((val & (divisor - 1)) == 0)
14154 return value;
14156 overflow_p = TREE_OVERFLOW (value);
14157 val += divisor - 1;
14158 val &= (int) -divisor;
14159 if (val == 0)
14160 overflow_p = true;
14162 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14164 else
14166 tree t;
14168 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14169 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14170 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14171 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14174 else
14176 if (!div)
14177 div = build_int_cst (TREE_TYPE (value), divisor);
14178 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14179 value = size_binop_loc (loc, MULT_EXPR, value, div);
14182 return value;
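/* Illustrative sketch, not part of GCC: the power-of-two fast path
   above.  For DIVISOR == 2**k, rounding up is "bump, then mask": add
   divisor - 1 and clear the low k bits.  Hypothetical helper.  */
static unsigned
sketch_round_up_pow2 (unsigned value, unsigned divisor)
{
  /* Caller guarantees DIVISOR is a power of two.  */
  return (value + divisor - 1) & ~(divisor - 1);
}
/* sketch_round_up_pow2 (13, 8) == 16; exact multiples are unchanged.  */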
14185 /* Likewise, but round down. */
14187 tree
14188 round_down_loc (location_t loc, tree value, int divisor)
14190 tree div = NULL_TREE;
14192 gcc_assert (divisor > 0);
14193 if (divisor == 1)
14194 return value;
14196 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14197 have to do anything. Only do this check when VALUE is not a
14198 constant, because for a constant the check is more expensive than
14199 just doing the rounding. */
14200 if (TREE_CODE (value) != INTEGER_CST)
14202 div = build_int_cst (TREE_TYPE (value), divisor);
14204 if (multiple_of_p (TREE_TYPE (value), value, div))
14205 return value;
14208 /* If divisor is a power of two, simplify this to bit manipulation. */
14209 if (pow2_or_zerop (divisor))
14211 tree t;
14213 t = build_int_cst (TREE_TYPE (value), -divisor);
14214 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14216 else
14218 if (!div)
14219 div = build_int_cst (TREE_TYPE (value), divisor);
14220 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14221 value = size_binop_loc (loc, MULT_EXPR, value, div);
14224 return value;
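/* Illustrative sketch, not part of GCC: the matching round-down fast
   path needs no bump, only the mask.  Hypothetical helper.  */
static unsigned
sketch_round_down_pow2 (unsigned value, unsigned divisor)
{
  return value & ~(divisor - 1);	/* DIVISOR must be a power of two.  */
}
/* sketch_round_down_pow2 (13, 8) == 8.  */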
14227 /* Returns the pointer to the base of the object addressed by EXP and
14228 extracts the information about the offset of the access, storing it
14229 to PBITPOS and POFFSET. */
14231 static tree
14232 split_address_to_core_and_offset (tree exp,
14233 HOST_WIDE_INT *pbitpos, tree *poffset)
14235 tree core;
14236 machine_mode mode;
14237 int unsignedp, reversep, volatilep;
14238 HOST_WIDE_INT bitsize;
14239 location_t loc = EXPR_LOCATION (exp);
14241 if (TREE_CODE (exp) == ADDR_EXPR)
14243 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14244 poffset, &mode, &unsignedp, &reversep,
14245 &volatilep);
14246 core = build_fold_addr_expr_loc (loc, core);
14248 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
14250 core = TREE_OPERAND (exp, 0);
14251 STRIP_NOPS (core);
14252 *pbitpos = 0;
14253 *poffset = TREE_OPERAND (exp, 1);
14254 if (TREE_CODE (*poffset) == INTEGER_CST)
14256 offset_int tem = wi::sext (wi::to_offset (*poffset),
14257 TYPE_PRECISION (TREE_TYPE (*poffset)));
14258 tem <<= LOG2_BITS_PER_UNIT;
14259 if (wi::fits_shwi_p (tem))
14261 *pbitpos = tem.to_shwi ();
14262 *poffset = NULL_TREE;
14266 else
14268 core = exp;
14269 *pbitpos = 0;
14270 *poffset = NULL_TREE;
14273 return core;
14276 /* Returns true if addresses of E1 and E2 differ by a constant, false
14277 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14279 bool
14280 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14282 tree core1, core2;
14283 HOST_WIDE_INT bitpos1, bitpos2;
14284 tree toffset1, toffset2, tdiff, type;
14286 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14287 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14289 if (bitpos1 % BITS_PER_UNIT != 0
14290 || bitpos2 % BITS_PER_UNIT != 0
14291 || !operand_equal_p (core1, core2, 0))
14292 return false;
14294 if (toffset1 && toffset2)
14296 type = TREE_TYPE (toffset1);
14297 if (type != TREE_TYPE (toffset2))
14298 toffset2 = fold_convert (type, toffset2);
14300 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14301 if (!cst_and_fits_in_hwi (tdiff))
14302 return false;
14304 *diff = int_cst_value (tdiff);
14306 else if (toffset1 || toffset2)
14308 /* If only one of the offsets is non-constant, the difference cannot
14309 be a constant. */
14310 return false;
14312 else
14313 *diff = 0;
14315 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14316 return true;
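/* Illustrative sketch, not part of GCC: the property ptr_difference_const
   detects.  Addresses formed from the same base with constant offsets
   differ by a compile-time constant number of bytes.  Hypothetical
   helper.  */
static int
sketch_ptr_difference (void)
{
  static int arr[10];
  char *p1 = (char *) &arr[7];
  char *p2 = (char *) &arr[2];
  return p1 - p2 == 5 * (long) sizeof (int);	/* 1.  */
}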
14319 /* Return OFF converted to a pointer offset type suitable as offset for
14320 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14321 tree
14322 convert_to_ptrofftype_loc (location_t loc, tree off)
14324 return fold_convert_loc (loc, sizetype, off);
14327 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14328 tree
14329 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14331 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14332 ptr, convert_to_ptrofftype_loc (loc, off));
14335 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14336 tree
14337 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14339 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14340 ptr, size_int (off));
14343 /* Return a char pointer for a C string if it is a string constant
14344 or a sum of a string constant and an integer constant. We only support
14345 string constants properly terminated with a '\0' character.
14346 If STRLEN is a valid pointer, the length (including the terminating
14347 character) of the returned string is stored in *STRLEN. */
14349 const char *
14350 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
14352 tree offset_node;
14354 if (strlen)
14355 *strlen = 0;
14357 src = string_constant (src, &offset_node);
14358 if (src == 0)
14359 return NULL;
14361 unsigned HOST_WIDE_INT offset = 0;
14362 if (offset_node != NULL_TREE)
14364 if (!tree_fits_uhwi_p (offset_node))
14365 return NULL;
14366 else
14367 offset = tree_to_uhwi (offset_node);
14370 unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
14371 const char *string = TREE_STRING_POINTER (src);
14373 /* Support only properly null-terminated strings. */
14374 if (string_length == 0
14375 || string[string_length - 1] != '\0'
14376 || offset >= string_length)
14377 return NULL;
14379 if (strlen)
14380 *strlen = string_length - offset;
14381 return string + offset;
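/* Illustrative sketch, not part of GCC: the contract of c_getstr for a
   string constant plus a constant offset.  Hypothetical values.  */
static int
sketch_getstr_contract (void)
{
  static const char s[] = "hello";	/* Length 6 counting the nul.  */
  const char *sub = s + 2;		/* What c_getstr would return.  */
  unsigned long len = sizeof (s) - 2;	/* 4, including the '\0'.  */
  return sub[0] == 'l' && len == 4;	/* 1.  */
}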
14384 #if CHECKING_P
14386 namespace selftest {
14388 /* Helper functions for writing tests of folding trees. */
14390 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
14392 static void
14393 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
14394 tree constant)
14396 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
14399 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
14400 wrapping WRAPPED_EXPR. */
14402 static void
14403 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
14404 tree wrapped_expr)
14406 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
14407 ASSERT_NE (wrapped_expr, result);
14408 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
14409 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
14412 /* Verify that various arithmetic binary operations are folded
14413 correctly. */
14415 static void
14416 test_arithmetic_folding ()
14418 tree type = integer_type_node;
14419 tree x = create_tmp_var_raw (type, "x");
14420 tree zero = build_zero_cst (type);
14421 tree one = build_int_cst (type, 1);
14423 /* Addition. */
14424 /* 1 <-- (0 + 1) */
14425 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
14426 one);
14427 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
14428 one);
14430 /* (nonlvalue)x <-- (x + 0) */
14431 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
14432 x);
14434 /* Subtraction. */
14435 /* 0 <-- (x - x) */
14436 assert_binop_folds_to_const (x, MINUS_EXPR, x,
14437 zero);
14438 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
14439 x);
14441 /* Multiplication. */
14442 /* 0 <-- (x * 0) */
14443 assert_binop_folds_to_const (x, MULT_EXPR, zero,
14444 zero);
14446 /* (nonlvalue)x <-- (x * 1) */
14447 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
14448 x);
14451 /* Verify that various binary operations on vectors are folded
14452 correctly. */
14454 static void
14455 test_vector_folding ()
14457 tree inner_type = integer_type_node;
14458 tree type = build_vector_type (inner_type, 4);
14459 tree zero = build_zero_cst (type);
14460 tree one = build_one_cst (type);
14462 /* Verify equality tests that return a scalar boolean result. */
14463 tree res_type = boolean_type_node;
14464 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
14465 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
14466 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
14467 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
14470 /* Run all of the selftests within this file. */
14472 void
14473 fold_const_c_tests ()
14475 test_arithmetic_folding ();
14476 test_vector_folding ();
14479 } // namespace selftest
14481 #endif /* CHECKING_P */