Use tree_vector_builder::new_unary_operation for folding
[official-gcc.git] / gcc / fold-const.c
blob af1f426300bc9d521d9bf11a3b922481f50e3dc7
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
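
/* Illustrative sketch (editor's note, not part of the original source):
   a caller might constant-fold `2 + 3' in sizetype through the entry
   points documented above like so:

     tree five = size_binop (PLUS_EXPR, size_int (2), size_int (3));

   size_int builds a sizetype INTEGER_CST and size_binop folds the
   addition, so FIVE is the INTEGER_CST 5 of type sizetype.  */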
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "params.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "attribs.h"
#include "tree-vector-builder.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
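
/* Illustrative sketch (editor's note): the encoding uses LT as bit 0,
   EQ as bit 1, GT as bit 2 and UNORDERED as bit 3, so combining two
   comparisons reduces to bitwise arithmetic on their codes:

     COMPCODE_LE  == (COMPCODE_LT | COMPCODE_EQ)                  // 1|2 == 3
     COMPCODE_ORD == (COMPCODE_LT | COMPCODE_EQ | COMPCODE_GT)    // 7
     COMPCODE_NE  == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD) // 13

   This is what lets AND/OR of relational comparisons be folded by
   AND/OR of these codes.  */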

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
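
/* Illustrative sketch (editor's note): given INTEGER_CSTs for 12 and 4,
   div_if_zero_remainder returns the INTEGER_CST 3; for 12 and 5 the
   remainder is nonzero, so it returns NULL_TREE:

     tree t12 = build_int_cst (integer_type_node, 12);
     tree t4  = build_int_cst (integer_type_node, 4);
     tree q   = div_if_zero_remainder (t12, t4);   // INTEGER_CST 3  */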

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
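
/* Illustrative sketch (editor's note): callers bracket folding that
   must stay silent like so:

     fold_defer_overflow_warnings ();
     tree res = fold_binary (PLUS_EXPR, type, a, b);
     ...
     fold_undefer_overflow_warnings (res_is_used, stmt,
				     WARN_STRICT_OVERFLOW_MISC);

   The pending -Wstrict-overflow warning is only issued if the folded
   result is actually used (ISSUE is true) and the warning level
   passes issue_strict_overflow_warning.  */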

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
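
/* Illustrative sketch (editor's note): for a 32-bit signed int, only
   INT_MIN (0x80000000, the lone sign-bit pattern) fails the check,
   since -INT_MIN is not representable:

     may_negate_without_overflow_p (build_int_cst (integer_type_node, 5))
       => true
     may_negate_without_overflow_p (TYPE_MIN_VALUE (integer_type_node))
       => false  */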

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = VECTOR_CST_NELTS (t), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, but negating one operand does
	 overflow if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
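
/* Illustrative sketch (editor's note): a few concrete answers for
   signed int operands under default flags:

     negate_expr_p (five_cst)     => true   (-5 is representable)
     negate_expr_p (int_min_cst)  => false  (-INT_MIN overflows)
     negate_expr_p (-x)           => true   (just strip the NEGATE_EXPR)
     negate_expr_p (a + b)        => false  (signed overflow is undefined)

   With -fwrapv the integral PLUS/MINUS cases become negatable, since
   TYPE_OVERFLOW_WRAPS is then true.  */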

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
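
/* Illustrative sketch (editor's note): when signed zeros and
   sign-dependent rounding need not be honored, negating a MINUS_EXPR
   swaps its operands instead of building a new NEGATE_EXPR:

     negate_expr (a - b)  =>  b - a
     negate_expr (x)      =>  -x   (plain NEGATE_EXPR fallback)  */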

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
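
/* Illustrative sketch (editor's note): splitting `x + 4' with
   CODE == PLUS_EXPR yields

     return value (variable part) : x
     *litp (literal part)         : 4
     *conp and the *minus_* parts : NULL

   while `x - 4' puts the 4 in *minus_litp instead, since the literal
   was subtracted.  */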

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants PARG1 and PARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree parg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (parg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wi::tree_to_wide_ref arg1 = wi::to_wide (parg1);
  wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (parg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
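
/* Illustrative sketch (editor's note): combining two INTEGER_CSTs
   directly at the wide_int level:

     tree a = build_int_cst (integer_type_node, 6);
     tree b = build_int_cst (integer_type_node, 7);
     tree p = int_const_binop (MULT_EXPR, a, b);   // INTEGER_CST 42

   Division and modulus cases return NULL_TREE when the divisor is
   zero, and overflow is recorded via TREE_OVERFLOW on the result.  */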

/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
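
/* Illustrative sketch (editor's note): (a + b) << c equals
   (a << c) + (b << c), so LSHIFT_EXPR distributes over addition in
   operand 1; it does not in operand 2, since c << (a + b) is not
   (c << a) + (c << b).  This predicate is what lets the stepped
   VECTOR_CST encodings below be folded element-by-element on the
   encoded elements only.  */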

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = VECTOR_CST_NELTS (arg1), i;

      auto_vec<tree, 32> elts (count);
      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and return NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and return NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}
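
/* Illustrative sketch (editor's note): const_binop handles several
   constant kinds; for two REAL_CSTs:

     tree one = build_real (double_type_node, dconst1);   // 1.0
     tree two = const_binop (PLUS_EXPR, one, one);        // REAL_CST 2.0

   NULL_TREE comes back whenever folding is unsafe: signaling NaNs,
   trapping division by zero, or a result that would depend on the
   run-time rounding mode.  */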

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
	{
	  offset_int res = wi::sub (wi::to_offset (arg1),
				    wi::to_offset (arg2));
	  return force_fit_type (type, res, 1,
				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
	}
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	in_nelts = VECTOR_CST_NELTS (arg1);
	out_nelts = in_nelts * 2;
	gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
		    && out_nelts == TYPE_VECTOR_SUBPARTS (type));

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      ? NOP_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	in_nelts = VECTOR_CST_NELTS (arg1);
	out_nelts = in_nelts / 2;
	gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
		    && out_nelts == TYPE_VECTOR_SUBPARTS (type));

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	tree_vector_builder elts (type, out_nelts, 1);
	for (out = 0; out < out_nelts; out++)
	  {
	    unsigned int in = (out << scale) + ofs;
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}

/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elem;

	  /* This can cope with stepped encodings because ~x == -1 - x.  */
	  tree_vector_builder elements;
	  elements.new_unary_operation (type, arg0, true);
	  unsigned int i, count = elements.encoded_nelts ();
	  for (i = 0; i < count; ++i)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements.quick_push (elem);
	    }
	  if (i == count)
	    return elements.build ();
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int out_nelts, in_nelts, i;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	in_nelts = VECTOR_CST_NELTS (arg0);
	out_nelts = in_nelts / 2;
	gcc_assert (out_nelts == TYPE_VECTOR_SUBPARTS (type));

	unsigned int offset = 0;
	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  offset = out_nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	tree_vector_builder elts (type, out_nelts, 1);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
					   VECTOR_CST_ELT (arg0, i + offset));
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    default:
      break;
    }

  return NULL_TREE;
}
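
/* Illustrative sketch (editor's note): because ~x == -1 - x, the
   BIT_NOT_EXPR case above can fold a stepped VECTOR_CST encoding
   without expanding all its elements; the series {0, 1, 2, ...}
   becomes {-1, -2, -3, ...} by applying BIT_NOT_EXPR to the encoded
   elements only, which is why new_unary_operation is passed
   allow_stepped == true there.  */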

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
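
/* Illustrative sketch (editor's note): for unsigned sizetype constants
   the difference comes back in ssizetype so it can be negative:

     size_diffop_loc (loc, size_int (3), size_int (5))
       => ssizetype INTEGER_CST -2, computed as 0 - (5 - 3)

   since subtracting the other way first avoids an unsigned wraparound.  */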

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
	{
	  overflow = true;
	  val = wi::to_wide (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (real_less (&u, &r))
	    {
	      overflow = true;
	      val = wi::to_wide (ut);
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
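
/* Illustrative sketch (editor's note): converting the REAL_CST 1e30
   to a 32-bit int via FIX_TRUNC_EXPR saturates to INT_MAX (the type's
   TYPE_MAX_VALUE) and marks TREE_OVERFLOW on the result; a NaN input
   folds to 0, likewise with the overflow flag set.  */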

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  scalar_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do so by adding 1 to temp when the fractional bits are not
     all zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}
1994 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1995 to another floating point type. */
1997 static tree
1998 fold_convert_const_real_from_real (tree type, const_tree arg1)
2000 REAL_VALUE_TYPE value;
2001 tree t;
2003 /* Don't perform the operation if flag_signaling_nans is on
2004 and the operand is a signaling NaN. */
2005 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2006 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2007 return NULL_TREE;
2009 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2010 t = build_real (type, value);
2012 /* If converting an infinity or NAN to a representation that doesn't
2013 have one, set the overflow bit so that we can produce some kind of
2014 error message at the appropriate point if necessary. It's not the
2015 most user-friendly message, but it's better than nothing. */
2016 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2017 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2018 TREE_OVERFLOW (t) = 1;
2019 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2020 && !MODE_HAS_NANS (TYPE_MODE (type)))
2021 TREE_OVERFLOW (t) = 1;
2022 /* Regular overflow, conversion produced an infinity in a mode that
2023 can't represent them. */
2024 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2025 && REAL_VALUE_ISINF (value)
2026 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2027 TREE_OVERFLOW (t) = 1;
2028 else
2029 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2030 return t;
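/* Illustrative sketch (editorial addition, not from the GCC sources;
   kept under #if 0): the "conversion produced an infinity the source
   did not have" condition above, shown with standard C types.  Note
   that the folder only sets TREE_OVERFLOW for this case when the
   target mode additionally lacks infinities; float has them, so this
   only demonstrates the raw condition.  */
#if 0
#include <math.h>

static int
narrowing_creates_inf (void)
{
  double d = 1e300;              /* Finite as a double...  */
  float f = (float) d;           /* ...overflows to +inf as a float.  */
  return isinf (f) && !isinf (d);   /* Returns 1.  */
}
#endif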
2033 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2034 to a floating point type. */
2036 static tree
2037 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2039 REAL_VALUE_TYPE value;
2040 tree t;
2042 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2043 &TREE_FIXED_CST (arg1));
2044 t = build_real (type, value);
2046 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2047 return t;
2050 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2051 to another fixed-point type. */
2053 static tree
2054 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2056 FIXED_VALUE_TYPE value;
2057 tree t;
2058 bool overflow_p;
2060 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2061 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2062 t = build_fixed (type, value);
2064 /* Propagate overflow flags. */
2065 if (overflow_p | TREE_OVERFLOW (arg1))
2066 TREE_OVERFLOW (t) = 1;
2067 return t;
2070 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2071 to a fixed-point type. */
2073 static tree
2074 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2076 FIXED_VALUE_TYPE value;
2077 tree t;
2078 bool overflow_p;
2079 double_int di;
2081 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2083 di.low = TREE_INT_CST_ELT (arg1, 0);
2084 if (TREE_INT_CST_NUNITS (arg1) == 1)
2085 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2086 else
2087 di.high = TREE_INT_CST_ELT (arg1, 1);
2089 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2090 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2091 TYPE_SATURATING (type));
2092 t = build_fixed (type, value);
2094 /* Propagate overflow flags. */
2095 if (overflow_p | TREE_OVERFLOW (arg1))
2096 TREE_OVERFLOW (t) = 1;
2097 return t;
2100 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2101 to a fixed-point type. */
2103 static tree
2104 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2106 FIXED_VALUE_TYPE value;
2107 tree t;
2108 bool overflow_p;
2110 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2111 &TREE_REAL_CST (arg1),
2112 TYPE_SATURATING (type));
2113 t = build_fixed (type, value);
2115 /* Propagate overflow flags. */
2116 if (overflow_p | TREE_OVERFLOW (arg1))
2117 TREE_OVERFLOW (t) = 1;
2118 return t;
2121 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2122 type TYPE. If no simplification can be done return NULL_TREE. */
2124 static tree
2125 fold_convert_const (enum tree_code code, tree type, tree arg1)
2127 if (TREE_TYPE (arg1) == type)
2128 return arg1;
2130 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2131 || TREE_CODE (type) == OFFSET_TYPE)
2133 if (TREE_CODE (arg1) == INTEGER_CST)
2134 return fold_convert_const_int_from_int (type, arg1);
2135 else if (TREE_CODE (arg1) == REAL_CST)
2136 return fold_convert_const_int_from_real (code, type, arg1);
2137 else if (TREE_CODE (arg1) == FIXED_CST)
2138 return fold_convert_const_int_from_fixed (type, arg1);
2140 else if (TREE_CODE (type) == REAL_TYPE)
2142 if (TREE_CODE (arg1) == INTEGER_CST)
2143 return build_real_from_int_cst (type, arg1);
2144 else if (TREE_CODE (arg1) == REAL_CST)
2145 return fold_convert_const_real_from_real (type, arg1);
2146 else if (TREE_CODE (arg1) == FIXED_CST)
2147 return fold_convert_const_real_from_fixed (type, arg1);
2149 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2151 if (TREE_CODE (arg1) == FIXED_CST)
2152 return fold_convert_const_fixed_from_fixed (type, arg1);
2153 else if (TREE_CODE (arg1) == INTEGER_CST)
2154 return fold_convert_const_fixed_from_int (type, arg1);
2155 else if (TREE_CODE (arg1) == REAL_CST)
2156 return fold_convert_const_fixed_from_real (type, arg1);
2158 else if (TREE_CODE (type) == VECTOR_TYPE)
2160 if (TREE_CODE (arg1) == VECTOR_CST
2161 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2163 tree elttype = TREE_TYPE (type);
2164 tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2165 /* We can't handle steps directly when extending, since the
2166 values need to wrap at the original precision first. */
2167 bool step_ok_p
2168 = (INTEGRAL_TYPE_P (elttype)
2169 && INTEGRAL_TYPE_P (arg1_elttype)
2170 && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2171 tree_vector_builder v;
2172 if (!v.new_unary_operation (type, arg1, step_ok_p))
2173 return NULL_TREE;
2174 unsigned int len = v.encoded_nelts ();
2175 for (unsigned int i = 0; i < len; ++i)
2177 tree elt = VECTOR_CST_ELT (arg1, i);
2178 tree cvt = fold_convert_const (code, elttype, elt);
2179 if (cvt == NULL_TREE)
2180 return NULL_TREE;
2181 v.quick_push (cvt);
2183 return v.build ();
2186 return NULL_TREE;
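/* Illustrative sketch (editorial addition, not from the GCC sources;
   kept under #if 0): why the step_ok_p check above rejects widening
   conversions of stepped VECTOR_CSTs.  Element values must wrap at
   the source precision before being extended; widening the base and
   step first yields a different series.  */
#if 0
#include <stdint.h>

static int
extension_breaks_steps (void)
{
  uint8_t base = 254, step = 1;
  /* Element 2 of the 8-bit series {254, 255, 0, ...} wraps...  */
  uint16_t wrapped = (uint16_t) (uint8_t) (base + 2 * step);
  /* ...but widening base and step before stepping gives 256.  */
  uint16_t naive = (uint16_t) base + 2 * (uint16_t) step;
  return wrapped == 0 && naive == 256;   /* Returns 1.  */
}
#endif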
2189 /* Construct a vector of zero elements of vector type TYPE. */
2191 static tree
2192 build_zero_vector (tree type)
2194 tree t;
2196 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2197 return build_vector_from_val (type, t);
2200 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2202 bool
2203 fold_convertible_p (const_tree type, const_tree arg)
2205 tree orig = TREE_TYPE (arg);
2207 if (type == orig)
2208 return true;
2210 if (TREE_CODE (arg) == ERROR_MARK
2211 || TREE_CODE (type) == ERROR_MARK
2212 || TREE_CODE (orig) == ERROR_MARK)
2213 return false;
2215 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2216 return true;
2218 switch (TREE_CODE (type))
2220 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2221 case POINTER_TYPE: case REFERENCE_TYPE:
2222 case OFFSET_TYPE:
2223 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2224 || TREE_CODE (orig) == OFFSET_TYPE);
2226 case REAL_TYPE:
2227 case FIXED_POINT_TYPE:
2228 case VECTOR_TYPE:
2229 case VOID_TYPE:
2230 return TREE_CODE (type) == TREE_CODE (orig);
2232 default:
2233 return false;
2237 /* Convert expression ARG to type TYPE. Used by the middle-end for
2238 simple conversions in preference to calling the front-end's convert. */
2240 tree
2241 fold_convert_loc (location_t loc, tree type, tree arg)
2243 tree orig = TREE_TYPE (arg);
2244 tree tem;
2246 if (type == orig)
2247 return arg;
2249 if (TREE_CODE (arg) == ERROR_MARK
2250 || TREE_CODE (type) == ERROR_MARK
2251 || TREE_CODE (orig) == ERROR_MARK)
2252 return error_mark_node;
2254 switch (TREE_CODE (type))
2256 case POINTER_TYPE:
2257 case REFERENCE_TYPE:
2258 /* Handle conversions between pointers to different address spaces. */
2259 if (POINTER_TYPE_P (orig)
2260 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2261 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2262 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2263 /* fall through */
2265 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2266 case OFFSET_TYPE:
2267 if (TREE_CODE (arg) == INTEGER_CST)
2269 tem = fold_convert_const (NOP_EXPR, type, arg);
2270 if (tem != NULL_TREE)
2271 return tem;
2273 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2274 || TREE_CODE (orig) == OFFSET_TYPE)
2275 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2276 if (TREE_CODE (orig) == COMPLEX_TYPE)
2277 return fold_convert_loc (loc, type,
2278 fold_build1_loc (loc, REALPART_EXPR,
2279 TREE_TYPE (orig), arg));
2280 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2281 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2282 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2284 case REAL_TYPE:
2285 if (TREE_CODE (arg) == INTEGER_CST)
2287 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2288 if (tem != NULL_TREE)
2289 return tem;
2291 else if (TREE_CODE (arg) == REAL_CST)
2293 tem = fold_convert_const (NOP_EXPR, type, arg);
2294 if (tem != NULL_TREE)
2295 return tem;
2297 else if (TREE_CODE (arg) == FIXED_CST)
2299 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2300 if (tem != NULL_TREE)
2301 return tem;
2304 switch (TREE_CODE (orig))
2306 case INTEGER_TYPE:
2307 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2308 case POINTER_TYPE: case REFERENCE_TYPE:
2309 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2311 case REAL_TYPE:
2312 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2314 case FIXED_POINT_TYPE:
2315 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2317 case COMPLEX_TYPE:
2318 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2319 return fold_convert_loc (loc, type, tem);
2321 default:
2322 gcc_unreachable ();
2325 case FIXED_POINT_TYPE:
2326 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2327 || TREE_CODE (arg) == REAL_CST)
2329 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2330 if (tem != NULL_TREE)
2331 goto fold_convert_exit;
2334 switch (TREE_CODE (orig))
2336 case FIXED_POINT_TYPE:
2337 case INTEGER_TYPE:
2338 case ENUMERAL_TYPE:
2339 case BOOLEAN_TYPE:
2340 case REAL_TYPE:
2341 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2343 case COMPLEX_TYPE:
2344 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2345 return fold_convert_loc (loc, type, tem);
2347 default:
2348 gcc_unreachable ();
2351 case COMPLEX_TYPE:
2352 switch (TREE_CODE (orig))
2354 case INTEGER_TYPE:
2355 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2356 case POINTER_TYPE: case REFERENCE_TYPE:
2357 case REAL_TYPE:
2358 case FIXED_POINT_TYPE:
2359 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2360 fold_convert_loc (loc, TREE_TYPE (type), arg),
2361 fold_convert_loc (loc, TREE_TYPE (type),
2362 integer_zero_node));
2363 case COMPLEX_TYPE:
2365 tree rpart, ipart;
2367 if (TREE_CODE (arg) == COMPLEX_EXPR)
2369 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2370 TREE_OPERAND (arg, 0));
2371 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2372 TREE_OPERAND (arg, 1));
2373 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2376 arg = save_expr (arg);
2377 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2378 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2379 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2380 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2381 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2384 default:
2385 gcc_unreachable ();
2388 case VECTOR_TYPE:
2389 if (integer_zerop (arg))
2390 return build_zero_vector (type);
2391 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2392 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2393 || TREE_CODE (orig) == VECTOR_TYPE);
2394 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2396 case VOID_TYPE:
2397 tem = fold_ignored_result (arg);
2398 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2400 default:
2401 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2402 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2403 gcc_unreachable ();
2405 fold_convert_exit:
2406 protected_set_expr_location_unshare (tem, loc);
2407 return tem;
2410 /* Return false if expr can be assumed not to be an lvalue, true
2411 otherwise. */
2413 static bool
2414 maybe_lvalue_p (const_tree x)
2416 /* We only need to wrap lvalue tree codes. */
2417 switch (TREE_CODE (x))
2419 case VAR_DECL:
2420 case PARM_DECL:
2421 case RESULT_DECL:
2422 case LABEL_DECL:
2423 case FUNCTION_DECL:
2424 case SSA_NAME:
2426 case COMPONENT_REF:
2427 case MEM_REF:
2428 case INDIRECT_REF:
2429 case ARRAY_REF:
2430 case ARRAY_RANGE_REF:
2431 case BIT_FIELD_REF:
2432 case OBJ_TYPE_REF:
2434 case REALPART_EXPR:
2435 case IMAGPART_EXPR:
2436 case PREINCREMENT_EXPR:
2437 case PREDECREMENT_EXPR:
2438 case SAVE_EXPR:
2439 case TRY_CATCH_EXPR:
2440 case WITH_CLEANUP_EXPR:
2441 case COMPOUND_EXPR:
2442 case MODIFY_EXPR:
2443 case TARGET_EXPR:
2444 case COND_EXPR:
2445 case BIND_EXPR:
2446 break;
2448 default:
2449 /* Assume the worst for front-end tree codes. */
2450 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2451 break;
2452 return false;
2455 return true;
2458 /* Return an expr equal to X but certainly not valid as an lvalue. */
2460 tree
2461 non_lvalue_loc (location_t loc, tree x)
2463 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2464 us. */
2465 if (in_gimple_form)
2466 return x;
2468 if (! maybe_lvalue_p (x))
2469 return x;
2470 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2473 /* When pedantic, return an expr equal to X but certainly not valid as a
2474 pedantic lvalue. Otherwise, return X. */
2476 static tree
2477 pedantic_non_lvalue_loc (location_t loc, tree x)
2479 return protected_set_expr_location_unshare (x, loc);
2482 /* Given a tree comparison code, return the code that is the logical inverse.
2483 It is generally not safe to do this for floating-point comparisons, except
2484 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2485 ERROR_MARK in this case. */
2487 enum tree_code
2488 invert_tree_comparison (enum tree_code code, bool honor_nans)
2490 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2491 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2492 return ERROR_MARK;
2494 switch (code)
2496 case EQ_EXPR:
2497 return NE_EXPR;
2498 case NE_EXPR:
2499 return EQ_EXPR;
2500 case GT_EXPR:
2501 return honor_nans ? UNLE_EXPR : LE_EXPR;
2502 case GE_EXPR:
2503 return honor_nans ? UNLT_EXPR : LT_EXPR;
2504 case LT_EXPR:
2505 return honor_nans ? UNGE_EXPR : GE_EXPR;
2506 case LE_EXPR:
2507 return honor_nans ? UNGT_EXPR : GT_EXPR;
2508 case LTGT_EXPR:
2509 return UNEQ_EXPR;
2510 case UNEQ_EXPR:
2511 return LTGT_EXPR;
2512 case UNGT_EXPR:
2513 return LE_EXPR;
2514 case UNGE_EXPR:
2515 return LT_EXPR;
2516 case UNLT_EXPR:
2517 return GE_EXPR;
2518 case UNLE_EXPR:
2519 return GT_EXPR;
2520 case ORDERED_EXPR:
2521 return UNORDERED_EXPR;
2522 case UNORDERED_EXPR:
2523 return ORDERED_EXPR;
2524 default:
2525 gcc_unreachable ();
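/* Illustrative sketch (editorial addition, not from the GCC sources;
   kept under #if 0): why honor_nans matters above.  With a NaN
   operand both x < y and x >= y are false, so the logical inverse of
   LT_EXPR is UNGE_EXPR ("unordered or >="), not GE_EXPR.  Standard C
   only exposes the ordered forms, which is what this shows.  */
#if 0
#include <math.h>

static int
ordered_inverse_is_wrong_for_nan (void)
{
  double x = NAN, y = 1.0;
  return !(x < y) && !(x >= y);   /* Returns 1: both are false.  */
}
#endif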
2529 /* Similar, but return the comparison that results if the operands are
2530 swapped. This is safe for floating-point. */
2532 enum tree_code
2533 swap_tree_comparison (enum tree_code code)
2535 switch (code)
2537 case EQ_EXPR:
2538 case NE_EXPR:
2539 case ORDERED_EXPR:
2540 case UNORDERED_EXPR:
2541 case LTGT_EXPR:
2542 case UNEQ_EXPR:
2543 return code;
2544 case GT_EXPR:
2545 return LT_EXPR;
2546 case GE_EXPR:
2547 return LE_EXPR;
2548 case LT_EXPR:
2549 return GT_EXPR;
2550 case LE_EXPR:
2551 return GE_EXPR;
2552 case UNGT_EXPR:
2553 return UNLT_EXPR;
2554 case UNGE_EXPR:
2555 return UNLE_EXPR;
2556 case UNLT_EXPR:
2557 return UNGT_EXPR;
2558 case UNLE_EXPR:
2559 return UNGE_EXPR;
2560 default:
2561 gcc_unreachable ();
2566 /* Convert a comparison tree code from an enum tree_code representation
2567 into a compcode bit-based encoding. This function is the inverse of
2568 compcode_to_comparison. */
2570 static enum comparison_code
2571 comparison_to_compcode (enum tree_code code)
2573 switch (code)
2575 case LT_EXPR:
2576 return COMPCODE_LT;
2577 case EQ_EXPR:
2578 return COMPCODE_EQ;
2579 case LE_EXPR:
2580 return COMPCODE_LE;
2581 case GT_EXPR:
2582 return COMPCODE_GT;
2583 case NE_EXPR:
2584 return COMPCODE_NE;
2585 case GE_EXPR:
2586 return COMPCODE_GE;
2587 case ORDERED_EXPR:
2588 return COMPCODE_ORD;
2589 case UNORDERED_EXPR:
2590 return COMPCODE_UNORD;
2591 case UNLT_EXPR:
2592 return COMPCODE_UNLT;
2593 case UNEQ_EXPR:
2594 return COMPCODE_UNEQ;
2595 case UNLE_EXPR:
2596 return COMPCODE_UNLE;
2597 case UNGT_EXPR:
2598 return COMPCODE_UNGT;
2599 case LTGT_EXPR:
2600 return COMPCODE_LTGT;
2601 case UNGE_EXPR:
2602 return COMPCODE_UNGE;
2603 default:
2604 gcc_unreachable ();
2608 /* Convert a compcode bit-based encoding of a comparison operator back
2609 to GCC's enum tree_code representation. This function is the
2610 inverse of comparison_to_compcode. */
2612 static enum tree_code
2613 compcode_to_comparison (enum comparison_code code)
2615 switch (code)
2617 case COMPCODE_LT:
2618 return LT_EXPR;
2619 case COMPCODE_EQ:
2620 return EQ_EXPR;
2621 case COMPCODE_LE:
2622 return LE_EXPR;
2623 case COMPCODE_GT:
2624 return GT_EXPR;
2625 case COMPCODE_NE:
2626 return NE_EXPR;
2627 case COMPCODE_GE:
2628 return GE_EXPR;
2629 case COMPCODE_ORD:
2630 return ORDERED_EXPR;
2631 case COMPCODE_UNORD:
2632 return UNORDERED_EXPR;
2633 case COMPCODE_UNLT:
2634 return UNLT_EXPR;
2635 case COMPCODE_UNEQ:
2636 return UNEQ_EXPR;
2637 case COMPCODE_UNLE:
2638 return UNLE_EXPR;
2639 case COMPCODE_UNGT:
2640 return UNGT_EXPR;
2641 case COMPCODE_LTGT:
2642 return LTGT_EXPR;
2643 case COMPCODE_UNGE:
2644 return UNGE_EXPR;
2645 default:
2646 gcc_unreachable ();
2650 /* Return a tree for the comparison which is the combination of
2651 doing the AND or OR (depending on CODE) of the two operations LCODE
2652 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2653 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2654 if this makes the transformation invalid. */
2656 tree
2657 combine_comparisons (location_t loc,
2658 enum tree_code code, enum tree_code lcode,
2659 enum tree_code rcode, tree truth_type,
2660 tree ll_arg, tree lr_arg)
2662 bool honor_nans = HONOR_NANS (ll_arg);
2663 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2664 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2665 int compcode;
2667 switch (code)
2669 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2670 compcode = lcompcode & rcompcode;
2671 break;
2673 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2674 compcode = lcompcode | rcompcode;
2675 break;
2677 default:
2678 return NULL_TREE;
2681 if (!honor_nans)
2683 /* Eliminate unordered comparisons, as well as LTGT and ORD
2684 which are not used unless the mode has NaNs. */
2685 compcode &= ~COMPCODE_UNORD;
2686 if (compcode == COMPCODE_LTGT)
2687 compcode = COMPCODE_NE;
2688 else if (compcode == COMPCODE_ORD)
2689 compcode = COMPCODE_TRUE;
2691 else if (flag_trapping_math)
2693 /* Check that the original operation and the optimized ones will trap
2694 under the same condition. */
2695 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2696 && (lcompcode != COMPCODE_EQ)
2697 && (lcompcode != COMPCODE_ORD);
2698 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2699 && (rcompcode != COMPCODE_EQ)
2700 && (rcompcode != COMPCODE_ORD);
2701 bool trap = (compcode & COMPCODE_UNORD) == 0
2702 && (compcode != COMPCODE_EQ)
2703 && (compcode != COMPCODE_ORD);
2705 /* In a short-circuited boolean expression the LHS might be
2706 such that the RHS, if evaluated, will never trap. For
2707 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2708 if neither x nor y is NaN. (This is a mixed blessing: for
2709 example, the expression above will never trap, hence
2710 optimizing it to x < y would be invalid). */
2711 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2712 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2713 rtrap = false;
2715 /* If the comparison was short-circuited, and only the RHS
2716 trapped, we may now generate a spurious trap. */
2717 if (rtrap && !ltrap
2718 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2719 return NULL_TREE;
2721 /* If we changed the conditions that cause a trap, we lose. */
2722 if ((ltrap || rtrap) != trap)
2723 return NULL_TREE;
2726 if (compcode == COMPCODE_TRUE)
2727 return constant_boolean_node (true, truth_type);
2728 else if (compcode == COMPCODE_FALSE)
2729 return constant_boolean_node (false, truth_type);
2730 else
2732 enum tree_code tcode;
2734 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2735 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
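/* Illustrative sketch (editorial addition, not from the GCC sources;
   kept under #if 0): the bit-based compcode encoding doing its work
   in combine_comparisons above.  Since COMPCODE_LT is 1, COMPCODE_EQ
   is 2 and COMPCODE_LE is 3, ORing "<" with "==" yields "<=", which
   is how (x < y) || (x == y) folds to x <= y.  */
#if 0
static int
compcode_combination_demo (void)
{
  enum { LT = 1, EQ = 2, LE = 3, GT = 4, GE = 6 };
  return (LT | EQ) == LE       /* x < y || x == y   =>  x <= y  */
         && (GT | EQ) == GE    /* x > y || x == y   =>  x >= y  */
         && (LE & GE) == EQ;   /* x <= y && x >= y  =>  x == y  */
}
#endif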
2739 /* Return nonzero if two operands (typically of the same tree node)
2740 are necessarily equal. FLAGS modifies behavior as follows:
2742 If OEP_ONLY_CONST is set, only return nonzero for constants.
2743 This function tests whether the operands are indistinguishable;
2744 it does not test whether they are equal using C's == operation.
2745 The distinction is important for IEEE floating point, because
2746 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2747 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2749 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2750 even though it may hold multiple values during a function.
2751 This is because a GCC tree node guarantees that nothing else is
2752 executed between the evaluation of its "operands" (which may often
2753 be evaluated in arbitrary order). Hence if the operands themselves
2754 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2755 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2756 unset means assuming isochronic (or instantaneous) tree equivalence.
2757 Unless comparing arbitrary expression trees, such as from different
2758 statements, this flag can usually be left unset.
2760 If OEP_PURE_SAME is set, then pure functions with identical arguments
2761 are considered the same. It is used when the caller has other ways
2762 to ensure that global memory is unchanged in between.
2764 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2765 not values of expressions.
2767 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2768 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2770 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2771 any operand with side effects. This is unnecessarily conservative in the
2772 case we know that arg0 and arg1 are in disjoint code paths (such as in
2773 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2774 addresses with TREE_CONSTANT flag set so we know that &var == &var
2775 even if var is volatile. */
2777 int
2778 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2780 /* When checking, verify at the outermost operand_equal_p call that
2781 if operand_equal_p returns non-zero then ARG0 and ARG1 has the same
2782 hash value. */
2783 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2785 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2787 if (arg0 != arg1)
2789 inchash::hash hstate0 (0), hstate1 (0);
2790 inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2791 inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2792 hashval_t h0 = hstate0.end ();
2793 hashval_t h1 = hstate1.end ();
2794 gcc_assert (h0 == h1);
2796 return 1;
2798 else
2799 return 0;
2802 /* If either is ERROR_MARK, they aren't equal. */
2803 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2804 || TREE_TYPE (arg0) == error_mark_node
2805 || TREE_TYPE (arg1) == error_mark_node)
2806 return 0;
2808 /* Similarly, if either does not have a type (like a released SSA name),
2809 they aren't equal. */
2810 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2811 return 0;
2813 /* We cannot consider pointers to different address space equal. */
2814 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2815 && POINTER_TYPE_P (TREE_TYPE (arg1))
2816 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2817 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2818 return 0;
2820 /* Check equality of integer constants before bailing out due to
2821 precision differences. */
2822 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2824 /* Address of INTEGER_CST is not defined; check that we did not forget
2825 to drop the OEP_ADDRESS_OF flags. */
2826 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2827 return tree_int_cst_equal (arg0, arg1);
2830 if (!(flags & OEP_ADDRESS_OF))
2832 /* If both types don't have the same signedness, then we can't consider
2833 them equal. We must check this before the STRIP_NOPS calls
2834 because they may change the signedness of the arguments. As pointers
2835 strictly don't have a signedness, require either two pointers or
2836 two non-pointers as well. */
2837 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2838 || POINTER_TYPE_P (TREE_TYPE (arg0))
2839 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2840 return 0;
2842 /* If both types don't have the same precision, then it is not safe
2843 to strip NOPs. */
2844 if (element_precision (TREE_TYPE (arg0))
2845 != element_precision (TREE_TYPE (arg1)))
2846 return 0;
2848 STRIP_NOPS (arg0);
2849 STRIP_NOPS (arg1);
2851 #if 0
2852 /* FIXME: Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
2853 sanity check once the issue is solved. */
2854 else
2855 /* Addresses of conversions and SSA_NAMEs (and many other things)
2856 are not defined. Check that we did not forget to drop the
2857 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2858 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2859 && TREE_CODE (arg0) != SSA_NAME);
2860 #endif
2862 /* In case both args are comparisons but with different comparison
2863 code, try to swap the comparison operands of one arg to produce
2864 a match and compare that variant. */
2865 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2866 && COMPARISON_CLASS_P (arg0)
2867 && COMPARISON_CLASS_P (arg1))
2869 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2871 if (TREE_CODE (arg0) == swap_code)
2872 return operand_equal_p (TREE_OPERAND (arg0, 0),
2873 TREE_OPERAND (arg1, 1), flags)
2874 && operand_equal_p (TREE_OPERAND (arg0, 1),
2875 TREE_OPERAND (arg1, 0), flags);
2878 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2880 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2881 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2883 else if (flags & OEP_ADDRESS_OF)
2885 /* If we are interested in comparing addresses, ignore
2886 MEM_REF wrappings of the base that can appear just for
2887 TBAA reasons. */
2888 if (TREE_CODE (arg0) == MEM_REF
2889 && DECL_P (arg1)
2890 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2891 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2892 && integer_zerop (TREE_OPERAND (arg0, 1)))
2893 return 1;
2894 else if (TREE_CODE (arg1) == MEM_REF
2895 && DECL_P (arg0)
2896 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2897 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2898 && integer_zerop (TREE_OPERAND (arg1, 1)))
2899 return 1;
2900 return 0;
2902 else
2903 return 0;
2906 /* When not checking addresses, this is needed for conversions and for
2907 COMPONENT_REF. Might as well play it safe and always test this. */
2908 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2909 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2910 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2911 && !(flags & OEP_ADDRESS_OF)))
2912 return 0;
2914 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2915 We don't care about side effects in that case because the SAVE_EXPR
2916 takes care of that for us. In all other cases, two expressions are
2917 equal if they have no side effects. If we have two identical
2918 expressions with side effects that should be treated the same due
2919 to the only side effects being identical SAVE_EXPR's, that will
2920 be detected in the recursive calls below.
2921 If we are taking an invariant address of two identical objects
2922 they are necessarily equal as well. */
2923 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2924 && (TREE_CODE (arg0) == SAVE_EXPR
2925 || (flags & OEP_MATCH_SIDE_EFFECTS)
2926 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2927 return 1;
2929 /* Next handle constant cases, those for which we can return 1 even
2930 if ONLY_CONST is set. */
2931 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2932 switch (TREE_CODE (arg0))
2934 case INTEGER_CST:
2935 return tree_int_cst_equal (arg0, arg1);
2937 case FIXED_CST:
2938 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2939 TREE_FIXED_CST (arg1));
2941 case REAL_CST:
2942 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2943 return 1;
2946 if (!HONOR_SIGNED_ZEROS (arg0))
2948 /* If we do not distinguish between signed and unsigned zero,
2949 consider them equal. */
2950 if (real_zerop (arg0) && real_zerop (arg1))
2951 return 1;
2953 return 0;
2955 case VECTOR_CST:
2957 unsigned i;
2959 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2960 return 0;
2962 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2964 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2965 VECTOR_CST_ELT (arg1, i), flags))
2966 return 0;
2968 return 1;
2971 case COMPLEX_CST:
2972 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2973 flags)
2974 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2975 flags));
2977 case STRING_CST:
2978 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2979 && ! memcmp (TREE_STRING_POINTER (arg0),
2980 TREE_STRING_POINTER (arg1),
2981 TREE_STRING_LENGTH (arg0)));
2983 case ADDR_EXPR:
2984 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2985 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2986 flags | OEP_ADDRESS_OF
2987 | OEP_MATCH_SIDE_EFFECTS);
2988 case CONSTRUCTOR:
2989 /* In GIMPLE empty constructors are allowed in initializers of
2990 aggregates. */
2991 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
2992 default:
2993 break;
2996 if (flags & OEP_ONLY_CONST)
2997 return 0;
2999 /* Define macros to test an operand from arg0 and arg1 for equality and a
3000 variant that allows null and views null as being different from any
3001 non-null value. In the latter case, if either is null, they both
3002 must be; otherwise, do the normal comparison. */
3003 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3004 TREE_OPERAND (arg1, N), flags)
3006 #define OP_SAME_WITH_NULL(N) \
3007 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3008 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3010 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3012 case tcc_unary:
3013 /* Two conversions are equal only if signedness and modes match. */
3014 switch (TREE_CODE (arg0))
3016 CASE_CONVERT:
3017 case FIX_TRUNC_EXPR:
3018 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3019 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3020 return 0;
3021 break;
3022 default:
3023 break;
3026 return OP_SAME (0);
3029 case tcc_comparison:
3030 case tcc_binary:
3031 if (OP_SAME (0) && OP_SAME (1))
3032 return 1;
3034 /* For commutative ops, allow the other order. */
3035 return (commutative_tree_code (TREE_CODE (arg0))
3036 && operand_equal_p (TREE_OPERAND (arg0, 0),
3037 TREE_OPERAND (arg1, 1), flags)
3038 && operand_equal_p (TREE_OPERAND (arg0, 1),
3039 TREE_OPERAND (arg1, 0), flags));
3041 case tcc_reference:
3042 /* If either of the pointer (or reference) expressions we are
3043 dereferencing contain a side effect, these cannot be equal,
3044 but their addresses can be. */
3045 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3046 && (TREE_SIDE_EFFECTS (arg0)
3047 || TREE_SIDE_EFFECTS (arg1)))
3048 return 0;
3050 switch (TREE_CODE (arg0))
3052 case INDIRECT_REF:
3053 if (!(flags & OEP_ADDRESS_OF)
3054 && (TYPE_ALIGN (TREE_TYPE (arg0))
3055 != TYPE_ALIGN (TREE_TYPE (arg1))))
3056 return 0;
3057 flags &= ~OEP_ADDRESS_OF;
3058 return OP_SAME (0);
3060 case IMAGPART_EXPR:
3061 /* Require the same offset. */
3062 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3063 TYPE_SIZE (TREE_TYPE (arg1)),
3064 flags & ~OEP_ADDRESS_OF))
3065 return 0;
3067 /* Fallthru. */
3068 case REALPART_EXPR:
3069 case VIEW_CONVERT_EXPR:
3070 return OP_SAME (0);
3072 case TARGET_MEM_REF:
3073 case MEM_REF:
3074 if (!(flags & OEP_ADDRESS_OF))
3076 /* Require equal access sizes. */
3077 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3078 && (!TYPE_SIZE (TREE_TYPE (arg0))
3079 || !TYPE_SIZE (TREE_TYPE (arg1))
3080 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3081 TYPE_SIZE (TREE_TYPE (arg1)),
3082 flags)))
3083 return 0;
3084 /* Verify that access happens in similar types. */
3085 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3086 return 0;
3087 /* Verify that accesses are TBAA compatible. */
3088 if (!alias_ptr_types_compatible_p
3089 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3090 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3091 || (MR_DEPENDENCE_CLIQUE (arg0)
3092 != MR_DEPENDENCE_CLIQUE (arg1))
3093 || (MR_DEPENDENCE_BASE (arg0)
3094 != MR_DEPENDENCE_BASE (arg1)))
3095 return 0;
3096 /* Verify that alignment is compatible. */
3097 if (TYPE_ALIGN (TREE_TYPE (arg0))
3098 != TYPE_ALIGN (TREE_TYPE (arg1)))
3099 return 0;
3101 flags &= ~OEP_ADDRESS_OF;
3102 return (OP_SAME (0) && OP_SAME (1)
3103 /* TARGET_MEM_REFs require equal extra operands. */
3104 && (TREE_CODE (arg0) != TARGET_MEM_REF
3105 || (OP_SAME_WITH_NULL (2)
3106 && OP_SAME_WITH_NULL (3)
3107 && OP_SAME_WITH_NULL (4))));
3109 case ARRAY_REF:
3110 case ARRAY_RANGE_REF:
3111 if (!OP_SAME (0))
3112 return 0;
3113 flags &= ~OEP_ADDRESS_OF;
3114 /* Compare the array index by value first if it is constant, as we
3115 may have different types but the same value here. */
3116 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3117 TREE_OPERAND (arg1, 1))
3118 || OP_SAME (1))
3119 && OP_SAME_WITH_NULL (2)
3120 && OP_SAME_WITH_NULL (3)
3121 /* Compare low bound and element size as with OEP_ADDRESS_OF
3122 we have to account for the offset of the ref. */
3123 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3124 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3125 || (operand_equal_p (array_ref_low_bound
3126 (CONST_CAST_TREE (arg0)),
3127 array_ref_low_bound
3128 (CONST_CAST_TREE (arg1)), flags)
3129 && operand_equal_p (array_ref_element_size
3130 (CONST_CAST_TREE (arg0)),
3131 array_ref_element_size
3132 (CONST_CAST_TREE (arg1)),
3133 flags))));
3135 case COMPONENT_REF:
3136 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3137 may be NULL when we're called to compare MEM_EXPRs. */
3138 if (!OP_SAME_WITH_NULL (0)
3139 || !OP_SAME (1))
3140 return 0;
3141 flags &= ~OEP_ADDRESS_OF;
3142 return OP_SAME_WITH_NULL (2);
3144 case BIT_FIELD_REF:
3145 if (!OP_SAME (0))
3146 return 0;
3147 flags &= ~OEP_ADDRESS_OF;
3148 return OP_SAME (1) && OP_SAME (2);
3150 default:
3151 return 0;
3154 case tcc_expression:
3155 switch (TREE_CODE (arg0))
3157 case ADDR_EXPR:
3158 /* Be sure we pass the right ADDRESS_OF flag. */
3159 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3160 return operand_equal_p (TREE_OPERAND (arg0, 0),
3161 TREE_OPERAND (arg1, 0),
3162 flags | OEP_ADDRESS_OF);
3164 case TRUTH_NOT_EXPR:
3165 return OP_SAME (0);
3167 case TRUTH_ANDIF_EXPR:
3168 case TRUTH_ORIF_EXPR:
3169 return OP_SAME (0) && OP_SAME (1);
3171 case FMA_EXPR:
3172 case WIDEN_MULT_PLUS_EXPR:
3173 case WIDEN_MULT_MINUS_EXPR:
3174 if (!OP_SAME (2))
3175 return 0;
3176 /* The multiplication operands are commutative. */
3177 /* FALLTHRU */
3179 case TRUTH_AND_EXPR:
3180 case TRUTH_OR_EXPR:
3181 case TRUTH_XOR_EXPR:
3182 if (OP_SAME (0) && OP_SAME (1))
3183 return 1;
3185 /* Otherwise take into account this is a commutative operation. */
3186 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3187 TREE_OPERAND (arg1, 1), flags)
3188 && operand_equal_p (TREE_OPERAND (arg0, 1),
3189 TREE_OPERAND (arg1, 0), flags));
3191 case COND_EXPR:
3192 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3193 return 0;
3194 flags &= ~OEP_ADDRESS_OF;
3195 return OP_SAME (0);
3197 case BIT_INSERT_EXPR:
3198 /* BIT_INSERT_EXPR has an implicit operand, the type precision
3199 of op1. We need to check that they are the same. */
3200 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3201 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3202 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3203 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3204 return false;
3205 /* FALLTHRU */
3207 case VEC_COND_EXPR:
3208 case DOT_PROD_EXPR:
3209 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3211 case MODIFY_EXPR:
3212 case INIT_EXPR:
3213 case COMPOUND_EXPR:
3214 case PREDECREMENT_EXPR:
3215 case PREINCREMENT_EXPR:
3216 case POSTDECREMENT_EXPR:
3217 case POSTINCREMENT_EXPR:
3218 if (flags & OEP_LEXICOGRAPHIC)
3219 return OP_SAME (0) && OP_SAME (1);
3220 return 0;
3222 case CLEANUP_POINT_EXPR:
3223 case EXPR_STMT:
3224 if (flags & OEP_LEXICOGRAPHIC)
3225 return OP_SAME (0);
3226 return 0;
3228 default:
3229 return 0;
3232 case tcc_vl_exp:
3233 switch (TREE_CODE (arg0))
3235 case CALL_EXPR:
3236 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3237 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3238 /* If the two CALL_EXPRs are not both internal or both normal
3239 function calls, then they are not equal. */
3240 return 0;
3241 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3243 /* If the CALL_EXPRs call different internal functions, then they
3244 are not equal. */
3245 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3246 return 0;
3248 else
3250 /* If the CALL_EXPRs call different functions, then they are not
3251 equal. */
3252 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3253 flags))
3254 return 0;
3257 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3259 unsigned int cef = call_expr_flags (arg0);
3260 if (flags & OEP_PURE_SAME)
3261 cef &= ECF_CONST | ECF_PURE;
3262 else
3263 cef &= ECF_CONST;
3264 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3265 return 0;
3268 /* Now see if all the arguments are the same. */
3270 const_call_expr_arg_iterator iter0, iter1;
3271 const_tree a0, a1;
3272 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3273 a1 = first_const_call_expr_arg (arg1, &iter1);
3274 a0 && a1;
3275 a0 = next_const_call_expr_arg (&iter0),
3276 a1 = next_const_call_expr_arg (&iter1))
3277 if (! operand_equal_p (a0, a1, flags))
3278 return 0;
3280 /* If we get here and both argument lists are exhausted
3281 then the CALL_EXPRs are equal. */
3282 return ! (a0 || a1);
3284 default:
3285 return 0;
3288 case tcc_declaration:
3289 /* Consider __builtin_sqrt equal to sqrt. */
3290 return (TREE_CODE (arg0) == FUNCTION_DECL
3291 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3292 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3293 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3295 case tcc_exceptional:
3296 if (TREE_CODE (arg0) == CONSTRUCTOR)
3298 /* In GIMPLE constructors are used only to build vectors from
3299 elements. Individual elements in the constructor must be
3300 indexed in increasing order and form an initial sequence.
3302 We make no effort to compare constructors in generic.
3303 (see sem_variable::equals in ipa-icf which can do so for
3304 constants). */
3305 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3306 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3307 return 0;
3309 /* Be sure that vectors constructed have the same representation.
3310 We have only tested that element precision and modes match.
3311 Vectors may be BLKmode, so we also check that the number of
3312 parts matches. */
3313 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3314 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3315 return 0;
3317 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3318 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3319 unsigned int len = vec_safe_length (v0);
3321 if (len != vec_safe_length (v1))
3322 return 0;
3324 for (unsigned int i = 0; i < len; i++)
3326 constructor_elt *c0 = &(*v0)[i];
3327 constructor_elt *c1 = &(*v1)[i];
3329 if (!operand_equal_p (c0->value, c1->value, flags)
3330 /* In GIMPLE the indexes can be either NULL or matching i.
3331 Double check this so we won't get false
3332 positives for GENERIC. */
3333 || (c0->index
3334 && (TREE_CODE (c0->index) != INTEGER_CST
3335 || !compare_tree_int (c0->index, i)))
3336 || (c1->index
3337 && (TREE_CODE (c1->index) != INTEGER_CST
3338 || !compare_tree_int (c1->index, i))))
3339 return 0;
3341 return 1;
3343 else if (TREE_CODE (arg0) == STATEMENT_LIST
3344 && (flags & OEP_LEXICOGRAPHIC))
3346 /* Compare the STATEMENT_LISTs. */
3347 tree_stmt_iterator tsi1, tsi2;
3348 tree body1 = CONST_CAST_TREE (arg0);
3349 tree body2 = CONST_CAST_TREE (arg1);
3350 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3351 tsi_next (&tsi1), tsi_next (&tsi2))
3353 /* The lists don't have the same number of statements. */
3354 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3355 return 0;
3356 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3357 return 1;
3358 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3359 OEP_LEXICOGRAPHIC))
3360 return 0;
3363 return 0;
3365 case tcc_statement:
3366 switch (TREE_CODE (arg0))
3368 case RETURN_EXPR:
3369 if (flags & OEP_LEXICOGRAPHIC)
3370 return OP_SAME_WITH_NULL (0);
3371 return 0;
3372 default:
3373 return 0;
3376 default:
3377 return 0;
3380 #undef OP_SAME
3381 #undef OP_SAME_WITH_NULL
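/* Illustrative sketch (editorial addition, not from the GCC sources;
   kept under #if 0): the signed-zero caveat in the operand_equal_p
   commentary above.  C's == cannot distinguish -0.0 from 0.0, yet
   the two values differ observably, so they only count as equal when
   HONOR_SIGNED_ZEROS is false.  */
#if 0
static int
signed_zeros_are_distinguishable (void)
{
  double pz = 0.0, nz = -0.0;
  return pz == nz                    /* 1: == says equal...  */
         && 1.0 / pz != 1.0 / nz;    /* ...but +inf != -inf.  */
}
#endif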
3384 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3385 with a different signedness or a narrower precision. */
3387 static bool
3388 operand_equal_for_comparison_p (tree arg0, tree arg1)
3390 if (operand_equal_p (arg0, arg1, 0))
3391 return true;
3393 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3394 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3395 return false;
3397 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3398 and see if the inner values are the same. This removes any
3399 signedness comparison, which doesn't matter here. */
3400 tree op0 = arg0;
3401 tree op1 = arg1;
3402 STRIP_NOPS (op0);
3403 STRIP_NOPS (op1);
3404 if (operand_equal_p (op0, op1, 0))
3405 return true;
3407 /* Discard a single widening conversion from ARG1 and see if the inner
3408 value is the same as ARG0. */
3409 if (CONVERT_EXPR_P (arg1)
3410 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3411 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3412 < TYPE_PRECISION (TREE_TYPE (arg1))
3413 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3414 return true;
3416 return false;
3419 /* See if ARG is an expression that is either a comparison or is performing
3420 arithmetic on comparisons. The comparisons must only be comparing
3421 two different values, which will be stored in *CVAL1 and *CVAL2; if
3422 they are nonzero it means that some operands have already been found.
3423 No variables may be used anywhere else in the expression except in the
3424 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3425 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3427 If this is true, return 1. Otherwise, return zero. */
3429 static int
3430 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3432 enum tree_code code = TREE_CODE (arg);
3433 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3435 /* We can handle some of the tcc_expression cases here. */
3436 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3437 tclass = tcc_unary;
3438 else if (tclass == tcc_expression
3439 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3440 || code == COMPOUND_EXPR))
3441 tclass = tcc_binary;
3443 else if (tclass == tcc_expression && code == SAVE_EXPR
3444 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3446 /* If we've already found a CVAL1 or CVAL2, this expression is
3447 too complex to handle. */
3448 if (*cval1 || *cval2)
3449 return 0;
3451 tclass = tcc_unary;
3452 *save_p = 1;
3455 switch (tclass)
3457 case tcc_unary:
3458 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3460 case tcc_binary:
3461 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3462 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3463 cval1, cval2, save_p));
3465 case tcc_constant:
3466 return 1;
3468 case tcc_expression:
3469 if (code == COND_EXPR)
3470 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3471 cval1, cval2, save_p)
3472 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3473 cval1, cval2, save_p)
3474 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3475 cval1, cval2, save_p));
3476 return 0;
3478 case tcc_comparison:
3479 /* First see if we can handle the first operand, then the second. For
3480 the second operand, we know *CVAL1 can't be zero. It must be that
3481 one side of the comparison is each of the values; test for the
3482 case where this isn't true by failing if the two operands
3483 are the same. */
3485 if (operand_equal_p (TREE_OPERAND (arg, 0),
3486 TREE_OPERAND (arg, 1), 0))
3487 return 0;
3489 if (*cval1 == 0)
3490 *cval1 = TREE_OPERAND (arg, 0);
3491 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3493 else if (*cval2 == 0)
3494 *cval2 = TREE_OPERAND (arg, 0);
3495 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3497 else
3498 return 0;
3500 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3502 else if (*cval2 == 0)
3503 *cval2 = TREE_OPERAND (arg, 1);
3504 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3506 else
3507 return 0;
3509 return 1;
3511 default:
3512 return 0;
3516 /* ARG is a tree that is known to contain just arithmetic operations and
3517 comparisons. Evaluate the operations in the tree substituting NEW0 for
3518 any occurrence of OLD0 as an operand of a comparison and likewise for
3519 NEW1 and OLD1. */
3521 static tree
3522 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3523 tree old1, tree new1)
3525 tree type = TREE_TYPE (arg);
3526 enum tree_code code = TREE_CODE (arg);
3527 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3529 /* We can handle some of the tcc_expression cases here. */
3530 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3531 tclass = tcc_unary;
3532 else if (tclass == tcc_expression
3533 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3534 tclass = tcc_binary;
3536 switch (tclass)
3538 case tcc_unary:
3539 return fold_build1_loc (loc, code, type,
3540 eval_subst (loc, TREE_OPERAND (arg, 0),
3541 old0, new0, old1, new1));
3543 case tcc_binary:
3544 return fold_build2_loc (loc, code, type,
3545 eval_subst (loc, TREE_OPERAND (arg, 0),
3546 old0, new0, old1, new1),
3547 eval_subst (loc, TREE_OPERAND (arg, 1),
3548 old0, new0, old1, new1));
3550 case tcc_expression:
3551 switch (code)
3553 case SAVE_EXPR:
3554 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3555 old1, new1);
3557 case COMPOUND_EXPR:
3558 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3559 old1, new1);
3561 case COND_EXPR:
3562 return fold_build3_loc (loc, code, type,
3563 eval_subst (loc, TREE_OPERAND (arg, 0),
3564 old0, new0, old1, new1),
3565 eval_subst (loc, TREE_OPERAND (arg, 1),
3566 old0, new0, old1, new1),
3567 eval_subst (loc, TREE_OPERAND (arg, 2),
3568 old0, new0, old1, new1));
3569 default:
3570 break;
3572 /* Fall through - ??? */
3574 case tcc_comparison:
3576 tree arg0 = TREE_OPERAND (arg, 0);
3577 tree arg1 = TREE_OPERAND (arg, 1);
3579 /* We need to check both for exact equality and tree equality. The
3580 former will be true if the operand has a side-effect. In that
3581 case, we know the operand occurred exactly once. */
3583 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3584 arg0 = new0;
3585 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3586 arg0 = new1;
3588 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3589 arg1 = new0;
3590 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3591 arg1 = new1;
3593 return fold_build2_loc (loc, code, type, arg0, arg1);
3596 default:
3597 return arg;
3601 /* Return a tree for the case when the result of an expression is RESULT
3602 converted to TYPE and OMITTED was previously an operand of the expression
3603 but is now not needed (e.g., we folded OMITTED * 0).
3605 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3606 the conversion of RESULT to TYPE. */
3608 tree
3609 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3611 tree t = fold_convert_loc (loc, type, result);
3613 /* If the resulting operand is an empty statement, just return the omitted
3614 statement casted to void. */
3615 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3616 return build1_loc (loc, NOP_EXPR, void_type_node,
3617 fold_ignored_result (omitted));
3619 if (TREE_SIDE_EFFECTS (omitted))
3620 return build2_loc (loc, COMPOUND_EXPR, type,
3621 fold_ignored_result (omitted), t);
3623 return non_lvalue_loc (loc, t);
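/* Illustrative sketch (editorial addition, not from the GCC sources;
   kept under #if 0): the effect of omit_one_operand_loc in source
   terms.  When f () * 0 folds to 0, the call must still be evaluated
   for its side effects, so the folder builds the equivalent of a
   comma expression.  The helper names are made up.  */
#if 0
static int counter;
static int f (void) { return ++counter; }

static int
omitted_operand_demo (void)
{
  int r = (f (), 0);             /* Evaluate the omitted operand,
                                    then yield the folded result.  */
  return r == 0 && counter == 1; /* Returns 1.  */
}
#endif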
3626 /* Return a tree for the case when the result of an expression is RESULT
3627 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3628 of the expression but are now not needed.
3630 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3631 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3632 evaluated before OMITTED2. Otherwise, if neither has side effects,
3633 just do the conversion of RESULT to TYPE. */
3635 tree
3636 omit_two_operands_loc (location_t loc, tree type, tree result,
3637 tree omitted1, tree omitted2)
3639 tree t = fold_convert_loc (loc, type, result);
3641 if (TREE_SIDE_EFFECTS (omitted2))
3642 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3643 if (TREE_SIDE_EFFECTS (omitted1))
3644 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3646 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3650 /* Return a simplified tree node for the truth-negation of ARG. This
3651 never alters ARG itself. We assume that ARG is an operation that
3652 returns a truth value (0 or 1).
3654 FIXME: one would think we would fold the result, but it causes
3655 problems with the dominator optimizer. */
3657 static tree
3658 fold_truth_not_expr (location_t loc, tree arg)
3660 tree type = TREE_TYPE (arg);
3661 enum tree_code code = TREE_CODE (arg);
3662 location_t loc1, loc2;
3664 /* If this is a comparison, we can simply invert it, except for
3665 floating-point non-equality comparisons, in which case we just
3666 enclose a TRUTH_NOT_EXPR around what we have. */
3668 if (TREE_CODE_CLASS (code) == tcc_comparison)
3670 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3671 if (FLOAT_TYPE_P (op_type)
3672 && flag_trapping_math
3673 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3674 && code != NE_EXPR && code != EQ_EXPR)
3675 return NULL_TREE;
3677 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3678 if (code == ERROR_MARK)
3679 return NULL_TREE;
3681 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3682 TREE_OPERAND (arg, 1));
3683 if (TREE_NO_WARNING (arg))
3684 TREE_NO_WARNING (ret) = 1;
3685 return ret;
3688 switch (code)
3690 case INTEGER_CST:
3691 return constant_boolean_node (integer_zerop (arg), type);
3693 case TRUTH_AND_EXPR:
3694 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3695 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3696 return build2_loc (loc, TRUTH_OR_EXPR, type,
3697 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3698 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3700 case TRUTH_OR_EXPR:
3701 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3702 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3703 return build2_loc (loc, TRUTH_AND_EXPR, type,
3704 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3705 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3707 case TRUTH_XOR_EXPR:
3708 /* Here we can invert either operand. We invert the first operand
3709 unless the second operand is a TRUTH_NOT_EXPR in which case our
3710 result is the XOR of the first operand with the inside of the
3711 negation of the second operand. */
3713 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3714 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3715 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3716 else
3717 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3718 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3719 TREE_OPERAND (arg, 1));
3721 case TRUTH_ANDIF_EXPR:
3722 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3723 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3724 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3725 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3726 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3728 case TRUTH_ORIF_EXPR:
3729 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3730 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3731 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3732 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3733 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3735 case TRUTH_NOT_EXPR:
3736 return TREE_OPERAND (arg, 0);
3738 case COND_EXPR:
3740 tree arg1 = TREE_OPERAND (arg, 1);
3741 tree arg2 = TREE_OPERAND (arg, 2);
3743 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3744 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3746 /* A COND_EXPR may have a throw as one operand, which
3747 then has void type. Just leave void operands
3748 as they are. */
3749 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3750 VOID_TYPE_P (TREE_TYPE (arg1))
3751 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3752 VOID_TYPE_P (TREE_TYPE (arg2))
3753 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3756 case COMPOUND_EXPR:
3757 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3758 return build2_loc (loc, COMPOUND_EXPR, type,
3759 TREE_OPERAND (arg, 0),
3760 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3762 case NON_LVALUE_EXPR:
3763 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3764 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3766 CASE_CONVERT:
3767 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3768 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3770 /* fall through */
3772 case FLOAT_EXPR:
3773 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3774 return build1_loc (loc, TREE_CODE (arg), type,
3775 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3777 case BIT_AND_EXPR:
3778 if (!integer_onep (TREE_OPERAND (arg, 1)))
3779 return NULL_TREE;
3780 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3782 case SAVE_EXPR:
3783 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3785 case CLEANUP_POINT_EXPR:
3786 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3787 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3788 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3790 default:
3791 return NULL_TREE;
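/* Illustrative sketch (editorial addition, not from the GCC sources;
   kept under #if 0): the TRUTH_AND_EXPR and TRUTH_OR_EXPR cases
   above are De Morgan's laws; an exhaustive check over the truth
   values.  */
#if 0
static int
de_morgan_holds (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      if (!(a && b) != (!a || !b)
          || !(a || b) != (!a && !b))
        return 0;
  return 1;                      /* Returns 1: both laws hold.  */
}
#endif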
3795 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3796 assume that ARG is an operation that returns a truth value (0 or 1
3797 for scalars, 0 or -1 for vectors). Return the folded expression if
3798 folding is successful. Otherwise, return NULL_TREE. */
3800 static tree
3801 fold_invert_truthvalue (location_t loc, tree arg)
3803 tree type = TREE_TYPE (arg);
3804 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3805 ? BIT_NOT_EXPR
3806 : TRUTH_NOT_EXPR,
3807 type, arg);
3810 /* Return a simplified tree node for the truth-negation of ARG. This
3811 never alters ARG itself. We assume that ARG is an operation that
3812 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3814 tree
3815 invert_truthvalue_loc (location_t loc, tree arg)
3817 if (TREE_CODE (arg) == ERROR_MARK)
3818 return arg;
3820 tree type = TREE_TYPE (arg);
3821 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3822 ? BIT_NOT_EXPR
3823 : TRUTH_NOT_EXPR,
3824 type, arg);
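/* Illustrative sketch (editorial addition, not from the GCC sources;
   kept under #if 0): why the vector case above uses BIT_NOT_EXPR.
   Vector comparisons yield all-ones (-1) per lane for true and 0 for
   false, and bitwise NOT swaps exactly those two values.  */
#if 0
#include <stdint.h>

static int
mask_not_demo (void)
{
  int32_t vtrue = -1, vfalse = 0;   /* Per-lane truth values.  */
  return ~vtrue == vfalse && ~vfalse == vtrue;   /* Returns 1.  */
}
#endif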
3827 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3828 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3829 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
3830 is the original memory reference used to preserve the alias set of
3831 the access. */
3833 static tree
3834 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3835 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3836 int unsignedp, int reversep)
3838 tree result, bftype;
3840 /* Attempt not to lose the access path if possible. */
3841 if (TREE_CODE (orig_inner) == COMPONENT_REF)
3843 tree ninner = TREE_OPERAND (orig_inner, 0);
3844 machine_mode nmode;
3845 HOST_WIDE_INT nbitsize, nbitpos;
3846 tree noffset;
3847 int nunsignedp, nreversep, nvolatilep = 0;
3848 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
3849 &noffset, &nmode, &nunsignedp,
3850 &nreversep, &nvolatilep);
3851 if (base == inner
3852 && noffset == NULL_TREE
3853 && nbitsize >= bitsize
3854 && nbitpos <= bitpos
3855 && bitpos + bitsize <= nbitpos + nbitsize
3856 && !reversep
3857 && !nreversep
3858 && !nvolatilep)
3860 inner = ninner;
3861 bitpos -= nbitpos;
3865 alias_set_type iset = get_alias_set (orig_inner);
3866 if (iset == 0 && get_alias_set (inner) != iset)
3867 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
3868 build_fold_addr_expr (inner),
3869 build_int_cst (ptr_type_node, 0));
3871 if (bitpos == 0 && !reversep)
3873 tree size = TYPE_SIZE (TREE_TYPE (inner));
3874 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3875 || POINTER_TYPE_P (TREE_TYPE (inner)))
3876 && tree_fits_shwi_p (size)
3877 && tree_to_shwi (size) == bitsize)
3878 return fold_convert_loc (loc, type, inner);
3881 bftype = type;
3882 if (TYPE_PRECISION (bftype) != bitsize
3883 || TYPE_UNSIGNED (bftype) == !unsignedp)
3884 bftype = build_nonstandard_integer_type (bitsize, 0);
3886 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3887 bitsize_int (bitsize), bitsize_int (bitpos));
3888 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3890 if (bftype != type)
3891 result = fold_convert_loc (loc, type, result);
3893 return result;
3896 /* Optimize a bit-field compare.
3898 There are two cases: First is a compare against a constant and the
3899 second is a comparison of two items where the fields are at the same
3900 bit position relative to the start of a chunk (byte, halfword, word)
3901 large enough to contain it. In these cases we can avoid the shift
3902 implicit in bitfield extractions.
3904 For constants, we emit a compare of the shifted constant with the
3905 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3906 compared. For two fields at the same position, we do the ANDs with the
3907 similar mask and compare the result of the ANDs.
3909 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3910 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3911 are the left and right operands of the comparison, respectively.
3913 If the optimization described above can be done, we return the resulting
3914 tree. Otherwise we return zero. */
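/* As a sketch of the constant case, assuming a hypothetical

     struct S { unsigned int a : 2; unsigned int f : 3; } s;

   on a little-endian target the test s.f == 5 can be folded to
   something like

     (BYTE & 0x1c) == (5 << 2)

   where BYTE is a mode-sized load covering the bit-field, 0x1c masks
   bits 2..4, and 5 << 2 is the constant moved into place, avoiding
   the shift that extracting s.f by itself would need. */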
3916 static tree
3917 optimize_bit_field_compare (location_t loc, enum tree_code code,
3918 tree compare_type, tree lhs, tree rhs)
3920 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3921 tree type = TREE_TYPE (lhs);
3922 tree unsigned_type;
3923 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3924 machine_mode lmode, rmode;
3925 scalar_int_mode nmode;
3926 int lunsignedp, runsignedp;
3927 int lreversep, rreversep;
3928 int lvolatilep = 0, rvolatilep = 0;
3929 tree linner, rinner = NULL_TREE;
3930 tree mask;
3931 tree offset;
3933 /* Get all the information about the extractions being done. If the bit size
3934 is the same as the size of the underlying object, we aren't doing an
3935 extraction at all and so can do nothing. We also don't want to
3936 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3937 then will no longer be able to replace it. */
3938 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3939 &lunsignedp, &lreversep, &lvolatilep);
3940 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3941 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3942 return 0;
3944 if (const_p)
3945 rreversep = lreversep;
3946 else
3948 /* If this is not a constant, we can only do something if bit positions,
3949 sizes, signedness and storage order are the same. */
3950 rinner
3951 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3952 &runsignedp, &rreversep, &rvolatilep);
3954 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3955 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3956 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3957 return 0;
3960 /* Honor the C++ memory model and mimic what RTL expansion does. */
3961 unsigned HOST_WIDE_INT bitstart = 0;
3962 unsigned HOST_WIDE_INT bitend = 0;
3963 if (TREE_CODE (lhs) == COMPONENT_REF)
3965 get_bit_range (&bitstart, &bitend, lhs, &lbitpos, &offset);
3966 if (offset != NULL_TREE)
3967 return 0;
3970 /* See if we can find a mode to refer to this field. We should be able to,
3971 but fail if we can't. */
3972 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
3973 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3974 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3975 TYPE_ALIGN (TREE_TYPE (rinner))),
3976 BITS_PER_WORD, false, &nmode))
3977 return 0;
3979 /* Get an unsigned type of the precision of this mode for the
3980 shifts below. */
3981 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3983 /* Compute the bit position and size for the new reference and our offset
3984 within it. If the new reference is the same size as the original, we
3985 won't optimize anything, so return zero. */
3986 nbitsize = GET_MODE_BITSIZE (nmode);
3987 nbitpos = lbitpos & ~ (nbitsize - 1);
3988 lbitpos -= nbitpos;
3989 if (nbitsize == lbitsize)
3990 return 0;
3992 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3993 lbitpos = nbitsize - lbitsize - lbitpos;
3995 /* Make the mask to be used against the extracted field. */
3996 mask = build_int_cst_type (unsigned_type, -1);
3997 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3998 mask = const_binop (RSHIFT_EXPR, mask,
3999 size_int (nbitsize - lbitsize - lbitpos));
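/* For instance, assuming nbitsize == 8, lbitsize == 3 and lbitpos == 2,
   the two shifts above compute 0xff << 5 == 0xe0 and then 0xe0 >> 3
   == 0x1c, i.e. lbitsize one bits starting at bit lbitpos. */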
4001 if (! const_p)
4003 if (nbitpos < 0)
4004 return 0;
4006 /* If not comparing with a constant, just rework the comparison
4007 and return. */
4008 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4009 nbitsize, nbitpos, 1, lreversep);
4010 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4011 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4012 nbitsize, nbitpos, 1, rreversep);
4013 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4014 return fold_build2_loc (loc, code, compare_type, t1, t2);
4017 /* Otherwise, we are handling the constant case. See if the constant is too
4018 big for the field. Warn and return a tree for 0 (false) if so. We do
4019 this not only for its own sake, but to avoid having to test for this
4020 error case below. If we didn't, we might generate wrong code.
4022 For unsigned fields, the constant shifted right by the field length should
4023 be all zero. For signed fields, the high-order bits should agree with
4024 the sign bit. */
4026 if (lunsignedp)
4028 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4030 warning (0, "comparison is always %d due to width of bit-field",
4031 code == NE_EXPR);
4032 return constant_boolean_node (code == NE_EXPR, compare_type);
4035 else
4037 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4038 if (tem != 0 && tem != -1)
4040 warning (0, "comparison is always %d due to width of bit-field",
4041 code == NE_EXPR);
4042 return constant_boolean_node (code == NE_EXPR, compare_type);
4046 if (nbitpos < 0)
4047 return 0;
4049 /* Single-bit compares should always be against zero. */
4050 if (lbitsize == 1 && ! integer_zerop (rhs))
4052 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4053 rhs = build_int_cst (type, 0);
4056 /* Make a new bitfield reference, shift the constant over the
4057 appropriate number of bits and mask it with the computed mask
4058 (in case this was a signed field). If we changed it, make a new one. */
4059 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4060 nbitsize, nbitpos, 1, lreversep);
4062 rhs = const_binop (BIT_AND_EXPR,
4063 const_binop (LSHIFT_EXPR,
4064 fold_convert_loc (loc, unsigned_type, rhs),
4065 size_int (lbitpos)),
4066 mask);
4068 lhs = build2_loc (loc, code, compare_type,
4069 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4070 return lhs;
4073 /* Subroutine for fold_truth_andor_1: decode a field reference.
4075 If EXP is a comparison reference, we return the innermost reference.
4077 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4078 set to the starting bit number.
4080 If the innermost field can be completely contained in a mode-sized
4081 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4083 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4084 otherwise it is not changed.
4086 *PUNSIGNEDP is set to the signedness of the field.
4088 *PREVERSEP is set to the storage order of the field.
4090 *PMASK is set to the mask used. This is either contained in a
4091 BIT_AND_EXPR or derived from the width of the field.
4093 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4095 Return 0 if this is not a component reference or is one that we can't
4096 do anything with. */
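/* For example, given a hypothetical access of the form

     (unsigned int) s.f & 3

   the conversion is stripped, *PAND_MASK is set to 3, the reference
   s.f is decoded into its bit size and position, and the returned
   mask combines the field-width mask with the AND mask. */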
4098 static tree
4099 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4100 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4101 int *punsignedp, int *preversep, int *pvolatilep,
4102 tree *pmask, tree *pand_mask)
4104 tree exp = *exp_;
4105 tree outer_type = 0;
4106 tree and_mask = 0;
4107 tree mask, inner, offset;
4108 tree unsigned_type;
4109 unsigned int precision;
4111 /* All the optimizations using this function assume integer fields.
4112 There are problems with FP fields since the type_for_size call
4113 below can fail for, e.g., XFmode. */
4114 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4115 return 0;
4117 /* We are interested in the bare arrangement of bits, so strip everything
4118 that doesn't affect the machine mode. However, record the type of the
4119 outermost expression if it may matter below. */
4120 if (CONVERT_EXPR_P (exp)
4121 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4122 outer_type = TREE_TYPE (exp);
4123 STRIP_NOPS (exp);
4125 if (TREE_CODE (exp) == BIT_AND_EXPR)
4127 and_mask = TREE_OPERAND (exp, 1);
4128 exp = TREE_OPERAND (exp, 0);
4129 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4130 if (TREE_CODE (and_mask) != INTEGER_CST)
4131 return 0;
4134 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4135 punsignedp, preversep, pvolatilep);
4136 if ((inner == exp && and_mask == 0)
4137 || *pbitsize < 0 || offset != 0
4138 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4139 /* Reject out-of-bound accesses (PR79731). */
4140 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4141 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4142 *pbitpos + *pbitsize) < 0))
4143 return 0;
4145 *exp_ = exp;
4147 /* If the number of bits in the reference is the same as the bitsize of
4148 the outer type, then the outer type gives the signedness. Otherwise
4149 (in case of a small bitfield) the signedness is unchanged. */
4150 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4151 *punsignedp = TYPE_UNSIGNED (outer_type);
4153 /* Compute the mask to access the bitfield. */
4154 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4155 precision = TYPE_PRECISION (unsigned_type);
4157 mask = build_int_cst_type (unsigned_type, -1);
4159 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4160 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4162 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4163 if (and_mask != 0)
4164 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4165 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4167 *pmask = mask;
4168 *pand_mask = and_mask;
4169 return inner;
4172 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4173 bit positions and the type of MASK is signed. */
4175 static int
4176 all_ones_mask_p (const_tree mask, unsigned int size)
4178 tree type = TREE_TYPE (mask);
4179 unsigned int precision = TYPE_PRECISION (type);
4181 /* If this function returns true when the type of the mask is
4182 UNSIGNED, then there will be errors. In particular see
4183 gcc.c-torture/execute/990326-1.c. There does not appear to be
4184 any documentation paper trail as to why this is so. But the pre
4185 wide-int worked with that restriction and it has been preserved
4186 here. */
4187 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4188 return false;
4190 return wi::mask (size, false, precision) == wi::to_wide (mask);
4193 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4194 represents the sign bit of EXP's type. If EXP represents a sign
4195 or zero extension, also test VAL against the unextended type.
4196 The return value is the (sub)expression whose sign bit is VAL,
4197 or NULL_TREE otherwise. */
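/* E.g. for an 8-bit signed char EXP, VAL == -128 (bit pattern 0x80)
   is its sign bit, and for EXP == (int) c with c a signed char, VAL
   is likewise tested against the 8-bit precision of c's type. */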
4199 tree
4200 sign_bit_p (tree exp, const_tree val)
4202 int width;
4203 tree t;
4205 /* Tree EXP must have an integral type. */
4206 t = TREE_TYPE (exp);
4207 if (! INTEGRAL_TYPE_P (t))
4208 return NULL_TREE;
4210 /* Tree VAL must be an integer constant. */
4211 if (TREE_CODE (val) != INTEGER_CST
4212 || TREE_OVERFLOW (val))
4213 return NULL_TREE;
4215 width = TYPE_PRECISION (t);
4216 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4217 return exp;
4219 /* Handle extension from a narrower type. */
4220 if (TREE_CODE (exp) == NOP_EXPR
4221 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4222 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4224 return NULL_TREE;
4227 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4228 to be evaluated unconditionally. */
4230 static int
4231 simple_operand_p (const_tree exp)
4233 /* Strip any conversions that don't change the machine mode. */
4234 STRIP_NOPS (exp);
4236 return (CONSTANT_CLASS_P (exp)
4237 || TREE_CODE (exp) == SSA_NAME
4238 || (DECL_P (exp)
4239 && ! TREE_ADDRESSABLE (exp)
4240 && ! TREE_THIS_VOLATILE (exp)
4241 && ! DECL_NONLOCAL (exp)
4242 /* Don't regard global variables as simple. They may be
4243 allocated in ways unknown to the compiler (shared memory,
4244 #pragma weak, etc). */
4245 && ! TREE_PUBLIC (exp)
4246 && ! DECL_EXTERNAL (exp)
4247 /* Weakrefs are not safe to be read, since they can be NULL.
4248 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4249 have DECL_WEAK flag set. */
4250 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4251 /* Loading a static variable is unduly expensive, but global
4252 registers aren't expensive. */
4253 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4256 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4257 to be evaluated unconditionally.
4258 In addition to simple_operand_p, we assume that comparisons, conversions,
4259 and logic-not operations are simple if their operands are simple, too. */
4261 static bool
4262 simple_operand_p_2 (tree exp)
4264 enum tree_code code;
4266 if (TREE_SIDE_EFFECTS (exp)
4267 || tree_could_trap_p (exp))
4268 return false;
4270 while (CONVERT_EXPR_P (exp))
4271 exp = TREE_OPERAND (exp, 0);
4273 code = TREE_CODE (exp);
4275 if (TREE_CODE_CLASS (code) == tcc_comparison)
4276 return (simple_operand_p (TREE_OPERAND (exp, 0))
4277 && simple_operand_p (TREE_OPERAND (exp, 1)));
4279 if (code == TRUTH_NOT_EXPR)
4280 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4282 return simple_operand_p (exp);
4286 /* The following functions are subroutines to fold_range_test and allow it to
4287 try to change a logical combination of comparisons into a range test.
4289 For example, both
4290 X == 2 || X == 3 || X == 4 || X == 5
4291 and
4292 X >= 2 && X <= 5
4293 are converted to
4294 (unsigned) (X - 2) <= 3
4296 We describe each set of comparisons as being either inside or outside
4297 a range, using a variable named like IN_P, and then describe the
4298 range with a lower and upper bound. If one of the bounds is omitted,
4299 it represents either the highest or lowest value of the type.
4301 In the comments below, we represent a range by two numbers in brackets
4302 preceded by a "+" to designate being inside that range, or a "-" to
4303 designate being outside that range, so the condition can be inverted by
4304 flipping the prefix. An omitted bound is represented by a "-". For
4305 example, "- [-, 10]" means being outside the range starting at the lowest
4306 possible value and ending at 10, in other words, being greater than 10.
4307 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4308 always false.
4310 We set up things so that the missing bounds are handled in a consistent
4311 manner so neither a missing bound nor "true" and "false" need to be
4312 handled using a special case. */
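/* The folded form works because the subtraction is done in an unsigned
   type: for X >= 2 && X <= 5, any X below 2 wraps around in X - 2 and
   becomes larger than 3, so (unsigned) (X - 2) <= 3 accepts exactly
   the values 2 through 5. */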
4314 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4315 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4316 and UPPER1_P are nonzero if the respective argument is an upper bound
4317 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4318 must be specified for a comparison. ARG1 will be converted to ARG0's
4319 type if both are specified. */
4321 static tree
4322 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4323 tree arg1, int upper1_p)
4325 tree tem;
4326 int result;
4327 int sgn0, sgn1;
4329 /* If neither arg represents infinity, do the normal operation.
4330 Else, if not a comparison, return infinity. Else handle the special
4331 comparison rules. Note that most of the cases below won't occur, but
4332 are handled for consistency. */
4334 if (arg0 != 0 && arg1 != 0)
4336 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4337 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4338 STRIP_NOPS (tem);
4339 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4342 if (TREE_CODE_CLASS (code) != tcc_comparison)
4343 return 0;
4345 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4346 for neither. In real mathematics, we cannot assume open-ended ranges
4347 are the same. But this is computer arithmetic, where numbers are finite.
4348 We can therefore replace any missing bound with a value Z,
4349 Z being greater than any representable number. This permits
4350 us to treat unbounded ranges as equal. */
4351 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4352 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4353 switch (code)
4355 case EQ_EXPR:
4356 result = sgn0 == sgn1;
4357 break;
4358 case NE_EXPR:
4359 result = sgn0 != sgn1;
4360 break;
4361 case LT_EXPR:
4362 result = sgn0 < sgn1;
4363 break;
4364 case LE_EXPR:
4365 result = sgn0 <= sgn1;
4366 break;
4367 case GT_EXPR:
4368 result = sgn0 > sgn1;
4369 break;
4370 case GE_EXPR:
4371 result = sgn0 >= sgn1;
4372 break;
4373 default:
4374 gcc_unreachable ();
4377 return constant_boolean_node (result, type);
4380 /* Helper routine for make_range. Perform one step for it, return
4381 new expression if the loop should continue or NULL_TREE if it should
4382 stop. */
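/* As a worked example, assume X is an unsigned char and the incoming
   range for X + 3 is + [0, 4]. The PLUS_EXPR case below subtracts 3
   from both bounds, giving [253, 1]; since that wrapped around, the
   "normalize" code flips it into the complement - [2, 252], which is
   the set { 253, 254, 255, 0, 1 } satisfying X + 3 <= 4. */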
4384 tree
4385 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4386 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4387 bool *strict_overflow_p)
4389 tree arg0_type = TREE_TYPE (arg0);
4390 tree n_low, n_high, low = *p_low, high = *p_high;
4391 int in_p = *p_in_p, n_in_p;
4393 switch (code)
4395 case TRUTH_NOT_EXPR:
4396 /* We can only do something if the range is testing for zero. */
4397 if (low == NULL_TREE || high == NULL_TREE
4398 || ! integer_zerop (low) || ! integer_zerop (high))
4399 return NULL_TREE;
4400 *p_in_p = ! in_p;
4401 return arg0;
4403 case EQ_EXPR: case NE_EXPR:
4404 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4405 /* We can only do something if the range is testing for zero
4406 and if the second operand is an integer constant. Note that
4407 saying something is "in" the range we make is done by
4408 complementing IN_P since it will set in the initial case of
4409 being not equal to zero; "out" is leaving it alone. */
4410 if (low == NULL_TREE || high == NULL_TREE
4411 || ! integer_zerop (low) || ! integer_zerop (high)
4412 || TREE_CODE (arg1) != INTEGER_CST)
4413 return NULL_TREE;
4415 switch (code)
4417 case NE_EXPR: /* - [c, c] */
4418 low = high = arg1;
4419 break;
4420 case EQ_EXPR: /* + [c, c] */
4421 in_p = ! in_p, low = high = arg1;
4422 break;
4423 case GT_EXPR: /* - [-, c] */
4424 low = 0, high = arg1;
4425 break;
4426 case GE_EXPR: /* + [c, -] */
4427 in_p = ! in_p, low = arg1, high = 0;
4428 break;
4429 case LT_EXPR: /* - [c, -] */
4430 low = arg1, high = 0;
4431 break;
4432 case LE_EXPR: /* + [-, c] */
4433 in_p = ! in_p, low = 0, high = arg1;
4434 break;
4435 default:
4436 gcc_unreachable ();
4439 /* If this is an unsigned comparison, we also know that EXP is
4440 greater than or equal to zero. We base the range tests we make
4441 on that fact, so we record it here so we can parse existing
4442 range tests. We test arg0_type since often the return type
4443 of, e.g. EQ_EXPR, is boolean. */
4444 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4446 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4447 in_p, low, high, 1,
4448 build_int_cst (arg0_type, 0),
4449 NULL_TREE))
4450 return NULL_TREE;
4452 in_p = n_in_p, low = n_low, high = n_high;
4454 /* If the high bound is missing, but we have a nonzero low
4455 bound, reverse the range so it goes from zero to the low bound
4456 minus 1. */
4457 if (high == 0 && low && ! integer_zerop (low))
4459 in_p = ! in_p;
4460 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4461 build_int_cst (TREE_TYPE (low), 1), 0);
4462 low = build_int_cst (arg0_type, 0);
4466 *p_low = low;
4467 *p_high = high;
4468 *p_in_p = in_p;
4469 return arg0;
4471 case NEGATE_EXPR:
4472 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4473 low and high are non-NULL, then normalize will DTRT. */
4474 if (!TYPE_UNSIGNED (arg0_type)
4475 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4477 if (low == NULL_TREE)
4478 low = TYPE_MIN_VALUE (arg0_type);
4479 if (high == NULL_TREE)
4480 high = TYPE_MAX_VALUE (arg0_type);
4483 /* (-x) IN [a,b] -> x in [-b, -a] */
4484 n_low = range_binop (MINUS_EXPR, exp_type,
4485 build_int_cst (exp_type, 0),
4486 0, high, 1);
4487 n_high = range_binop (MINUS_EXPR, exp_type,
4488 build_int_cst (exp_type, 0),
4489 0, low, 0);
4490 if (n_high != 0 && TREE_OVERFLOW (n_high))
4491 return NULL_TREE;
4492 goto normalize;
4494 case BIT_NOT_EXPR:
4495 /* ~ X -> -X - 1 */
4496 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4497 build_int_cst (exp_type, 1));
4499 case PLUS_EXPR:
4500 case MINUS_EXPR:
4501 if (TREE_CODE (arg1) != INTEGER_CST)
4502 return NULL_TREE;
4504 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4505 move a constant to the other side. */
4506 if (!TYPE_UNSIGNED (arg0_type)
4507 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4508 return NULL_TREE;
4510 /* If EXP is signed, any overflow in the computation is undefined,
4511 so we don't worry about it so long as our computations on
4512 the bounds don't overflow. For unsigned, overflow is defined
4513 and this is exactly the right thing. */
4514 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4515 arg0_type, low, 0, arg1, 0);
4516 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4517 arg0_type, high, 1, arg1, 0);
4518 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4519 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4520 return NULL_TREE;
4522 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4523 *strict_overflow_p = true;
4525 normalize:
4526 /* Check for an unsigned range which has wrapped around the maximum
4527 value thus making n_high < n_low, and normalize it. */
4528 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4530 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4531 build_int_cst (TREE_TYPE (n_high), 1), 0);
4532 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4533 build_int_cst (TREE_TYPE (n_low), 1), 0);
4535 /* If the range is of the form +/- [ x+1, x ], we won't
4536 be able to normalize it. But then, it represents the
4537 whole range or the empty set, so make it
4538 +/- [ -, - ]. */
4539 if (tree_int_cst_equal (n_low, low)
4540 && tree_int_cst_equal (n_high, high))
4541 low = high = 0;
4542 else
4543 in_p = ! in_p;
4545 else
4546 low = n_low, high = n_high;
4548 *p_low = low;
4549 *p_high = high;
4550 *p_in_p = in_p;
4551 return arg0;
4553 CASE_CONVERT:
4554 case NON_LVALUE_EXPR:
4555 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4556 return NULL_TREE;
4558 if (! INTEGRAL_TYPE_P (arg0_type)
4559 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4560 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4561 return NULL_TREE;
4563 n_low = low, n_high = high;
4565 if (n_low != 0)
4566 n_low = fold_convert_loc (loc, arg0_type, n_low);
4568 if (n_high != 0)
4569 n_high = fold_convert_loc (loc, arg0_type, n_high);
4571 /* If we're converting arg0 from an unsigned type to the
4572 signed type of exp, we will be doing the comparison as unsigned.
4573 The tests above have already verified that LOW and HIGH
4574 are both positive.
4576 So we have to ensure that we will handle large unsigned
4577 values the same way that the current signed bounds treat
4578 negative values. */
4580 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4582 tree high_positive;
4583 tree equiv_type;
4584 /* For fixed-point modes, we need to pass the saturating flag
4585 as the 2nd parameter. */
4586 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4587 equiv_type
4588 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4589 TYPE_SATURATING (arg0_type));
4590 else
4591 equiv_type
4592 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4594 /* A range without an upper bound is, naturally, unbounded.
4595 Since convert would have cropped a very large value, use
4596 the max value for the destination type. */
4597 high_positive
4598 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4599 : TYPE_MAX_VALUE (arg0_type);
4601 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4602 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4603 fold_convert_loc (loc, arg0_type,
4604 high_positive),
4605 build_int_cst (arg0_type, 1));
4607 /* If the low bound is specified, "and" the range with the
4608 range for which the original unsigned value will be
4609 positive. */
4610 if (low != 0)
4612 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4613 1, fold_convert_loc (loc, arg0_type,
4614 integer_zero_node),
4615 high_positive))
4616 return NULL_TREE;
4618 in_p = (n_in_p == in_p);
4620 else
4622 /* Otherwise, "or" the range with the range of the input
4623 that will be interpreted as negative. */
4624 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4625 1, fold_convert_loc (loc, arg0_type,
4626 integer_zero_node),
4627 high_positive))
4628 return NULL_TREE;
4630 in_p = (in_p != n_in_p);
4634 *p_low = n_low;
4635 *p_high = n_high;
4636 *p_in_p = in_p;
4637 return arg0;
4639 default:
4640 return NULL_TREE;
4644 /* Given EXP, a logical expression, set the range it is testing into
4645 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4646 actually being tested. *PLOW and *PHIGH will be made of the same
4647 type as the returned expression. If EXP is not a comparison, we
4648 will most likely not be returning a useful value and range. Set
4649 *STRICT_OVERFLOW_P to true if the return value is only valid
4650 because signed overflow is undefined; otherwise, do not change
4651 *STRICT_OVERFLOW_P. */
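/* E.g. for EXP == (c > 47) with c of unsigned type, the loop below
   first turns the GT_EXPR into the range - [-, 47] and then, because
   c is unsigned, refolds it into + [48, -], i.e. c >= 48. */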
4653 tree
4654 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4655 bool *strict_overflow_p)
4657 enum tree_code code;
4658 tree arg0, arg1 = NULL_TREE;
4659 tree exp_type, nexp;
4660 int in_p;
4661 tree low, high;
4662 location_t loc = EXPR_LOCATION (exp);
4664 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4665 and see if we can refine the range. Some of the cases below may not
4666 happen, but it doesn't seem worth worrying about this. We keep
4667 iterating as long as make_range_step refines the expression and
4668 stop as soon as a step fails. */
4670 in_p = 0;
4671 low = high = build_int_cst (TREE_TYPE (exp), 0);
4673 while (1)
4675 code = TREE_CODE (exp);
4676 exp_type = TREE_TYPE (exp);
4677 arg0 = NULL_TREE;
4679 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4681 if (TREE_OPERAND_LENGTH (exp) > 0)
4682 arg0 = TREE_OPERAND (exp, 0);
4683 if (TREE_CODE_CLASS (code) == tcc_binary
4684 || TREE_CODE_CLASS (code) == tcc_comparison
4685 || (TREE_CODE_CLASS (code) == tcc_expression
4686 && TREE_OPERAND_LENGTH (exp) > 1))
4687 arg1 = TREE_OPERAND (exp, 1);
4689 if (arg0 == NULL_TREE)
4690 break;
4692 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4693 &high, &in_p, strict_overflow_p);
4694 if (nexp == NULL_TREE)
4695 break;
4696 exp = nexp;
4699 /* If EXP is a constant, we can evaluate whether this is true or false. */
4700 if (TREE_CODE (exp) == INTEGER_CST)
4702 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4703 exp, 0, low, 0))
4704 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4705 exp, 1, high, 1)));
4706 low = high = 0;
4707 exp = 0;
4710 *pin_p = in_p, *plow = low, *phigh = high;
4711 return exp;
4714 /* Return TRUE if the [LOW, HIGH] range check can be optimized to
4715 a bitwise check, i.e. when
4716 LOW == 0xXX...X00...0
4717 HIGH == 0xXX...X11...1
4718 Return the corresponding mask in MASK and stem in VALUE. */
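/* For instance, assuming an 8-bit type with LOW == 0x38 and HIGH == 0x3f,
   end_mask == LOW ^ HIGH == 0x07 is a run of low-order ones, LOW has
   none of those bits set, and both bounds share the stem 0x38, so the
   check LOW <= x && x <= HIGH becomes (x & 0xf8) == 0x38. */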
4720 static bool
4721 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
4722 tree *value)
4724 if (TREE_CODE (low) != INTEGER_CST
4725 || TREE_CODE (high) != INTEGER_CST)
4726 return false;
4728 unsigned prec = TYPE_PRECISION (type);
4729 wide_int lo = wi::to_wide (low, prec);
4730 wide_int hi = wi::to_wide (high, prec);
4732 wide_int end_mask = lo ^ hi;
4733 if ((end_mask & (end_mask + 1)) != 0
4734 || (lo & end_mask) != 0)
4735 return false;
4737 wide_int stem_mask = ~end_mask;
4738 wide_int stem = lo & stem_mask;
4739 if (stem != (hi & stem_mask))
4740 return false;
4742 *mask = wide_int_to_tree (type, stem_mask);
4743 *value = wide_int_to_tree (type, stem);
4745 return true;
4748 /* Helper routine for build_range_check and match.pd. Return the type to
4749 perform the check or NULL if it shouldn't be optimized. */
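/* E.g. for a 32-bit int, (unsigned) INT_MAX + 1 == 0x80000000
   == (unsigned) INT_MIN, so the check below succeeds and the
   unsigned variant of the type is returned. */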
4751 tree
4752 range_check_type (tree etype)
4754 /* First make sure that arithmetic in this type is valid, then make sure
4755 that it wraps around. */
4756 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4757 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4758 TYPE_UNSIGNED (etype));
4760 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4762 tree utype, minv, maxv;
4764 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4765 for the type in question, as we rely on this here. */
4766 utype = unsigned_type_for (etype);
4767 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4768 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4769 build_int_cst (TREE_TYPE (maxv), 1), 1);
4770 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4772 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4773 minv, 1, maxv, 1)))
4774 etype = utype;
4775 else
4776 return NULL_TREE;
4778 return etype;
4781 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4782 type, TYPE, return an expression to test if EXP is in (or out of, depending
4783 on IN_P) the range. Return 0 if the test couldn't be created. */
4785 tree
4786 build_range_check (location_t loc, tree type, tree exp, int in_p,
4787 tree low, tree high)
4789 tree etype = TREE_TYPE (exp), mask, value;
4791 /* Disable this optimization for function pointer expressions
4792 on targets that require function pointer canonicalization. */
4793 if (targetm.have_canonicalize_funcptr_for_compare ()
4794 && TREE_CODE (etype) == POINTER_TYPE
4795 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4796 return NULL_TREE;
4798 if (! in_p)
4800 value = build_range_check (loc, type, exp, 1, low, high);
4801 if (value != 0)
4802 return invert_truthvalue_loc (loc, value);
4804 return 0;
4807 if (low == 0 && high == 0)
4808 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4810 if (low == 0)
4811 return fold_build2_loc (loc, LE_EXPR, type, exp,
4812 fold_convert_loc (loc, etype, high));
4814 if (high == 0)
4815 return fold_build2_loc (loc, GE_EXPR, type, exp,
4816 fold_convert_loc (loc, etype, low));
4818 if (operand_equal_p (low, high, 0))
4819 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4820 fold_convert_loc (loc, etype, low));
4822 if (TREE_CODE (exp) == BIT_AND_EXPR
4823 && maskable_range_p (low, high, etype, &mask, &value))
4824 return fold_build2_loc (loc, EQ_EXPR, type,
4825 fold_build2_loc (loc, BIT_AND_EXPR, etype,
4826 exp, mask),
4827 value);
4829 if (integer_zerop (low))
4831 if (! TYPE_UNSIGNED (etype))
4833 etype = unsigned_type_for (etype);
4834 high = fold_convert_loc (loc, etype, high);
4835 exp = fold_convert_loc (loc, etype, exp);
4837 return build_range_check (loc, type, exp, 1, 0, high);
4840 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4841 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4843 int prec = TYPE_PRECISION (etype);
4845 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
4847 if (TYPE_UNSIGNED (etype))
4849 tree signed_etype = signed_type_for (etype);
4850 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4851 etype
4852 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4853 else
4854 etype = signed_etype;
4855 exp = fold_convert_loc (loc, etype, exp);
4857 return fold_build2_loc (loc, GT_EXPR, type, exp,
4858 build_int_cst (etype, 0));
4862 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4863 This requires wrap-around arithmetic for the type of the expression. */
4864 etype = range_check_type (etype);
4865 if (etype == NULL_TREE)
4866 return NULL_TREE;
4868 if (POINTER_TYPE_P (etype))
4869 etype = unsigned_type_for (etype);
4871 high = fold_convert_loc (loc, etype, high);
4872 low = fold_convert_loc (loc, etype, low);
4873 exp = fold_convert_loc (loc, etype, exp);
4875 value = const_binop (MINUS_EXPR, high, low);
4877 if (value != 0 && !TREE_OVERFLOW (value))
4878 return build_range_check (loc, type,
4879 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4880 1, build_int_cst (etype, 0), value);
4882 return 0;
4885 /* Return the predecessor of VAL in its type, handling the infinite case. */
4887 static tree
4888 range_predecessor (tree val)
4890 tree type = TREE_TYPE (val);
4892 if (INTEGRAL_TYPE_P (type)
4893 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4894 return 0;
4895 else
4896 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4897 build_int_cst (TREE_TYPE (val), 1), 0);
4900 /* Return the successor of VAL in its type, handling the infinite case. */
4902 static tree
4903 range_successor (tree val)
4905 tree type = TREE_TYPE (val);
4907 if (INTEGRAL_TYPE_P (type)
4908 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4909 return 0;
4910 else
4911 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4912 build_int_cst (TREE_TYPE (val), 1), 0);
4915 /* Given two ranges, see if we can merge them into one. Return 1 if we
4916 can, 0 if we can't. Set the output range into the specified parameters. */
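/* For example, merging + [2, 5] with + [4, 9] (both ranges included,
   as for the AND of the two tests) yields the intersection + [4, 5],
   while merging + [2, 5] with - [4, 9] yields + [2, 3]. */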
4918 bool
4919 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4920 tree high0, int in1_p, tree low1, tree high1)
4922 int no_overlap;
4923 int subset;
4924 int temp;
4925 tree tem;
4926 int in_p;
4927 tree low, high;
4928 int lowequal = ((low0 == 0 && low1 == 0)
4929 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4930 low0, 0, low1, 0)));
4931 int highequal = ((high0 == 0 && high1 == 0)
4932 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4933 high0, 1, high1, 1)));
4935 /* Make range 0 be the range that starts first, or ends last if they
4936 start at the same value. Swap them if it isn't. */
4937 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4938 low0, 0, low1, 0))
4939 || (lowequal
4940 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4941 high1, 1, high0, 1))))
4943 temp = in0_p, in0_p = in1_p, in1_p = temp;
4944 tem = low0, low0 = low1, low1 = tem;
4945 tem = high0, high0 = high1, high1 = tem;
4948 /* Now flag two cases, whether the ranges are disjoint or whether the
4949 second range is totally subsumed in the first. Note that the tests
4950 below are simplified by the ones above. */
4951 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4952 high0, 1, low1, 0));
4953 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4954 high1, 1, high0, 1));
4956 /* We now have four cases, depending on whether we are including or
4957 excluding the two ranges. */
4958 if (in0_p && in1_p)
4960 /* If they don't overlap, the result is false. If the second range
4961 is a subset it is the result. Otherwise, the range is from the start
4962 of the second to the end of the first. */
4963 if (no_overlap)
4964 in_p = 0, low = high = 0;
4965 else if (subset)
4966 in_p = 1, low = low1, high = high1;
4967 else
4968 in_p = 1, low = low1, high = high0;
4971 else if (in0_p && ! in1_p)
4973 /* If they don't overlap, the result is the first range. If they are
4974 equal, the result is false. If the second range is a subset of the
4975 first, and the ranges begin at the same place, we go from just after
4976 the end of the second range to the end of the first. If the second
4977 range is not a subset of the first, or if it is a subset and both
4978 ranges end at the same place, the range starts at the start of the
4979 first range and ends just before the second range.
4980 Otherwise, we can't describe this as a single range. */
4981 if (no_overlap)
4982 in_p = 1, low = low0, high = high0;
4983 else if (lowequal && highequal)
4984 in_p = 0, low = high = 0;
4985 else if (subset && lowequal)
4987 low = range_successor (high1);
4988 high = high0;
4989 in_p = 1;
4990 if (low == 0)
4992 /* We are in the weird situation where high0 > high1 but
4993 high1 has no successor. Punt. */
4994 return 0;
4997 else if (! subset || highequal)
4999 low = low0;
5000 high = range_predecessor (low1);
5001 in_p = 1;
5002 if (high == 0)
5004 /* low0 < low1 but low1 has no predecessor. Punt. */
5005 return 0;
5008 else
5009 return 0;
5012 else if (! in0_p && in1_p)
5014 /* If they don't overlap, the result is the second range. If the second
5015 is a subset of the first, the result is false. Otherwise,
5016 the range starts just after the first range and ends at the
5017 end of the second. */
5018 if (no_overlap)
5019 in_p = 1, low = low1, high = high1;
5020 else if (subset || highequal)
5021 in_p = 0, low = high = 0;
5022 else
5024 low = range_successor (high0);
5025 high = high1;
5026 in_p = 1;
5027 if (low == 0)
5029 /* high1 > high0 but high0 has no successor. Punt. */
5030 return 0;
5035 else
5037 /* The case where we are excluding both ranges. Here the complex case
5038 is if they don't overlap. In that case, the only time we have a
5039 range is if they are adjacent. If the second is a subset of the
5040 first, the result is the first. Otherwise, the range to exclude
5041 starts at the beginning of the first range and ends at the end of the
5042 second. */
5043 if (no_overlap)
5045 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5046 range_successor (high0),
5047 1, low1, 0)))
5048 in_p = 0, low = low0, high = high1;
5049 else
5051 /* Canonicalize - [min, x] into - [-, x]. */
5052 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5053 switch (TREE_CODE (TREE_TYPE (low0)))
5055 case ENUMERAL_TYPE:
5056 if (TYPE_PRECISION (TREE_TYPE (low0))
5057 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5058 break;
5059 /* FALLTHROUGH */
5060 case INTEGER_TYPE:
5061 if (tree_int_cst_equal (low0,
5062 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5063 low0 = 0;
5064 break;
5065 case POINTER_TYPE:
5066 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5067 && integer_zerop (low0))
5068 low0 = 0;
5069 break;
5070 default:
5071 break;
5074 /* Canonicalize - [x, max] into - [x, -]. */
5075 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5076 switch (TREE_CODE (TREE_TYPE (high1)))
5078 case ENUMERAL_TYPE:
5079 if (TYPE_PRECISION (TREE_TYPE (high1))
5080 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5081 break;
5082 /* FALLTHROUGH */
5083 case INTEGER_TYPE:
5084 if (tree_int_cst_equal (high1,
5085 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5086 high1 = 0;
5087 break;
5088 case POINTER_TYPE:
5089 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5090 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5091 high1, 1,
5092 build_int_cst (TREE_TYPE (high1), 1),
5093 1)))
5094 high1 = 0;
5095 break;
5096 default:
5097 break;
5100 /* The ranges might also be adjacent between the maximum and
5101 minimum values of the given type. For
5102 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5103 return + [x + 1, y - 1]. */
5104 if (low0 == 0 && high1 == 0)
5106 low = range_successor (high0);
5107 high = range_predecessor (low1);
5108 if (low == 0 || high == 0)
5109 return 0;
5111 in_p = 1;
5113 else
5114 return 0;
5117 else if (subset)
5118 in_p = 0, low = low0, high = high0;
5119 else
5120 in_p = 0, low = low0, high = high1;
5123 *pin_p = in_p, *plow = low, *phigh = high;
5124 return 1;
5128 /* Subroutine of fold, looking inside expressions of the form
5129 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5130 of the COND_EXPR. This function is being used also to optimize
5131 A op B ? C : A, by reversing the comparison first.
5133 Return a folded expression whose code is not a COND_EXPR
5134 anymore, or NULL_TREE if no folding opportunity is found. */
5136 static tree
5137 fold_cond_expr_with_comparison (location_t loc, tree type,
5138 tree arg0, tree arg1, tree arg2)
5140 enum tree_code comp_code = TREE_CODE (arg0);
5141 tree arg00 = TREE_OPERAND (arg0, 0);
5142 tree arg01 = TREE_OPERAND (arg0, 1);
5143 tree arg1_type = TREE_TYPE (arg1);
5144 tree tem;
5146 STRIP_NOPS (arg1);
5147 STRIP_NOPS (arg2);
5149 /* If we have A op 0 ? A : -A, consider applying the following
5150 transformations:
5152 A == 0? A : -A same as -A
5153 A != 0? A : -A same as A
5154 A >= 0? A : -A same as abs (A)
5155 A > 0? A : -A same as abs (A)
5156 A <= 0? A : -A same as -abs (A)
5157 A < 0? A : -A same as -abs (A)
5159 None of these transformations work for modes with signed
5160 zeros. If A is +/-0, the first two transformations will
5161 change the sign of the result (from +0 to -0, or vice
5162 versa). The last four will fix the sign of the result,
5163 even though the original expressions could be positive or
5164 negative, depending on the sign of A.
5166 Note that all these transformations are correct if A is
5167 NaN, since the two alternatives (A and -A) are also NaNs. */
5168 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5169 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5170 ? real_zerop (arg01)
5171 : integer_zerop (arg01))
5172 && ((TREE_CODE (arg2) == NEGATE_EXPR
5173 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5174 /* In the case that A is of the form X-Y, '-A' (arg2) may
5175 have already been folded to Y-X, check for that. */
5176 || (TREE_CODE (arg1) == MINUS_EXPR
5177 && TREE_CODE (arg2) == MINUS_EXPR
5178 && operand_equal_p (TREE_OPERAND (arg1, 0),
5179 TREE_OPERAND (arg2, 1), 0)
5180 && operand_equal_p (TREE_OPERAND (arg1, 1),
5181 TREE_OPERAND (arg2, 0), 0))))
5182 switch (comp_code)
5184 case EQ_EXPR:
5185 case UNEQ_EXPR:
5186 tem = fold_convert_loc (loc, arg1_type, arg1);
5187 return fold_convert_loc (loc, type, negate_expr (tem));
5188 case NE_EXPR:
5189 case LTGT_EXPR:
5190 return fold_convert_loc (loc, type, arg1);
5191 case UNGE_EXPR:
5192 case UNGT_EXPR:
5193 if (flag_trapping_math)
5194 break;
5195 /* Fall through. */
5196 case GE_EXPR:
5197 case GT_EXPR:
5198 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5199 break;
5200 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5201 return fold_convert_loc (loc, type, tem);
5202 case UNLE_EXPR:
5203 case UNLT_EXPR:
5204 if (flag_trapping_math)
5205 break;
5206 /* FALLTHRU */
5207 case LE_EXPR:
5208 case LT_EXPR:
5209 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5210 break;
5211 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5212 return negate_expr (fold_convert_loc (loc, type, tem));
5213 default:
5214 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5215 break;
5218 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5219 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5220 both transformations are correct when A is NaN: A != 0
5221 is then true, and A == 0 is false. */
5223 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5224 && integer_zerop (arg01) && integer_zerop (arg2))
5226 if (comp_code == NE_EXPR)
5227 return fold_convert_loc (loc, type, arg1);
5228 else if (comp_code == EQ_EXPR)
5229 return build_zero_cst (type);
5232 /* Try some transformations of A op B ? A : B.
5234 A == B? A : B same as B
5235 A != B? A : B same as A
5236 A >= B? A : B same as max (A, B)
5237 A > B? A : B same as max (B, A)
5238 A <= B? A : B same as min (A, B)
5239 A < B? A : B same as min (B, A)
5241 As above, these transformations don't work in the presence
5242 of signed zeros. For example, if A and B are zeros of
5243 opposite sign, the first two transformations will change
5244 the sign of the result. In the last four, the original
5245 expressions give different results for (A=+0, B=-0) and
5246 (A=-0, B=+0), but the transformed expressions do not.
5248 The first two transformations are correct if either A or B
5249 is a NaN. In the first transformation, the condition will
5250 be false, and B will indeed be chosen. In the case of the
5251 second transformation, the condition A != B will be true,
5252 and A will be chosen.
5254 The conversions to max() and min() are not correct if B is
5255 a number and A is not. The conditions in the original
5256 expressions will be false, so all four give B. The min()
5257 and max() versions would give a NaN instead. */
5258 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5259 && operand_equal_for_comparison_p (arg01, arg2)
5260 /* Avoid these transformations if the COND_EXPR may be used
5261 as an lvalue in the C++ front-end. PR c++/19199. */
5262 && (in_gimple_form
5263 || VECTOR_TYPE_P (type)
5264 || (! lang_GNU_CXX ()
5265 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5266 || ! maybe_lvalue_p (arg1)
5267 || ! maybe_lvalue_p (arg2)))
5269 tree comp_op0 = arg00;
5270 tree comp_op1 = arg01;
5271 tree comp_type = TREE_TYPE (comp_op0);
5273 switch (comp_code)
5275 case EQ_EXPR:
5276 return fold_convert_loc (loc, type, arg2);
5277 case NE_EXPR:
5278 return fold_convert_loc (loc, type, arg1);
5279 case LE_EXPR:
5280 case LT_EXPR:
5281 case UNLE_EXPR:
5282 case UNLT_EXPR:
5283 /* In C++ a ?: expression can be an lvalue, so put the
5284 operand which will be used if they are equal first
5285 so that we can convert this back to the
5286 corresponding COND_EXPR. */
5287 if (!HONOR_NANS (arg1))
5289 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5290 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5291 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5292 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5293 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5294 comp_op1, comp_op0);
5295 return fold_convert_loc (loc, type, tem);
5297 break;
5298 case GE_EXPR:
5299 case GT_EXPR:
5300 case UNGE_EXPR:
5301 case UNGT_EXPR:
5302 if (!HONOR_NANS (arg1))
5304 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5305 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5306 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5307 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5308 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5309 comp_op1, comp_op0);
5310 return fold_convert_loc (loc, type, tem);
5312 break;
5313 case UNEQ_EXPR:
5314 if (!HONOR_NANS (arg1))
5315 return fold_convert_loc (loc, type, arg2);
5316 break;
5317 case LTGT_EXPR:
5318 if (!HONOR_NANS (arg1))
5319 return fold_convert_loc (loc, type, arg1);
5320 break;
5321 default:
5322 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5323 break;
5327 return NULL_TREE;
5332 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5333 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5334 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5335 false) >= 2)
5336 #endif
5338 /* EXP is some logical combination of boolean tests. See if we can
5339 merge it into some range test. Return the new tree if so. */
5341 static tree
5342 fold_range_test (location_t loc, enum tree_code code, tree type,
5343 tree op0, tree op1)
5345 int or_op = (code == TRUTH_ORIF_EXPR
5346 || code == TRUTH_OR_EXPR);
5347 int in0_p, in1_p, in_p;
5348 tree low0, low1, low, high0, high1, high;
5349 bool strict_overflow_p = false;
5350 tree tem, lhs, rhs;
5351 const char * const warnmsg = G_("assuming signed overflow does not occur "
5352 "when simplifying range test");
5354 if (!INTEGRAL_TYPE_P (type))
5355 return 0;
5357 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5358 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5360 /* If this is an OR operation, invert both sides; we will invert
5361 again at the end. */
5362 if (or_op)
5363 in0_p = ! in0_p, in1_p = ! in1_p;
5365 /* If both expressions are the same, if we can merge the ranges, and we
5366 can build the range test, return it or it inverted. If one of the
5367 ranges is always true or always false, consider it to be the same
5368 expression as the other. */
5369 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5370 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5371 in1_p, low1, high1)
5372 && 0 != (tem = (build_range_check (loc, type,
5373 lhs != 0 ? lhs
5374 : rhs != 0 ? rhs : integer_zero_node,
5375 in_p, low, high))))
5377 if (strict_overflow_p)
5378 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5379 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5382 /* On machines where the branch cost is expensive, if this is a
5383 short-circuited branch and the underlying object on both sides
5384 is the same, make a non-short-circuit operation. */
5385 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5386 && !flag_sanitize_coverage
5387 && lhs != 0 && rhs != 0
5388 && (code == TRUTH_ANDIF_EXPR
5389 || code == TRUTH_ORIF_EXPR)
5390 && operand_equal_p (lhs, rhs, 0))
5392 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5393 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5394 which cases we can't do this. */
5395 if (simple_operand_p (lhs))
5396 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5397 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5398 type, op0, op1);
5400 else if (!lang_hooks.decls.global_bindings_p ()
5401 && !CONTAINS_PLACEHOLDER_P (lhs))
5403 tree common = save_expr (lhs);
5405 if (0 != (lhs = build_range_check (loc, type, common,
5406 or_op ? ! in0_p : in0_p,
5407 low0, high0))
5408 && (0 != (rhs = build_range_check (loc, type, common,
5409 or_op ? ! in1_p : in1_p,
5410 low1, high1))))
5412 if (strict_overflow_p)
5413 fold_overflow_warning (warnmsg,
5414 WARN_STRICT_OVERFLOW_COMPARISON);
5415 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5416 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5417 type, lhs, rhs);
5422 return 0;
5425 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
5426 P-bit value. Arrange things so the extra bits will be set to zero if and
5427 only if C is sign-extended to its full width. If MASK is nonzero,
5428 it is an INTEGER_CST that should be AND'ed with the extra bits. */
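/* As a worked example, take P == 4, an 8-bit C and a zero MASK: TEMP
   becomes the sign bit of the 4-bit value replicated through bits
   4..7, i.e. 0xf0 when bit 3 of C is set, so C == 0xfa yields 0x0a
   and C == 0x0a yields 0xfa: the extra bits end up zero exactly when
   C was already sign-extended. */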
5430 static tree
5431 unextend (tree c, int p, int unsignedp, tree mask)
5433 tree type = TREE_TYPE (c);
5434 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
5435 tree temp;
5437 if (p == modesize || unsignedp)
5438 return c;
5440 /* We work by getting just the sign bit into the low-order bit, then
5441 into the high-order bit, then sign-extend. We then XOR that value
5442 with C. */
5443 temp = build_int_cst (TREE_TYPE (c),
5444 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
5446 /* We must use a signed type in order to get an arithmetic right shift.
5447 However, we must also avoid introducing accidental overflows, so that
5448 a subsequent call to integer_zerop will work. Hence we must
5449 do the type conversion here. At this point, the constant is either
5450 zero or one, and the conversion to a signed type can never overflow.
5451 We could get an overflow if this conversion is done anywhere else. */
5452 if (TYPE_UNSIGNED (type))
5453 temp = fold_convert (signed_type_for (type), temp);
5455 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5456 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5457 if (mask != 0)
5458 temp = const_binop (BIT_AND_EXPR, temp,
5459 fold_convert (TREE_TYPE (c), mask));
5460 /* If necessary, convert the type back to match the type of C. */
5461 if (TYPE_UNSIGNED (type))
5462 temp = fold_convert (type, temp);
5464 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5467 /* For an expression that has the form
5468 (A && B) || ~B
5469 or
5470 (A || B) && ~B,
5471 we can drop one of the inner expressions and simplify to
5472 A || ~B
5473 or
5474 A && ~B
5475 LOC is the location of the resulting expression. OP is the inner
5476 logical operation; the left-hand side in the examples above, while CMPOP
5477 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5478 removing a condition that guards another, as in
5479 (A != NULL && A->...) || A == NULL
5480 which we must not transform. If RHS_ONLY is true, only eliminate the
5481 right-most operand of the inner logical operation. */
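/* For instance, in (a > 0 && b > 0) || b <= 0, the inner operand
   b > 0 is the inverse of the guard b <= 0 and can be dropped,
   simplifying the whole expression to a > 0 || b <= 0. */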
5483 static tree
5484 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5485 bool rhs_only)
5487 tree type = TREE_TYPE (cmpop);
5488 enum tree_code code = TREE_CODE (cmpop);
5489 enum tree_code truthop_code = TREE_CODE (op);
5490 tree lhs = TREE_OPERAND (op, 0);
5491 tree rhs = TREE_OPERAND (op, 1);
5492 tree orig_lhs = lhs, orig_rhs = rhs;
5493 enum tree_code rhs_code = TREE_CODE (rhs);
5494 enum tree_code lhs_code = TREE_CODE (lhs);
5495 enum tree_code inv_code;
5497 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5498 return NULL_TREE;
5500 if (TREE_CODE_CLASS (code) != tcc_comparison)
5501 return NULL_TREE;
5503 if (rhs_code == truthop_code)
5505 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5506 if (newrhs != NULL_TREE)
5508 rhs = newrhs;
5509 rhs_code = TREE_CODE (rhs);
5512 if (lhs_code == truthop_code && !rhs_only)
5514 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5515 if (newlhs != NULL_TREE)
5517 lhs = newlhs;
5518 lhs_code = TREE_CODE (lhs);
5522 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5523 if (inv_code == rhs_code
5524 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5525 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5526 return lhs;
5527 if (!rhs_only && inv_code == lhs_code
5528 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5529 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5530 return rhs;
5531 if (rhs != orig_rhs || lhs != orig_lhs)
5532 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5533 lhs, rhs);
5534 return NULL_TREE;
5537 /* Find ways of folding logical expressions of LHS and RHS:
5538 Try to merge two comparisons to the same innermost item.
5539 Look for range tests like "ch >= '0' && ch <= '9'".
5540 Look for combinations of simple terms on machines with expensive branches
5541 and evaluate the RHS unconditionally.
5543 For example, if we have p->a == 2 && p->b == 4 and we can make an
5544 object large enough to span both A and B, we can do this with a comparison
5545 against the object ANDed with a mask.
5547 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5548 operations to do this with one comparison.
5550 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5551 function and the one above.
5553 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5554 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5556 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5557 two operands.
5559 We return the simplified tree or 0 if no optimization is possible. */
5561 static tree
5562 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5563 tree lhs, tree rhs)
5565 /* If this is the "or" of two comparisons, we can do something if
5566 the comparisons are NE_EXPR. If this is the "and", we can do something
5567 if the comparisons are EQ_EXPR. I.e.,
5568 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5570 WANTED_CODE is this operation code. For single bit fields, we can
5571 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5572 comparison for one-bit fields. */
5574 enum tree_code wanted_code;
5575 enum tree_code lcode, rcode;
5576 tree ll_arg, lr_arg, rl_arg, rr_arg;
5577 tree ll_inner, lr_inner, rl_inner, rr_inner;
5578 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5579 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5580 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5581 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5582 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5583 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5584 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5585 scalar_int_mode lnmode, rnmode;
5586 tree ll_mask, lr_mask, rl_mask, rr_mask;
5587 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5588 tree l_const, r_const;
5589 tree lntype, rntype, result;
5590 HOST_WIDE_INT first_bit, end_bit;
5591 int volatilep;
5593 /* Start by getting the comparison codes. Fail if anything is volatile.
5594 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5595 it were surrounded with a NE_EXPR. */
5597 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5598 return 0;
5600 lcode = TREE_CODE (lhs);
5601 rcode = TREE_CODE (rhs);
5603 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5605 lhs = build2 (NE_EXPR, truth_type, lhs,
5606 build_int_cst (TREE_TYPE (lhs), 0));
5607 lcode = NE_EXPR;
5610 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5612 rhs = build2 (NE_EXPR, truth_type, rhs,
5613 build_int_cst (TREE_TYPE (rhs), 0));
5614 rcode = NE_EXPR;
5617 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5618 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5619 return 0;
5621 ll_arg = TREE_OPERAND (lhs, 0);
5622 lr_arg = TREE_OPERAND (lhs, 1);
5623 rl_arg = TREE_OPERAND (rhs, 0);
5624 rr_arg = TREE_OPERAND (rhs, 1);
5626 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5627 if (simple_operand_p (ll_arg)
5628 && simple_operand_p (lr_arg))
5630 if (operand_equal_p (ll_arg, rl_arg, 0)
5631 && operand_equal_p (lr_arg, rr_arg, 0))
5633 result = combine_comparisons (loc, code, lcode, rcode,
5634 truth_type, ll_arg, lr_arg);
5635 if (result)
5636 return result;
5638 else if (operand_equal_p (ll_arg, rr_arg, 0)
5639 && operand_equal_p (lr_arg, rl_arg, 0))
5641 result = combine_comparisons (loc, code, lcode,
5642 swap_tree_comparison (rcode),
5643 truth_type, ll_arg, lr_arg);
5644 if (result)
5645 return result;
5649 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5650 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5652 /* If the RHS can be evaluated unconditionally and its operands are
5653 simple, it wins to evaluate the RHS unconditionally on machines
5654 with expensive branches. In this case, this isn't a comparison
5655 that can be merged. */
5657 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5658 false) >= 2
5659 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5660 && simple_operand_p (rl_arg)
5661 && simple_operand_p (rr_arg))
5663 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5664 if (code == TRUTH_OR_EXPR
5665 && lcode == NE_EXPR && integer_zerop (lr_arg)
5666 && rcode == NE_EXPR && integer_zerop (rr_arg)
5667 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5668 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5669 return build2_loc (loc, NE_EXPR, truth_type,
5670 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5671 ll_arg, rl_arg),
5672 build_int_cst (TREE_TYPE (ll_arg), 0));
5674 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5675 if (code == TRUTH_AND_EXPR
5676 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5677 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5678 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5679 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5680 return build2_loc (loc, EQ_EXPR, truth_type,
5681 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5682 ll_arg, rl_arg),
5683 build_int_cst (TREE_TYPE (ll_arg), 0));
5686 /* See if the comparisons can be merged. Then get all the parameters for
5687 each side. */
5689 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5690 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5691 return 0;
5693 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5694 volatilep = 0;
5695 ll_inner = decode_field_reference (loc, &ll_arg,
5696 &ll_bitsize, &ll_bitpos, &ll_mode,
5697 &ll_unsignedp, &ll_reversep, &volatilep,
5698 &ll_mask, &ll_and_mask);
5699 lr_inner = decode_field_reference (loc, &lr_arg,
5700 &lr_bitsize, &lr_bitpos, &lr_mode,
5701 &lr_unsignedp, &lr_reversep, &volatilep,
5702 &lr_mask, &lr_and_mask);
5703 rl_inner = decode_field_reference (loc, &rl_arg,
5704 &rl_bitsize, &rl_bitpos, &rl_mode,
5705 &rl_unsignedp, &rl_reversep, &volatilep,
5706 &rl_mask, &rl_and_mask);
5707 rr_inner = decode_field_reference (loc, &rr_arg,
5708 &rr_bitsize, &rr_bitpos, &rr_mode,
5709 &rr_unsignedp, &rr_reversep, &volatilep,
5710 &rr_mask, &rr_and_mask);
5712 /* The inner operation on the lhs of each comparison must be the
5713 same if we are to be able to do anything.
5714 Then see if we have constants. If not, the same must be true for
5715 the rhs's. */
5716 if (volatilep
5717 || ll_reversep != rl_reversep
5718 || ll_inner == 0 || rl_inner == 0
5719 || ! operand_equal_p (ll_inner, rl_inner, 0))
5720 return 0;
5722 if (TREE_CODE (lr_arg) == INTEGER_CST
5723 && TREE_CODE (rr_arg) == INTEGER_CST)
5725 l_const = lr_arg, r_const = rr_arg;
5726 lr_reversep = ll_reversep;
5728 else if (lr_reversep != rr_reversep
5729 || lr_inner == 0 || rr_inner == 0
5730 || ! operand_equal_p (lr_inner, rr_inner, 0))
5731 return 0;
5732 else
5733 l_const = r_const = 0;
5735 /* If either comparison code is not correct for our logical operation,
5736 fail. However, we can convert a one-bit comparison against zero into
5737 the opposite comparison against that bit being set in the field. */
5739 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5740 if (lcode != wanted_code)
5742 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5744 /* Make the left operand unsigned, since we are only interested
5745 in the value of one bit. Otherwise we are doing the wrong
5746 thing below. */
5747 ll_unsignedp = 1;
5748 l_const = ll_mask;
5750 else
5751 return 0;
5754 /* This is analogous to the code for l_const above. */
5755 if (rcode != wanted_code)
5757 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5759 rl_unsignedp = 1;
5760 r_const = rl_mask;
5762 else
5763 return 0;
5766 /* See if we can find a mode that contains both fields being compared on
5767 the left. If we can't, fail. Otherwise, update all constants and masks
5768 to be relative to a field of that size. */
5769 first_bit = MIN (ll_bitpos, rl_bitpos);
5770 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5771 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5772 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
5773 volatilep, &lnmode))
5774 return 0;
5776 lnbitsize = GET_MODE_BITSIZE (lnmode);
5777 lnbitpos = first_bit & ~ (lnbitsize - 1);
5778 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5779 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5781 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5783 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5784 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5787 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5788 size_int (xll_bitpos));
5789 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5790 size_int (xrl_bitpos));
5792 if (l_const)
5794 l_const = fold_convert_loc (loc, lntype, l_const);
5795 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5796 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5797 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5798 fold_build1_loc (loc, BIT_NOT_EXPR,
5799 lntype, ll_mask))))
5801 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5803 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5806 if (r_const)
5808 r_const = fold_convert_loc (loc, lntype, r_const);
5809 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5810 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5811 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5812 fold_build1_loc (loc, BIT_NOT_EXPR,
5813 lntype, rl_mask))))
5815 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5817 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5821 /* If the right sides are not constant, do the same for them. Also,
5822 disallow this optimization if a size or signedness mismatch occurs
5823 between the left and right sides. */
5824 if (l_const == 0)
5826 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5827 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5828 /* Make sure the two fields on the right
5829 correspond to the left without being swapped. */
5830 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5831 return 0;
5833 first_bit = MIN (lr_bitpos, rr_bitpos);
5834 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5835 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5836 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
5837 volatilep, &rnmode))
5838 return 0;
5840 rnbitsize = GET_MODE_BITSIZE (rnmode);
5841 rnbitpos = first_bit & ~ (rnbitsize - 1);
5842 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5843 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5845 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5847 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5848 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5851 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5852 rntype, lr_mask),
5853 size_int (xlr_bitpos));
5854 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5855 rntype, rr_mask),
5856 size_int (xrr_bitpos));
5858 /* Make a mask that corresponds to both fields being compared.
5859 Do this for both items being compared. If the operands are the
5860 same size and the bits being compared are in the same position
5861 then we can do this by masking both and comparing the masked
5862 results. */
5863 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5864 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5865 if (lnbitsize == rnbitsize
5866 && xll_bitpos == xlr_bitpos
5867 && lnbitpos >= 0
5868 && rnbitpos >= 0)
5870 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
5871 lntype, lnbitsize, lnbitpos,
5872 ll_unsignedp || rl_unsignedp, ll_reversep);
5873 if (! all_ones_mask_p (ll_mask, lnbitsize))
5874 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5876 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
5877 rntype, rnbitsize, rnbitpos,
5878 lr_unsignedp || rr_unsignedp, lr_reversep);
5879 if (! all_ones_mask_p (lr_mask, rnbitsize))
5880 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5882 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5885 /* There is still another way we can do something: If both pairs of
5886 fields being compared are adjacent, we may be able to make a wider
5887 field containing them both.
5889 Note that we still must mask the lhs/rhs expressions. Furthermore,
5890 the mask must be shifted to account for the shift done by
5891 make_bit_field_ref. */
5892 if (((ll_bitsize + ll_bitpos == rl_bitpos
5893 && lr_bitsize + lr_bitpos == rr_bitpos)
5894 || (ll_bitpos == rl_bitpos + rl_bitsize
5895 && lr_bitpos == rr_bitpos + rr_bitsize))
5896 && ll_bitpos >= 0
5897 && rl_bitpos >= 0
5898 && lr_bitpos >= 0
5899 && rr_bitpos >= 0)
5901 tree type;
5903 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
5904 ll_bitsize + rl_bitsize,
5905 MIN (ll_bitpos, rl_bitpos),
5906 ll_unsignedp, ll_reversep);
5907 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
5908 lr_bitsize + rr_bitsize,
5909 MIN (lr_bitpos, rr_bitpos),
5910 lr_unsignedp, lr_reversep);
5912 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5913 size_int (MIN (xll_bitpos, xrl_bitpos)));
5914 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5915 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5917 /* Convert to the smaller type before masking out unwanted bits. */
5918 type = lntype;
5919 if (lntype != rntype)
5921 if (lnbitsize > rnbitsize)
5923 lhs = fold_convert_loc (loc, rntype, lhs);
5924 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5925 type = rntype;
5927 else if (lnbitsize < rnbitsize)
5929 rhs = fold_convert_loc (loc, lntype, rhs);
5930 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5931 type = lntype;
5935 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5936 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5938 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5939 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5941 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5944 return 0;
5947 /* Handle the case of comparisons with constants. If there is something in
5948 common between the masks, those bits of the constants must be the same.
5949 If not, the condition is always false. Test for this to avoid generating
5950 incorrect code below. */
5951 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5952 if (! integer_zerop (result)
5953 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5954 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5956 if (wanted_code == NE_EXPR)
5958 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5959 return constant_boolean_node (true, truth_type);
5961 else
5963 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5964 return constant_boolean_node (false, truth_type);
5968 if (lnbitpos < 0)
5969 return 0;
5971 /* Construct the expression we will return. First get the component
5972 reference we will make. Unless the mask is all ones the width of
5973 that field, perform the mask operation. Then compare with the
5974 merged constant. */
5975 result = make_bit_field_ref (loc, ll_inner, ll_arg,
5976 lntype, lnbitsize, lnbitpos,
5977 ll_unsignedp || rl_unsignedp, ll_reversep);
5979 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5980 if (! all_ones_mask_p (ll_mask, lnbitsize))
5981 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5983 return build2_loc (loc, wanted_code, truth_type, result,
5984 const_binop (BIT_IOR_EXPR, l_const, r_const));
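/* Illustrative, standalone sketch (not part of this file): the two
   headline transformations above, checked by brute force.  Two adjacent
   8-bit "fields" of one word compared against constants collapse into a
   single masked compare against a merged constant, and
   (a != 0) || (b != 0) collapses into (a | b) != 0.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (uint32_t w = 0; w < (1u << 16); w++)
    {
      int orig = (w & 0xffu) == 2 && ((w >> 8) & 0xffu) == 4;
      int merged = (w & 0xffffu) == (2u | (4u << 8));
      assert (orig == merged);
    }

  for (uint32_t a = 0; a < 8; a++)
    for (uint32_t b = 0; b < 8; b++)
      assert (((a != 0) || (b != 0)) == ((a | b) != 0));
  return 0;
}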
5987 /* T is an integer expression that is being multiplied, divided, or taken a
5988 modulus (CODE says which and what kind of divide or modulus) by a
5989 constant C. See if we can eliminate that operation by folding it with
5990 other operations already in T. WIDE_TYPE, if non-null, is a type that
5991 should be used for the computation if wider than our type.
5993 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5994 (X * 2) + (Y * 4). We must, however, be assured that either the original
5995 expression would not overflow or that overflow is undefined for the type
5996 in the language in question.
5998 If we return a non-null expression, it is an equivalent form of the
5999 original computation, but need not be in the original type.
6001 We set *STRICT_OVERFLOW_P to true if the return value depends on
6002 signed overflow being undefined. Otherwise we do not change
6003 *STRICT_OVERFLOW_P. */
6005 static tree
6006 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6007 bool *strict_overflow_p)
6009 /* To avoid exponential search depth, refuse to allow recursion past
6010 three levels. Beyond that (1) it's highly unlikely that we'll find
6011 something interesting and (2) we've probably processed it before
6012 when we built the inner expression. */
6014 static int depth;
6015 tree ret;
6017 if (depth > 3)
6018 return NULL;
6020 depth++;
6021 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6022 depth--;
6024 return ret;
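/* Illustrative, standalone sketch (not part of this file): the example
   from the comment above, (X * 8 + Y * 16) / 4 becoming X * 2 + Y * 4.
   The operands are kept small so the original expression cannot
   overflow, which is the precondition the function insists on.  */
#include <assert.h>

int
main (void)
{
  for (int x = -100; x <= 100; x++)
    for (int y = -100; y <= 100; y++)
      assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
  return 0;
}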
6027 static tree
6028 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6029 bool *strict_overflow_p)
6031 tree type = TREE_TYPE (t);
6032 enum tree_code tcode = TREE_CODE (t);
6033 tree ctype = (wide_type != 0
6034 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6035 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6036 ? wide_type : type);
6037 tree t1, t2;
6038 int same_p = tcode == code;
6039 tree op0 = NULL_TREE, op1 = NULL_TREE;
6040 bool sub_strict_overflow_p;
6042 /* Don't deal with constants of zero here; they confuse the code below. */
6043 if (integer_zerop (c))
6044 return NULL_TREE;
6046 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6047 op0 = TREE_OPERAND (t, 0);
6049 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6050 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6052 /* Note that we need not handle conditional operations here since fold
6053 already handles those cases. So just do arithmetic here. */
6054 switch (tcode)
6056 case INTEGER_CST:
6057 /* For a constant, we can always simplify if we are a multiply
6058 or (for divide and modulus) if it is a multiple of our constant. */
6059 if (code == MULT_EXPR
6060 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6061 TYPE_SIGN (type)))
6063 tree tem = const_binop (code, fold_convert (ctype, t),
6064 fold_convert (ctype, c));
6065 /* If the multiplication overflowed, we lost information on it.
6066 See PR68142 and PR69845. */
6067 if (TREE_OVERFLOW (tem))
6068 return NULL_TREE;
6069 return tem;
6071 break;
6073 CASE_CONVERT: case NON_LVALUE_EXPR:
6074 /* If op0 is an expression ... */
6075 if ((COMPARISON_CLASS_P (op0)
6076 || UNARY_CLASS_P (op0)
6077 || BINARY_CLASS_P (op0)
6078 || VL_EXP_CLASS_P (op0)
6079 || EXPRESSION_CLASS_P (op0))
6080 /* ... and has wrapping overflow, and its type is smaller
6081 than ctype, then we cannot pass through as widening. */
6082 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6083 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6084 && (TYPE_PRECISION (ctype)
6085 > TYPE_PRECISION (TREE_TYPE (op0))))
6086 /* ... or this is a truncation (t is narrower than op0),
6087 then we cannot pass through this narrowing. */
6088 || (TYPE_PRECISION (type)
6089 < TYPE_PRECISION (TREE_TYPE (op0)))
6090 /* ... or signedness changes for division or modulus,
6091 then we cannot pass through this conversion. */
6092 || (code != MULT_EXPR
6093 && (TYPE_UNSIGNED (ctype)
6094 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6095 /* ... or has undefined overflow while the converted to
6096 type has not, we cannot do the operation in the inner type
6097 as that would introduce undefined overflow. */
6098 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6099 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6100 && !TYPE_OVERFLOW_UNDEFINED (type))))
6101 break;
6103 /* Pass the constant down and see if we can make a simplification. If
6104 we can, replace this expression with the inner simplification for
6105 possible later conversion to our or some other type. */
6106 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6107 && TREE_CODE (t2) == INTEGER_CST
6108 && !TREE_OVERFLOW (t2)
6109 && (0 != (t1 = extract_muldiv (op0, t2, code,
6110 code == MULT_EXPR
6111 ? ctype : NULL_TREE,
6112 strict_overflow_p))))
6113 return t1;
6114 break;
6116 case ABS_EXPR:
6117 /* If widening the type changes it from signed to unsigned, then we
6118 must avoid building ABS_EXPR itself as unsigned. */
6119 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6121 tree cstype = (*signed_type_for) (ctype);
6122 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6123 != 0)
6125 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6126 return fold_convert (ctype, t1);
6128 break;
6130 /* If the constant is negative, we cannot simplify this. */
6131 if (tree_int_cst_sgn (c) == -1)
6132 break;
6133 /* FALLTHROUGH */
6134 case NEGATE_EXPR:
6135 /* For division and modulus, type can't be unsigned, as e.g.
6136 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6137 For signed types, even with wrapping overflow, this is fine. */
6138 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6139 break;
6140 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6141 != 0)
6142 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6143 break;
6145 case MIN_EXPR: case MAX_EXPR:
6146 /* If widening the type changes the signedness, then we can't perform
6147 this optimization as that changes the result. */
6148 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6149 break;
6151 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6152 sub_strict_overflow_p = false;
6153 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6154 &sub_strict_overflow_p)) != 0
6155 && (t2 = extract_muldiv (op1, c, code, wide_type,
6156 &sub_strict_overflow_p)) != 0)
6158 if (tree_int_cst_sgn (c) < 0)
6159 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6160 if (sub_strict_overflow_p)
6161 *strict_overflow_p = true;
6162 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6163 fold_convert (ctype, t2));
6165 break;
6167 case LSHIFT_EXPR: case RSHIFT_EXPR:
6168 /* If the second operand is constant, this is a multiplication
6169 or floor division, by a power of two, so we can treat it that
6170 way unless the multiplier or divisor overflows. Signed
6171 left-shift overflow is implementation-defined rather than
6172 undefined in C90, so do not convert signed left shift into
6173 multiplication. */
6174 if (TREE_CODE (op1) == INTEGER_CST
6175 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6176 /* const_binop may not detect overflow correctly,
6177 so check for it explicitly here. */
6178 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6179 wi::to_wide (op1))
6180 && 0 != (t1 = fold_convert (ctype,
6181 const_binop (LSHIFT_EXPR,
6182 size_one_node,
6183 op1)))
6184 && !TREE_OVERFLOW (t1))
6185 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6186 ? MULT_EXPR : FLOOR_DIV_EXPR,
6187 ctype,
6188 fold_convert (ctype, op0),
6189 t1),
6190 c, code, wide_type, strict_overflow_p);
6191 break;
6193 case PLUS_EXPR: case MINUS_EXPR:
6194 /* See if we can eliminate the operation on both sides. If we can, we
6195 can return a new PLUS or MINUS. If we can't, the only remaining
6196 cases where we can do anything are if the second operand is a
6197 constant. */
6198 sub_strict_overflow_p = false;
6199 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6200 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6201 if (t1 != 0 && t2 != 0
6202 && TYPE_OVERFLOW_WRAPS (ctype)
6203 && (code == MULT_EXPR
6204 /* If not multiplication, we can only do this if both operands
6205 are divisible by c. */
6206 || (multiple_of_p (ctype, op0, c)
6207 && multiple_of_p (ctype, op1, c))))
6209 if (sub_strict_overflow_p)
6210 *strict_overflow_p = true;
6211 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6212 fold_convert (ctype, t2));
6215 /* If this was a subtraction, negate OP1 and set it to be an addition.
6216 This simplifies the logic below. */
6217 if (tcode == MINUS_EXPR)
6219 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6220 /* If OP1 was not easily negatable, the constant may be OP0. */
6221 if (TREE_CODE (op0) == INTEGER_CST)
6223 std::swap (op0, op1);
6224 std::swap (t1, t2);
6228 if (TREE_CODE (op1) != INTEGER_CST)
6229 break;
6231 /* If either OP1 or C are negative, this optimization is not safe for
6232 some of the division and remainder types while for others we need
6233 to change the code. */
6234 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6236 if (code == CEIL_DIV_EXPR)
6237 code = FLOOR_DIV_EXPR;
6238 else if (code == FLOOR_DIV_EXPR)
6239 code = CEIL_DIV_EXPR;
6240 else if (code != MULT_EXPR
6241 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6242 break;
6245 /* If it's a multiply or a division/modulus operation of a multiple
6246 of our constant, do the operation and verify it doesn't overflow. */
6247 if (code == MULT_EXPR
6248 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6249 TYPE_SIGN (type)))
6251 op1 = const_binop (code, fold_convert (ctype, op1),
6252 fold_convert (ctype, c));
6253 /* We allow the constant to overflow with wrapping semantics. */
6254 if (op1 == 0
6255 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6256 break;
6258 else
6259 break;
6261 /* If we have an unsigned type, we cannot widen the operation since it
6262 will change the result if the original computation overflowed. */
6263 if (TYPE_UNSIGNED (ctype) && ctype != type)
6264 break;
6266 /* The last case is if we are a multiply. In that case, we can
6267 apply the distributive law to commute the multiply and addition
6268 if the multiplication of the constants doesn't overflow
6269 and overflow is defined. With undefined overflow
6270 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6271 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6272 return fold_build2 (tcode, ctype,
6273 fold_build2 (code, ctype,
6274 fold_convert (ctype, op0),
6275 fold_convert (ctype, c)),
6276 op1);
6278 break;
6280 case MULT_EXPR:
6281 /* We have a special case here if we are doing something like
6282 (C * 8) % 4 since we know that's zero. */
6283 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6284 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6285 /* If the multiplication can overflow we cannot optimize this. */
6286 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6287 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6288 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6289 TYPE_SIGN (type)))
6291 *strict_overflow_p = true;
6292 return omit_one_operand (type, integer_zero_node, op0);
6295 /* ... fall through ... */
6297 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6298 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6299 /* If we can extract our operation from the LHS, do so and return a
6300 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6301 do something only if the second operand is a constant. */
6302 if (same_p
6303 && TYPE_OVERFLOW_WRAPS (ctype)
6304 && (t1 = extract_muldiv (op0, c, code, wide_type,
6305 strict_overflow_p)) != 0)
6306 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6307 fold_convert (ctype, op1));
6308 else if (tcode == MULT_EXPR && code == MULT_EXPR
6309 && TYPE_OVERFLOW_WRAPS (ctype)
6310 && (t1 = extract_muldiv (op1, c, code, wide_type,
6311 strict_overflow_p)) != 0)
6312 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6313 fold_convert (ctype, t1));
6314 else if (TREE_CODE (op1) != INTEGER_CST)
6315 return 0;
6317 /* If these are the same operation types, we can associate them
6318 assuming no overflow. */
6319 if (tcode == code)
6321 bool overflow_p = false;
6322 bool overflow_mul_p;
6323 signop sign = TYPE_SIGN (ctype);
6324 unsigned prec = TYPE_PRECISION (ctype);
6325 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6326 wi::to_wide (c, prec),
6327 sign, &overflow_mul_p);
6328 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6329 if (overflow_mul_p
6330 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6331 overflow_p = true;
6332 if (!overflow_p)
6333 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6334 wide_int_to_tree (ctype, mul));
6337 /* If these operations "cancel" each other, we have the main
6338 optimizations of this pass, which occur when either constant is a
6339 multiple of the other, in which case we replace this with either an
6340 operation of CODE or TCODE.
6342 If we have an unsigned type, we cannot do this since it will change
6343 the result if the original computation overflowed. */
6344 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6345 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6346 || (tcode == MULT_EXPR
6347 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6348 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6349 && code != MULT_EXPR)))
6351 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6352 TYPE_SIGN (type)))
6354 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6355 *strict_overflow_p = true;
6356 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6357 fold_convert (ctype,
6358 const_binop (TRUNC_DIV_EXPR,
6359 op1, c)));
6361 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6362 TYPE_SIGN (type)))
6364 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6365 *strict_overflow_p = true;
6366 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6367 fold_convert (ctype,
6368 const_binop (TRUNC_DIV_EXPR,
6369 c, op1)));
6372 break;
6374 default:
6375 break;
6378 return 0;
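/* Illustrative, standalone sketch (not part of this file): the shift
   cases above treat a shift by a constant as a multiplication or floor
   division by a power of two, which is exact for unsigned operands.  */
#include <assert.h>

int
main (void)
{
  for (unsigned u = 0; u < 1000; u++)
    {
      assert ((u >> 3) == u / 8);   /* RSHIFT_EXPR as FLOOR_DIV_EXPR */
      assert ((u << 3) == u * 8);   /* LSHIFT_EXPR as MULT_EXPR */
    }
  return 0;
}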
6381 /* Return a node which has the indicated constant VALUE (either 0 or
6382 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6383 and is of the indicated TYPE. */
6385 tree
6386 constant_boolean_node (bool value, tree type)
6388 if (type == integer_type_node)
6389 return value ? integer_one_node : integer_zero_node;
6390 else if (type == boolean_type_node)
6391 return value ? boolean_true_node : boolean_false_node;
6392 else if (TREE_CODE (type) == VECTOR_TYPE)
6393 return build_vector_from_val (type,
6394 build_int_cst (TREE_TYPE (type),
6395 value ? -1 : 0));
6396 else
6397 return fold_convert (type, value ? integer_one_node : integer_zero_node);
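/* Illustrative, standalone sketch (not part of this file), using GCC's
   vector extension: lane-wise comparisons yield all-ones (-1) for true
   and 0 for false, matching the {-1,-1,...} / {0,0,...} encoding chosen
   above for vector truth values.  */
#include <assert.h>

typedef int v4si __attribute__ ((vector_size (16)));

int
main (void)
{
  v4si a = { 1, 2, 3, 4 };
  v4si b = { 1, 0, 3, 0 };
  v4si r = a == b;  /* per-lane truth values */
  assert (r[0] == -1 && r[1] == 0 && r[2] == -1 && r[3] == 0);
  return 0;
}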
6401 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6402 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6403 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6404 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6405 COND is the first argument to CODE; otherwise (as in the example
6406 given here), it is the second argument. TYPE is the type of the
6407 original expression. Return NULL_TREE if no simplification is
6408 possible. */
6410 static tree
6411 fold_binary_op_with_conditional_arg (location_t loc,
6412 enum tree_code code,
6413 tree type, tree op0, tree op1,
6414 tree cond, tree arg, int cond_first_p)
6416 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6417 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6418 tree test, true_value, false_value;
6419 tree lhs = NULL_TREE;
6420 tree rhs = NULL_TREE;
6421 enum tree_code cond_code = COND_EXPR;
6423 if (TREE_CODE (cond) == COND_EXPR
6424 || TREE_CODE (cond) == VEC_COND_EXPR)
6426 test = TREE_OPERAND (cond, 0);
6427 true_value = TREE_OPERAND (cond, 1);
6428 false_value = TREE_OPERAND (cond, 2);
6429 /* If this operand is an expression that throws (and so has void
6430 type), it does not make sense to try to perform a logical or
6431 arithmetic operation involving it. */
6432 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6433 lhs = true_value;
6434 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6435 rhs = false_value;
6437 else if (!(TREE_CODE (type) != VECTOR_TYPE
6438 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6440 tree testtype = TREE_TYPE (cond);
6441 test = cond;
6442 true_value = constant_boolean_node (true, testtype);
6443 false_value = constant_boolean_node (false, testtype);
6445 else
6446 /* Detect the case of mixing vector and scalar types - bail out. */
6447 return NULL_TREE;
6449 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6450 cond_code = VEC_COND_EXPR;
6452 /* This transformation is only worthwhile if we don't have to wrap ARG
6453 in a SAVE_EXPR and the operation can be simplified without recursing
6454 on at least one of the branches once it is pushed inside the COND_EXPR. */
6455 if (!TREE_CONSTANT (arg)
6456 && (TREE_SIDE_EFFECTS (arg)
6457 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6458 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6459 return NULL_TREE;
6461 arg = fold_convert_loc (loc, arg_type, arg);
6462 if (lhs == 0)
6464 true_value = fold_convert_loc (loc, cond_type, true_value);
6465 if (cond_first_p)
6466 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6467 else
6468 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6470 if (rhs == 0)
6472 false_value = fold_convert_loc (loc, cond_type, false_value);
6473 if (cond_first_p)
6474 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6475 else
6476 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6479 /* Check that we have simplified at least one of the branches. */
6480 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6481 return NULL_TREE;
6483 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
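/* Illustrative, standalone sketch (not part of this file): both shapes
   of the distribution above, checked by brute force.  The real code
   additionally requires that at least one branch simplify, so the
   transformation pays for itself.  */
#include <assert.h>

int
main (void)
{
  for (int a = -3; a <= 3; a++)
    for (int x = -3; x <= 3; x++)
      for (int y = -3; y <= 3; y++)
        for (int b = 0; b <= 1; b++)
          {
            assert (a + (b ? x : y) == (b ? (a + x) : (a + y)));
            assert (a + (x < y) == ((x < y) ? (a + 1) : (a + 0)));
          }
  return 0;
}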
6487 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6489 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6490 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6491 ADDEND is the same as X.
6493 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6494 and finite. The problematic cases are when X is zero, and its mode
6495 has signed zeros. In the case of rounding towards -infinity,
6496 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6497 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6499 bool
6500 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6502 if (!real_zerop (addend))
6503 return false;
6505 /* Don't allow the fold with -fsignaling-nans. */
6506 if (HONOR_SNANS (element_mode (type)))
6507 return false;
6509 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6510 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6511 return true;
6513 /* In a vector or complex, we would need to check the sign of all zeros. */
6514 if (TREE_CODE (addend) != REAL_CST)
6515 return false;
6517 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6518 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6519 negate = !negate;
6521 /* The mode has signed zeros, and we have to honor their sign.
6522 In this situation, there is only one case we can return true for.
6523 X - 0 is the same as X unless rounding towards -infinity is
6524 supported. */
6525 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
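/* Illustrative, standalone sketch (not part of this file), assuming the
   default round-to-nearest mode: -0.0 + 0.0 is +0.0, so X + 0.0 does not
   preserve X when X is -0.0, while X - 0.0 does.  That asymmetry is why
   only the NEGATE case can return true when signed zeros matter.  */
#include <assert.h>
#include <math.h>

int
main (void)
{
  double minus_zero = -0.0;
  assert (!signbit (minus_zero + 0.0));  /* sign of -0.0 is lost */
  assert (signbit (minus_zero - 0.0));   /* sign of -0.0 is kept */
  return 0;
}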
6528 /* Subroutine of match.pd that optimizes comparisons of a division by
6529 a nonzero integer constant against an integer constant, i.e.
6530 X/C1 op C2.
6532 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6533 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
6535 enum tree_code
6536 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
6537 tree *hi, bool *neg_overflow)
6539 tree prod, tmp, type = TREE_TYPE (c1);
6540 signop sign = TYPE_SIGN (type);
6541 bool overflow;
6543 /* We have to do this the hard way to detect unsigned overflow.
6544 prod = int_const_binop (MULT_EXPR, c1, c2); */
6545 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
6546 prod = force_fit_type (type, val, -1, overflow);
6547 *neg_overflow = false;
6549 if (sign == UNSIGNED)
6551 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6552 *lo = prod;
6554 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6555 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
6556 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
6558 else if (tree_int_cst_sgn (c1) >= 0)
6560 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6561 switch (tree_int_cst_sgn (c2))
6563 case -1:
6564 *neg_overflow = true;
6565 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
6566 *hi = prod;
6567 break;
6569 case 0:
6570 *lo = fold_negate_const (tmp, type);
6571 *hi = tmp;
6572 break;
6574 case 1:
6575 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
6576 *lo = prod;
6577 break;
6579 default:
6580 gcc_unreachable ();
6583 else
6585 /* A negative divisor reverses the relational operators. */
6586 code = swap_tree_comparison (code);
6588 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
6589 switch (tree_int_cst_sgn (c2))
6591 case -1:
6592 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
6593 *lo = prod;
6594 break;
6596 case 0:
6597 *hi = fold_negate_const (tmp, type);
6598 *lo = tmp;
6599 break;
6601 case 1:
6602 *neg_overflow = true;
6603 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
6604 *hi = prod;
6605 break;
6607 default:
6608 gcc_unreachable ();
6612 if (code != EQ_EXPR && code != NE_EXPR)
6613 return code;
6615 if (TREE_OVERFLOW (*lo)
6616 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
6617 *lo = NULL_TREE;
6618 if (TREE_OVERFLOW (*hi)
6619 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
6620 *hi = NULL_TREE;
6622 return code;
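/* Illustrative, standalone sketch (not part of this file): for c1 = 3
   and c2 = 2 the bounds computed above are *lo = 6 and *hi = 8, i.e.
   X / 3 == 2 holds exactly for X in [6, 8] under C's truncating
   division.  */
#include <assert.h>

int
main (void)
{
  for (int x = -20; x <= 20; x++)
    assert ((x / 3 == 2) == (x >= 6 && x <= 8));
  return 0;
}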
6626 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6627 equality/inequality test, then return a simplified form of the test
6628 using a sign test. Otherwise return NULL. TYPE is the desired
6629 result type. */
6631 static tree
6632 fold_single_bit_test_into_sign_test (location_t loc,
6633 enum tree_code code, tree arg0, tree arg1,
6634 tree result_type)
6636 /* If this is testing a single bit, we can optimize the test. */
6637 if ((code == NE_EXPR || code == EQ_EXPR)
6638 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6639 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6641 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6642 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6643 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6645 if (arg00 != NULL_TREE
6646 /* This is only a win if casting to a signed type is cheap,
6647 i.e. when arg00's type is not a partial mode. */
6648 && type_has_mode_precision_p (TREE_TYPE (arg00)))
6650 tree stype = signed_type_for (TREE_TYPE (arg00));
6651 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6652 result_type,
6653 fold_convert_loc (loc, stype, arg00),
6654 build_int_cst (stype, 0));
6658 return NULL_TREE;
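/* Illustrative, standalone sketch (not part of this file), assuming the
   usual two's complement mapping between uint32_t and int32_t: masking
   with the sign bit is the same test as a signed comparison against
   zero.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t tests[] = { 0, 1, 0x7fffffffu, 0x80000000u, 0xffffffffu };
  for (unsigned i = 0; i < sizeof tests / sizeof tests[0]; i++)
    {
      uint32_t a = tests[i];
      assert (((a & 0x80000000u) != 0) == ((int32_t) a < 0));
      assert (((a & 0x80000000u) == 0) == ((int32_t) a >= 0));
    }
  return 0;
}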
6661 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6662 equality/inequality test, then return a simplified form of
6663 the test using shifts and logical operations. Otherwise return
6664 NULL. TYPE is the desired result type. */
6666 tree
6667 fold_single_bit_test (location_t loc, enum tree_code code,
6668 tree arg0, tree arg1, tree result_type)
6670 /* If this is testing a single bit, we can optimize the test. */
6671 if ((code == NE_EXPR || code == EQ_EXPR)
6672 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6673 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6675 tree inner = TREE_OPERAND (arg0, 0);
6676 tree type = TREE_TYPE (arg0);
6677 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6678 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
6679 int ops_unsigned;
6680 tree signed_type, unsigned_type, intermediate_type;
6681 tree tem, one;
6683 /* First, see if we can fold the single bit test into a sign-bit
6684 test. */
6685 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6686 result_type);
6687 if (tem)
6688 return tem;
6690 /* Otherwise we have (A & C) != 0 where C is a single bit,
6691 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6692 Similarly for (A & C) == 0. */
6694 /* If INNER is a right shift of a constant and it plus BITNUM does
6695 not overflow, adjust BITNUM and INNER. */
6696 if (TREE_CODE (inner) == RSHIFT_EXPR
6697 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6698 && bitnum < TYPE_PRECISION (type)
6699 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
6700 TYPE_PRECISION (type) - bitnum))
6702 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6703 inner = TREE_OPERAND (inner, 0);
6706 /* If we are going to be able to omit the AND below, we must do our
6707 operations as unsigned. If we must use the AND, we have a choice.
6708 Normally unsigned is faster, but for some machines signed is. */
6709 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
6710 && !flag_syntax_only) ? 0 : 1;
6712 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6713 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6714 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6715 inner = fold_convert_loc (loc, intermediate_type, inner);
6717 if (bitnum != 0)
6718 inner = build2 (RSHIFT_EXPR, intermediate_type,
6719 inner, size_int (bitnum));
6721 one = build_int_cst (intermediate_type, 1);
6723 if (code == EQ_EXPR)
6724 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6726 /* Put the AND last so it can combine with more things. */
6727 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6729 /* Make sure to return the proper type. */
6730 inner = fold_convert_loc (loc, result_type, inner);
6732 return inner;
6734 return NULL_TREE;
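/* Illustrative, standalone sketch (not part of this file): the
   shift-and-mask rewrite built above, with the EQ_EXPR variant getting
   the extra XOR with one.  */
#include <assert.h>

int
main (void)
{
  for (unsigned a = 0; a < 256; a++)
    for (unsigned n = 0; n < 8; n++)
      {
        assert (((a & (1u << n)) != 0) == ((a >> n) & 1));
        assert (((a & (1u << n)) == 0) == (((a >> n) & 1) ^ 1));
      }
  return 0;
}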
6737 /* Test whether it is preferable to swap two operands, ARG0 and
6738 ARG1, for example because ARG0 is an integer constant and ARG1
6739 isn't. */
6741 bool
6742 tree_swap_operands_p (const_tree arg0, const_tree arg1)
6744 if (CONSTANT_CLASS_P (arg1))
6745 return 0;
6746 if (CONSTANT_CLASS_P (arg0))
6747 return 1;
6749 STRIP_NOPS (arg0);
6750 STRIP_NOPS (arg1);
6752 if (TREE_CONSTANT (arg1))
6753 return 0;
6754 if (TREE_CONSTANT (arg0))
6755 return 1;
6757 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6758 for commutative and comparison operators. Ensuring a canonical
6759 form allows the optimizers to find additional redundancies without
6760 having to explicitly check for both orderings. */
6761 if (TREE_CODE (arg0) == SSA_NAME
6762 && TREE_CODE (arg1) == SSA_NAME
6763 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6764 return 1;
6766 /* Put SSA_NAMEs last. */
6767 if (TREE_CODE (arg1) == SSA_NAME)
6768 return 0;
6769 if (TREE_CODE (arg0) == SSA_NAME)
6770 return 1;
6772 /* Put variables last. */
6773 if (DECL_P (arg1))
6774 return 0;
6775 if (DECL_P (arg0))
6776 return 1;
6778 return 0;
6782 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6783 means A >= Y && A != MAX, but in this case we know that
6784 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6786 static tree
6787 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6789 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6791 if (TREE_CODE (bound) == LT_EXPR)
6792 a = TREE_OPERAND (bound, 0);
6793 else if (TREE_CODE (bound) == GT_EXPR)
6794 a = TREE_OPERAND (bound, 1);
6795 else
6796 return NULL_TREE;
6798 typea = TREE_TYPE (a);
6799 if (!INTEGRAL_TYPE_P (typea)
6800 && !POINTER_TYPE_P (typea))
6801 return NULL_TREE;
6803 if (TREE_CODE (ineq) == LT_EXPR)
6805 a1 = TREE_OPERAND (ineq, 1);
6806 y = TREE_OPERAND (ineq, 0);
6808 else if (TREE_CODE (ineq) == GT_EXPR)
6810 a1 = TREE_OPERAND (ineq, 0);
6811 y = TREE_OPERAND (ineq, 1);
6813 else
6814 return NULL_TREE;
6816 if (TREE_TYPE (a1) != typea)
6817 return NULL_TREE;
6819 if (POINTER_TYPE_P (typea))
6821 /* Convert the pointer types into integer before taking the difference. */
6822 tree ta = fold_convert_loc (loc, ssizetype, a);
6823 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6824 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6826 else
6827 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6829 if (!diff || !integer_onep (diff))
6830 return NULL_TREE;
6832 return fold_build2_loc (loc, GE_EXPR, type, a, y);
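/* Illustrative, standalone sketch (not part of this file): A + 1 > Y is
   the same as A >= Y whenever A + 1 does not overflow, which the bound
   A < X guarantees in the fold above.  The demo uses small operands so
   the addition itself is always safe.  */
#include <assert.h>

int
main (void)
{
  for (int a = -5; a <= 5; a++)
    for (int x = -5; x <= 5; x++)
      for (int y = -5; y <= 5; y++)
        assert (((a < x) && (a + 1 > y)) == ((a < x) && (a >= y)));
  return 0;
}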
6835 /* Fold a sum or difference of at least one multiplication.
6836 Returns the folded tree or NULL if no simplification could be made. */
6838 static tree
6839 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6840 tree arg0, tree arg1)
6842 tree arg00, arg01, arg10, arg11;
6843 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6845 /* (A * C) +- (B * C) -> (A+-B) * C.
6846 (A * C) +- A -> A * (C+-1).
6847 We are most concerned about the case where C is a constant,
6848 but other combinations show up during loop reduction. Since
6849 it is not difficult, try all four possibilities. */
6851 if (TREE_CODE (arg0) == MULT_EXPR)
6853 arg00 = TREE_OPERAND (arg0, 0);
6854 arg01 = TREE_OPERAND (arg0, 1);
6856 else if (TREE_CODE (arg0) == INTEGER_CST)
6858 arg00 = build_one_cst (type);
6859 arg01 = arg0;
6861 else
6863 /* We cannot generate constant 1 for fract. */
6864 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6865 return NULL_TREE;
6866 arg00 = arg0;
6867 arg01 = build_one_cst (type);
6869 if (TREE_CODE (arg1) == MULT_EXPR)
6871 arg10 = TREE_OPERAND (arg1, 0);
6872 arg11 = TREE_OPERAND (arg1, 1);
6874 else if (TREE_CODE (arg1) == INTEGER_CST)
6876 arg10 = build_one_cst (type);
6877 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6878 the purpose of this canonicalization. */
6879 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
6880 && negate_expr_p (arg1)
6881 && code == PLUS_EXPR)
6883 arg11 = negate_expr (arg1);
6884 code = MINUS_EXPR;
6886 else
6887 arg11 = arg1;
6889 else
6891 /* We cannot generate constant 1 for fract. */
6892 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6893 return NULL_TREE;
6894 arg10 = arg1;
6895 arg11 = build_one_cst (type);
6897 same = NULL_TREE;
6899 /* Prefer factoring a common non-constant. */
6900 if (operand_equal_p (arg00, arg10, 0))
6901 same = arg00, alt0 = arg01, alt1 = arg11;
6902 else if (operand_equal_p (arg01, arg11, 0))
6903 same = arg01, alt0 = arg00, alt1 = arg10;
6904 else if (operand_equal_p (arg00, arg11, 0))
6905 same = arg00, alt0 = arg01, alt1 = arg10;
6906 else if (operand_equal_p (arg01, arg10, 0))
6907 same = arg01, alt0 = arg00, alt1 = arg11;
6909 /* No identical multiplicands; see if we can find a common
6910 power-of-two factor in non-power-of-two multiplies. This
6911 can help in multi-dimensional array access. */
6912 else if (tree_fits_shwi_p (arg01)
6913 && tree_fits_shwi_p (arg11))
6915 HOST_WIDE_INT int01, int11, tmp;
6916 bool swap = false;
6917 tree maybe_same;
6918 int01 = tree_to_shwi (arg01);
6919 int11 = tree_to_shwi (arg11);
6921 /* Move min of absolute values to int11. */
6922 if (absu_hwi (int01) < absu_hwi (int11))
6924 tmp = int01, int01 = int11, int11 = tmp;
6925 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6926 maybe_same = arg01;
6927 swap = true;
6929 else
6930 maybe_same = arg11;
6932 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6933 /* The remainder should not be a constant, otherwise we
6934 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6935 increased the number of multiplications necessary. */
6936 && TREE_CODE (arg10) != INTEGER_CST)
6938 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6939 build_int_cst (TREE_TYPE (arg00),
6940 int01 / int11));
6941 alt1 = arg10;
6942 same = maybe_same;
6943 if (swap)
6944 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6948 if (!same)
6949 return NULL_TREE;
6951 if (! INTEGRAL_TYPE_P (type)
6952 || TYPE_OVERFLOW_WRAPS (type)
6953 /* We are neither factoring zero nor minus one. */
6954 || TREE_CODE (same) == INTEGER_CST)
6955 return fold_build2_loc (loc, MULT_EXPR, type,
6956 fold_build2_loc (loc, code, type,
6957 fold_convert_loc (loc, type, alt0),
6958 fold_convert_loc (loc, type, alt1)),
6959 fold_convert_loc (loc, type, same));
6961 /* Same may be zero and thus the operation 'code' may overflow. Likewise
6962 same may be minus one and thus the multiplication may overflow. Perform
6963 the operations in an unsigned type. */
6964 tree utype = unsigned_type_for (type);
6965 tree tem = fold_build2_loc (loc, code, utype,
6966 fold_convert_loc (loc, utype, alt0),
6967 fold_convert_loc (loc, utype, alt1));
6968 /* If the sum evaluated to a constant that is not -INF the multiplication
6969 cannot overflow. */
6970 if (TREE_CODE (tem) == INTEGER_CST
6971 && (wi::to_wide (tem)
6972 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
6973 return fold_build2_loc (loc, MULT_EXPR, type,
6974 fold_convert (type, tem), same);
6976 return fold_convert_loc (loc, type,
6977 fold_build2_loc (loc, MULT_EXPR, utype, tem,
6978 fold_convert_loc (loc, utype, same)));
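/* Illustrative, standalone sketch (not part of this file): the basic
   factoring (A * C) + (B * C) => (A + B) * C, done here in an unsigned
   type, mirroring the function's fallback when signed overflow is
   undefined and A + B might wrap.  */
#include <assert.h>

int
main (void)
{
  for (unsigned a = 0; a < 50; a++)
    for (unsigned b = 0; b < 50; b++)
      for (unsigned c = 0; c < 50; c++)
        assert (a * c + b * c == (a + b) * c);
  return 0;
}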
6981 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6982 specified by EXPR into the buffer PTR of length LEN bytes.
6983 Return the number of bytes placed in the buffer, or zero
6984 upon failure. */
6986 static int
6987 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
6989 tree type = TREE_TYPE (expr);
6990 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
6991 int byte, offset, word, words;
6992 unsigned char value;
6994 if ((off == -1 && total_bytes > len) || off >= total_bytes)
6995 return 0;
6996 if (off == -1)
6997 off = 0;
6999 if (ptr == NULL)
7000 /* Dry run. */
7001 return MIN (len, total_bytes - off);
7003 words = total_bytes / UNITS_PER_WORD;
7005 for (byte = 0; byte < total_bytes; byte++)
7007 int bitpos = byte * BITS_PER_UNIT;
7008 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7009 number of bytes. */
7010 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7012 if (total_bytes > UNITS_PER_WORD)
7014 word = byte / UNITS_PER_WORD;
7015 if (WORDS_BIG_ENDIAN)
7016 word = (words - 1) - word;
7017 offset = word * UNITS_PER_WORD;
7018 if (BYTES_BIG_ENDIAN)
7019 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7020 else
7021 offset += byte % UNITS_PER_WORD;
7023 else
7024 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7025 if (offset >= off && offset - off < len)
7026 ptr[offset - off] = value;
7028 return MIN (len, total_bytes - off);
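/* Illustrative, standalone sketch (not part of this file): the byte
   placement above, reduced to its essence.  Byte I of a little-endian
   target holds bits [8*I, 8*I + 8); a big-endian target mirrors the
   offsets.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t v = 0x11223344u;
  unsigned char little[4], big[4];
  for (int byte = 0; byte < 4; byte++)
    {
      unsigned char b = (v >> (8 * byte)) & 0xff;
      little[byte] = b;    /* BYTES_BIG_ENDIAN == 0 */
      big[3 - byte] = b;   /* BYTES_BIG_ENDIAN == 1 */
    }
  assert (little[0] == 0x44 && big[0] == 0x11);
  return 0;
}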
7032 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7033 specified by EXPR into the buffer PTR of length LEN bytes.
7034 Return the number of bytes placed in the buffer, or zero
7035 upon failure. */
7037 static int
7038 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7040 tree type = TREE_TYPE (expr);
7041 scalar_mode mode = SCALAR_TYPE_MODE (type);
7042 int total_bytes = GET_MODE_SIZE (mode);
7043 FIXED_VALUE_TYPE value;
7044 tree i_value, i_type;
7046 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7047 return 0;
7049 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7051 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7052 return 0;
7054 value = TREE_FIXED_CST (expr);
7055 i_value = double_int_to_tree (i_type, value.data);
7057 return native_encode_int (i_value, ptr, len, off);
7061 /* Subroutine of native_encode_expr. Encode the REAL_CST
7062 specified by EXPR into the buffer PTR of length LEN bytes.
7063 Return the number of bytes placed in the buffer, or zero
7064 upon failure. */
7066 static int
7067 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7069 tree type = TREE_TYPE (expr);
7070 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7071 int byte, offset, word, words, bitpos;
7072 unsigned char value;
7074 /* There are always 32 bits in each long, no matter the size of
7075 the host's long. We handle floating point representations with
7076 up to 192 bits. */
7077 long tmp[6];
7079 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7080 return 0;
7081 if (off == -1)
7082 off = 0;
7084 if (ptr == NULL)
7085 /* Dry run. */
7086 return MIN (len, total_bytes - off);
7088 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7090 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7092 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7093 bitpos += BITS_PER_UNIT)
7095 byte = (bitpos / BITS_PER_UNIT) & 3;
7096 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7098 if (UNITS_PER_WORD < 4)
7100 word = byte / UNITS_PER_WORD;
7101 if (WORDS_BIG_ENDIAN)
7102 word = (words - 1) - word;
7103 offset = word * UNITS_PER_WORD;
7104 if (BYTES_BIG_ENDIAN)
7105 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7106 else
7107 offset += byte % UNITS_PER_WORD;
7109 else
7111 offset = byte;
7112 if (BYTES_BIG_ENDIAN)
7114 /* Reverse bytes within each long, or within the entire float
7115 if it's smaller than a long (for HFmode). */
7116 offset = MIN (3, total_bytes - 1) - offset;
7117 gcc_assert (offset >= 0);
7120 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7121 if (offset >= off
7122 && offset - off < len)
7123 ptr[offset - off] = value;
7125 return MIN (len, total_bytes - off);
7128 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7129 specified by EXPR into the buffer PTR of length LEN bytes.
7130 Return the number of bytes placed in the buffer, or zero
7131 upon failure. */
7133 static int
7134 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7136 int rsize, isize;
7137 tree part;
7139 part = TREE_REALPART (expr);
7140 rsize = native_encode_expr (part, ptr, len, off);
7141 if (off == -1 && rsize == 0)
7142 return 0;
7143 part = TREE_IMAGPART (expr);
7144 if (off != -1)
7145 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7146 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7147 len - rsize, off);
7148 if (off == -1 && isize != rsize)
7149 return 0;
7150 return rsize + isize;
7154 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7155 specified by EXPR into the buffer PTR of length LEN bytes.
7156 Return the number of bytes placed in the buffer, or zero
7157 upon failure. */
7159 static int
7160 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7162 unsigned i, count;
7163 int size, offset;
7164 tree itype, elem;
7166 offset = 0;
7167 count = VECTOR_CST_NELTS (expr);
7168 itype = TREE_TYPE (TREE_TYPE (expr));
7169 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7170 for (i = 0; i < count; i++)
7172 if (off >= size)
7174 off -= size;
7175 continue;
7177 elem = VECTOR_CST_ELT (expr, i);
7178 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7179 len - offset, off);
7180 if ((off == -1 && res != size) || res == 0)
7181 return 0;
7182 offset += res;
7183 if (offset >= len)
7184 return offset;
7185 if (off != -1)
7186 off = 0;
7188 return offset;
7192 /* Subroutine of native_encode_expr. Encode the STRING_CST
7193 specified by EXPR into the buffer PTR of length LEN bytes.
7194 Return the number of bytes placed in the buffer, or zero
7195 upon failure. */
7197 static int
7198 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7200 tree type = TREE_TYPE (expr);
7202 /* Wide-char strings are encoded in target byte order, so encoding
7203 them natively is trivial. */
7204 if (BITS_PER_UNIT != CHAR_BIT
7205 || TREE_CODE (type) != ARRAY_TYPE
7206 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7207 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7208 return 0;
7210 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7211 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7212 return 0;
7213 if (off == -1)
7214 off = 0;
7215 if (ptr == NULL)
7216 /* Dry run. */;
7217 else if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7219 int written = 0;
7220 if (off < TREE_STRING_LENGTH (expr))
7222 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7223 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7225 memset (ptr + written, 0,
7226 MIN (total_bytes - written, len - written));
7228 else
7229 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7230 return MIN (total_bytes - off, len);
7234 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7235 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7236 buffer PTR of length LEN bytes. If PTR is NULL, don't actually store
7237 anything, just do a dry run. If OFF is not -1 then start
7238 the encoding at byte offset OFF and encode at most LEN bytes.
7239 Return the number of bytes placed in the buffer, or zero upon failure. */
7241 int
7242 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7244 /* We don't support starting at a negative offset, and -1 is special. */
7245 if (off < -1)
7246 return 0;
7248 switch (TREE_CODE (expr))
7250 case INTEGER_CST:
7251 return native_encode_int (expr, ptr, len, off);
7253 case REAL_CST:
7254 return native_encode_real (expr, ptr, len, off);
7256 case FIXED_CST:
7257 return native_encode_fixed (expr, ptr, len, off);
7259 case COMPLEX_CST:
7260 return native_encode_complex (expr, ptr, len, off);
7262 case VECTOR_CST:
7263 return native_encode_vector (expr, ptr, len, off);
7265 case STRING_CST:
7266 return native_encode_string (expr, ptr, len, off);
7268 default:
7269 return 0;
7274 /* Subroutine of native_interpret_expr. Interpret the contents of
7275 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7276 If the buffer cannot be interpreted, return NULL_TREE. */
7278 static tree
7279 native_interpret_int (tree type, const unsigned char *ptr, int len)
7281 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7283 if (total_bytes > len
7284 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7285 return NULL_TREE;
7287 wide_int result = wi::from_buffer (ptr, total_bytes);
7289 return wide_int_to_tree (type, result);
7293 /* Subroutine of native_interpret_expr. Interpret the contents of
7294 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7295 If the buffer cannot be interpreted, return NULL_TREE. */
7297 static tree
7298 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7300 scalar_mode mode = SCALAR_TYPE_MODE (type);
7301 int total_bytes = GET_MODE_SIZE (mode);
7302 double_int result;
7303 FIXED_VALUE_TYPE fixed_value;
7305 if (total_bytes > len
7306 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7307 return NULL_TREE;
7309 result = double_int::from_buffer (ptr, total_bytes);
7310 fixed_value = fixed_from_double_int (result, mode);
7312 return build_fixed (type, fixed_value);
7316 /* Subroutine of native_interpret_expr. Interpret the contents of
7317 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7318 If the buffer cannot be interpreted, return NULL_TREE. */
7320 static tree
7321 native_interpret_real (tree type, const unsigned char *ptr, int len)
7323 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
7324 int total_bytes = GET_MODE_SIZE (mode);
7325 unsigned char value;
7326 /* There are always 32 bits in each long, no matter the size of
7327 the host's long. We handle floating point representations with
7328 up to 192 bits. */
7329 REAL_VALUE_TYPE r;
7330 long tmp[6];
7332 if (total_bytes > len || total_bytes > 24)
7333 return NULL_TREE;
7334 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7336 memset (tmp, 0, sizeof (tmp));
7337 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7338 bitpos += BITS_PER_UNIT)
7340 /* Both OFFSET and BYTE index within a long;
7341 bitpos indexes the whole float. */
7342 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7343 if (UNITS_PER_WORD < 4)
7345 int word = byte / UNITS_PER_WORD;
7346 if (WORDS_BIG_ENDIAN)
7347 word = (words - 1) - word;
7348 offset = word * UNITS_PER_WORD;
7349 if (BYTES_BIG_ENDIAN)
7350 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7351 else
7352 offset += byte % UNITS_PER_WORD;
7354 else
7356 offset = byte;
7357 if (BYTES_BIG_ENDIAN)
7359 /* Reverse bytes within each long, or within the entire float
7360 if it's smaller than a long (for HFmode). */
7361 offset = MIN (3, total_bytes - 1) - offset;
7362 gcc_assert (offset >= 0);
7365 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7367 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7370 real_from_target (&r, tmp, mode);
7371 return build_real (type, r);
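/* For example (illustrative only): on a little-endian target the four
   bytes { 0x00, 0x00, 0x80, 0x3f } assemble into tmp[0] == 0x3f800000,
   which real_from_target decodes as the IEEE-754 single-precision
   value 1.0, so the result is the REAL_CST 1.0f. */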
7375 /* Subroutine of native_interpret_expr. Interpret the contents of
7376 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7377 If the buffer cannot be interpreted, return NULL_TREE. */
7379 static tree
7380 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7382 tree etype, rpart, ipart;
7383 int size;
7385 etype = TREE_TYPE (type);
7386 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7387 if (size * 2 > len)
7388 return NULL_TREE;
7389 rpart = native_interpret_expr (etype, ptr, size);
7390 if (!rpart)
7391 return NULL_TREE;
7392 ipart = native_interpret_expr (etype, ptr+size, size);
7393 if (!ipart)
7394 return NULL_TREE;
7395 return build_complex (type, rpart, ipart);
7399 /* Subroutine of native_interpret_expr. Interpret the contents of
7400 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7401 If the buffer cannot be interpreted, return NULL_TREE. */
7403 static tree
7404 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7406 tree etype, elem;
7407 int i, size, count;
7409 etype = TREE_TYPE (type);
7410 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7411 count = TYPE_VECTOR_SUBPARTS (type);
7412 if (size * count > len)
7413 return NULL_TREE;
7415 tree_vector_builder elements (type, count, 1);
7416 for (i = 0; i < count; ++i)
7418 elem = native_interpret_expr (etype, ptr+(i*size), size);
7419 if (!elem)
7420 return NULL_TREE;
7421 elements.quick_push (elem);
7423 return elements.build ();
7427 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7428 the buffer PTR of length LEN as a constant of type TYPE. For
7429 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7430 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7431 return NULL_TREE. */
7433 tree
7434 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7436 switch (TREE_CODE (type))
7438 case INTEGER_TYPE:
7439 case ENUMERAL_TYPE:
7440 case BOOLEAN_TYPE:
7441 case POINTER_TYPE:
7442 case REFERENCE_TYPE:
7443 return native_interpret_int (type, ptr, len);
7445 case REAL_TYPE:
7446 return native_interpret_real (type, ptr, len);
7448 case FIXED_POINT_TYPE:
7449 return native_interpret_fixed (type, ptr, len);
7451 case COMPLEX_TYPE:
7452 return native_interpret_complex (type, ptr, len);
7454 case VECTOR_TYPE:
7455 return native_interpret_vector (type, ptr, len);
7457 default:
7458 return NULL_TREE;
7462 /* Returns true if we can interpret the contents of a native encoding
7463 as TYPE. */
7465 static bool
7466 can_native_interpret_type_p (tree type)
7468 switch (TREE_CODE (type))
7470 case INTEGER_TYPE:
7471 case ENUMERAL_TYPE:
7472 case BOOLEAN_TYPE:
7473 case POINTER_TYPE:
7474 case REFERENCE_TYPE:
7475 case FIXED_POINT_TYPE:
7476 case REAL_TYPE:
7477 case COMPLEX_TYPE:
7478 case VECTOR_TYPE:
7479 return true;
7480 default:
7481 return false;
7486 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7487 TYPE at compile-time. If we're unable to perform the conversion
7488 return NULL_TREE. */
7490 static tree
7491 fold_view_convert_expr (tree type, tree expr)
7493 /* We support up to 512-bit values (for V8DFmode). */
7494 unsigned char buffer[64];
7495 int len;
7497 /* Check that the host and target are sane. */
7498 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7499 return NULL_TREE;
7501 len = native_encode_expr (expr, buffer, sizeof (buffer));
7502 if (len == 0)
7503 return NULL_TREE;
7505 return native_interpret_expr (type, buffer, len);
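/* A minimal sketch of the effect (hypothetical source input): the
   C-level idiom

     union { int i; float f; } u = { .i = 0x3f800000 };

   can reach the middle end as VIEW_CONVERT_EXPR <float> (1065353216);
   the constant is pushed through BUFFER by native_encode_expr and
   re-read by native_interpret_expr, folding to the REAL_CST 1.0f on
   an IEEE single-precision target (0x3f800000 encodes 1.0f). */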
7508 /* Build an expression for the address of T. Folds away INDIRECT_REF
7509 to avoid confusing the gimplify process. */
7511 tree
7512 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7514 /* The size of the object is not relevant when talking about its address. */
7515 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7516 t = TREE_OPERAND (t, 0);
7518 if (TREE_CODE (t) == INDIRECT_REF)
7520 t = TREE_OPERAND (t, 0);
7522 if (TREE_TYPE (t) != ptrtype)
7523 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7525 else if (TREE_CODE (t) == MEM_REF
7526 && integer_zerop (TREE_OPERAND (t, 1)))
7527 return TREE_OPERAND (t, 0);
7528 else if (TREE_CODE (t) == MEM_REF
7529 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7530 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7531 TREE_OPERAND (t, 0),
7532 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7533 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7535 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7537 if (TREE_TYPE (t) != ptrtype)
7538 t = fold_convert_loc (loc, ptrtype, t);
7540 else
7541 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7543 return t;
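/* For example (illustrative only): building the address of *p folds
   back to p (with a NOP_EXPR to PTRTYPE if the types differ),
   &MEM_REF [p, 0] folds to p, and &MEM_REF [(T *) 16, 4] folds via
   POINTER_PLUS_EXPR to the constant address 20. */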
7546 /* Build an expression for the address of T. */
7548 tree
7549 build_fold_addr_expr_loc (location_t loc, tree t)
7551 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7553 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7556 /* Fold a unary expression of code CODE and type TYPE with operand
7557 OP0. Return the folded expression if folding is successful.
7558 Otherwise, return NULL_TREE. */
7560 tree
7561 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7563 tree tem;
7564 tree arg0;
7565 enum tree_code_class kind = TREE_CODE_CLASS (code);
7567 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7568 && TREE_CODE_LENGTH (code) == 1);
7570 arg0 = op0;
7571 if (arg0)
7573 if (CONVERT_EXPR_CODE_P (code)
7574 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7576 /* Don't use STRIP_NOPS, because signedness of argument type
7577 matters. */
7578 STRIP_SIGN_NOPS (arg0);
7580 else
7582 /* Strip any conversions that don't change the mode. This
7583 is safe for every expression, except for a comparison
7584 expression because its signedness is derived from its
7585 operands.
7587 Note that this is done as an internal manipulation within
7588 the constant folder, in order to find the simplest
7589 representation of the arguments so that their form can be
7590 studied. In any cases, the appropriate type conversions
7591 should be put back in the tree that will get out of the
7592 constant folder. */
7593 STRIP_NOPS (arg0);
7596 if (CONSTANT_CLASS_P (arg0))
7598 tree tem = const_unop (code, type, arg0);
7599 if (tem)
7601 if (TREE_TYPE (tem) != type)
7602 tem = fold_convert_loc (loc, type, tem);
7603 return tem;
7608 tem = generic_simplify (loc, code, type, op0);
7609 if (tem)
7610 return tem;
7612 if (TREE_CODE_CLASS (code) == tcc_unary)
7614 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7615 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7616 fold_build1_loc (loc, code, type,
7617 fold_convert_loc (loc, TREE_TYPE (op0),
7618 TREE_OPERAND (arg0, 1))));
7619 else if (TREE_CODE (arg0) == COND_EXPR)
7621 tree arg01 = TREE_OPERAND (arg0, 1);
7622 tree arg02 = TREE_OPERAND (arg0, 2);
7623 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7624 arg01 = fold_build1_loc (loc, code, type,
7625 fold_convert_loc (loc,
7626 TREE_TYPE (op0), arg01));
7627 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7628 arg02 = fold_build1_loc (loc, code, type,
7629 fold_convert_loc (loc,
7630 TREE_TYPE (op0), arg02));
7631 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7632 arg01, arg02);
7634 /* If this was a conversion, and all we did was to move it
7635 inside the COND_EXPR, bring it back out. But leave it if
7636 it is a conversion from integer to integer and the
7637 result precision is no wider than a word since such a
7638 conversion is cheap and may be optimized away by combine,
7639 while it couldn't if it were outside the COND_EXPR. Then return
7640 so we don't get into an infinite recursion loop taking the
7641 conversion out and then back in. */
7643 if ((CONVERT_EXPR_CODE_P (code)
7644 || code == NON_LVALUE_EXPR)
7645 && TREE_CODE (tem) == COND_EXPR
7646 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7647 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7648 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7649 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7650 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7651 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7652 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7653 && (INTEGRAL_TYPE_P
7654 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7655 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7656 || flag_syntax_only))
7657 tem = build1_loc (loc, code, type,
7658 build3 (COND_EXPR,
7659 TREE_TYPE (TREE_OPERAND
7660 (TREE_OPERAND (tem, 1), 0)),
7661 TREE_OPERAND (tem, 0),
7662 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7663 TREE_OPERAND (TREE_OPERAND (tem, 2),
7664 0)));
7665 return tem;
7669 switch (code)
7671 case NON_LVALUE_EXPR:
7672 if (!maybe_lvalue_p (op0))
7673 return fold_convert_loc (loc, type, op0);
7674 return NULL_TREE;
7676 CASE_CONVERT:
7677 case FLOAT_EXPR:
7678 case FIX_TRUNC_EXPR:
7679 if (COMPARISON_CLASS_P (op0))
7681 /* If we have (type) (a CMP b) and type is an integral type, return
7682 new expression involving the new type. Canonicalize
7683 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7684 non-integral type.
7685 Do not fold the result as that would not simplify further, also
7686 folding again results in recursions. */
7687 if (TREE_CODE (type) == BOOLEAN_TYPE)
7688 return build2_loc (loc, TREE_CODE (op0), type,
7689 TREE_OPERAND (op0, 0),
7690 TREE_OPERAND (op0, 1));
7691 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7692 && TREE_CODE (type) != VECTOR_TYPE)
7693 return build3_loc (loc, COND_EXPR, type, op0,
7694 constant_boolean_node (true, type),
7695 constant_boolean_node (false, type));
7698 /* Handle (T *)&A.B.C for A being of type T and B and C
7699 living at offset zero. This occurs frequently in
7700 C++ upcasting and then accessing the base. */
7701 if (TREE_CODE (op0) == ADDR_EXPR
7702 && POINTER_TYPE_P (type)
7703 && handled_component_p (TREE_OPERAND (op0, 0)))
7705 HOST_WIDE_INT bitsize, bitpos;
7706 tree offset;
7707 machine_mode mode;
7708 int unsignedp, reversep, volatilep;
7709 tree base
7710 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7711 &offset, &mode, &unsignedp, &reversep,
7712 &volatilep);
7713 /* If the reference was to a (constant) zero offset, we can use
7714 the address of the base if it has the same base type
7715 as the result type and the pointer type is unqualified. */
7716 if (! offset && bitpos == 0
7717 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7718 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7719 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7720 return fold_convert_loc (loc, type,
7721 build_fold_addr_expr_loc (loc, base));
7724 if (TREE_CODE (op0) == MODIFY_EXPR
7725 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7726 /* Detect assigning a bitfield. */
7727 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7728 && DECL_BIT_FIELD
7729 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7731 /* Don't leave an assignment inside a conversion
7732 unless assigning a bitfield. */
7733 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7734 /* First do the assignment, then return converted constant. */
7735 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7736 TREE_NO_WARNING (tem) = 1;
7737 TREE_USED (tem) = 1;
7738 return tem;
7741 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7742 constant (if x has signed type, the sign bit cannot be set
7743 in c). This folds extension into the BIT_AND_EXPR.
7744 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7745 very likely don't have maximal range for their precision and this
7746 transformation effectively doesn't preserve non-maximal ranges. */
7747 if (TREE_CODE (type) == INTEGER_TYPE
7748 && TREE_CODE (op0) == BIT_AND_EXPR
7749 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7751 tree and_expr = op0;
7752 tree and0 = TREE_OPERAND (and_expr, 0);
7753 tree and1 = TREE_OPERAND (and_expr, 1);
7754 int change = 0;
7756 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7757 || (TYPE_PRECISION (type)
7758 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7759 change = 1;
7760 else if (TYPE_PRECISION (TREE_TYPE (and1))
7761 <= HOST_BITS_PER_WIDE_INT
7762 && tree_fits_uhwi_p (and1))
7764 unsigned HOST_WIDE_INT cst;
7766 cst = tree_to_uhwi (and1);
7767 cst &= HOST_WIDE_INT_M1U
7768 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7769 change = (cst == 0);
7770 if (change
7771 && !flag_syntax_only
7772 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
7773 == ZERO_EXTEND))
7775 tree uns = unsigned_type_for (TREE_TYPE (and0));
7776 and0 = fold_convert_loc (loc, uns, and0);
7777 and1 = fold_convert_loc (loc, uns, and1);
7780 if (change)
7782 tem = force_fit_type (type, wi::to_widest (and1), 0,
7783 TREE_OVERFLOW (and1));
7784 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7785 fold_convert_loc (loc, type, and0), tem);
7789 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7790 cast (T1)X will fold away. We assume that this happens when X itself
7791 is a cast. */
7792 if (POINTER_TYPE_P (type)
7793 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7794 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7796 tree arg00 = TREE_OPERAND (arg0, 0);
7797 tree arg01 = TREE_OPERAND (arg0, 1);
7799 return fold_build_pointer_plus_loc
7800 (loc, fold_convert_loc (loc, type, arg00), arg01);
7803 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7804 of the same precision, and X is an integer type not narrower than
7805 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7806 if (INTEGRAL_TYPE_P (type)
7807 && TREE_CODE (op0) == BIT_NOT_EXPR
7808 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7809 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7810 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7812 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7813 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7814 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7815 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7816 fold_convert_loc (loc, type, tem));
7819 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7820 type of X and Y (integer types only). */
7821 if (INTEGRAL_TYPE_P (type)
7822 && TREE_CODE (op0) == MULT_EXPR
7823 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7824 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7826 /* Be careful not to introduce new overflows. */
7827 tree mult_type;
7828 if (TYPE_OVERFLOW_WRAPS (type))
7829 mult_type = type;
7830 else
7831 mult_type = unsigned_type_for (type);
7833 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7835 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7836 fold_convert_loc (loc, mult_type,
7837 TREE_OPERAND (op0, 0)),
7838 fold_convert_loc (loc, mult_type,
7839 TREE_OPERAND (op0, 1)));
7840 return fold_convert_loc (loc, type, tem);
7844 return NULL_TREE;
7846 case VIEW_CONVERT_EXPR:
7847 if (TREE_CODE (op0) == MEM_REF)
7849 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
7850 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
7851 tem = fold_build2_loc (loc, MEM_REF, type,
7852 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7853 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7854 return tem;
7857 return NULL_TREE;
7859 case NEGATE_EXPR:
7860 tem = fold_negate_expr (loc, arg0);
7861 if (tem)
7862 return fold_convert_loc (loc, type, tem);
7863 return NULL_TREE;
7865 case ABS_EXPR:
7866 /* Convert fabs((double)float) into (double)fabsf(float). */
7867 if (TREE_CODE (arg0) == NOP_EXPR
7868 && TREE_CODE (type) == REAL_TYPE)
7870 tree targ0 = strip_float_extensions (arg0);
7871 if (targ0 != arg0)
7872 return fold_convert_loc (loc, type,
7873 fold_build1_loc (loc, ABS_EXPR,
7874 TREE_TYPE (targ0),
7875 targ0));
7877 return NULL_TREE;
7879 case BIT_NOT_EXPR:
7880 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7881 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7882 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7883 fold_convert_loc (loc, type,
7884 TREE_OPERAND (arg0, 0)))))
7885 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7886 fold_convert_loc (loc, type,
7887 TREE_OPERAND (arg0, 1)));
7888 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7889 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7890 fold_convert_loc (loc, type,
7891 TREE_OPERAND (arg0, 1)))))
7892 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7893 fold_convert_loc (loc, type,
7894 TREE_OPERAND (arg0, 0)), tem);
7896 return NULL_TREE;
7898 case TRUTH_NOT_EXPR:
7899 /* Note that the operand of this must be an int
7900 and its values must be 0 or 1.
7901 ("true" is a fixed value perhaps depending on the language,
7902 but we don't handle values other than 1 correctly yet.) */
7903 tem = fold_truth_not_expr (loc, arg0);
7904 if (!tem)
7905 return NULL_TREE;
7906 return fold_convert_loc (loc, type, tem);
7908 case INDIRECT_REF:
7909 /* Fold *&X to X if X is an lvalue. */
7910 if (TREE_CODE (op0) == ADDR_EXPR)
7912 tree op00 = TREE_OPERAND (op0, 0);
7913 if ((VAR_P (op00)
7914 || TREE_CODE (op00) == PARM_DECL
7915 || TREE_CODE (op00) == RESULT_DECL)
7916 && !TREE_READONLY (op00))
7917 return op00;
7919 return NULL_TREE;
7921 default:
7922 return NULL_TREE;
7923 } /* switch (code) */
7927 /* If the operation was a conversion do _not_ mark a resulting constant
7928 with TREE_OVERFLOW if the original constant was not. These conversions
7929 have implementation defined behavior and retaining the TREE_OVERFLOW
7930 flag here would confuse later passes such as VRP. */
7931 tree
7932 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7933 tree type, tree op0)
7935 tree res = fold_unary_loc (loc, code, type, op0);
7936 if (res
7937 && TREE_CODE (res) == INTEGER_CST
7938 && TREE_CODE (op0) == INTEGER_CST
7939 && CONVERT_EXPR_CODE_P (code))
7940 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7942 return res;
7945 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7946 operands OP0 and OP1. LOC is the location of the resulting expression.
7947 ARG0 and ARG1 are OP0 and OP1 with their conversions stripped (STRIP_NOPS).
7948 Return the folded expression if folding is successful. Otherwise,
7949 return NULL_TREE. */
7950 static tree
7951 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7952 tree arg0, tree arg1, tree op0, tree op1)
7954 tree tem;
7956 /* We only do these simplifications if we are optimizing. */
7957 if (!optimize)
7958 return NULL_TREE;
7960 /* Check for things like (A || B) && (A || C). We can convert this
7961 to A || (B && C). Note that either operator can be any of the four
7962 truth and/or operations and the transformation will still be
7963 valid. Also note that we only care about order for the
7964 ANDIF and ORIF operators: if B contains side effects, this
7965 might change the truth-value of A. */
7966 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7967 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7968 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7969 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7970 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7971 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7973 tree a00 = TREE_OPERAND (arg0, 0);
7974 tree a01 = TREE_OPERAND (arg0, 1);
7975 tree a10 = TREE_OPERAND (arg1, 0);
7976 tree a11 = TREE_OPERAND (arg1, 1);
7977 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7978 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7979 && (code == TRUTH_AND_EXPR
7980 || code == TRUTH_OR_EXPR));
7982 if (operand_equal_p (a00, a10, 0))
7983 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7984 fold_build2_loc (loc, code, type, a01, a11));
7985 else if (commutative && operand_equal_p (a00, a11, 0))
7986 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7987 fold_build2_loc (loc, code, type, a01, a10));
7988 else if (commutative && operand_equal_p (a01, a10, 0))
7989 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
7990 fold_build2_loc (loc, code, type, a00, a11));
7992 /* This case is tricky because we must either have commutative
7993 operators or else A10 must not have side-effects. */
7995 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7996 && operand_equal_p (a01, a11, 0))
7997 return fold_build2_loc (loc, TREE_CODE (arg0), type,
7998 fold_build2_loc (loc, code, type, a00, a10),
7999 a01);
8002 /* See if we can build a range comparison. */
8003 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8004 return tem;
8006 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8007 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8009 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8010 if (tem)
8011 return fold_build2_loc (loc, code, type, tem, arg1);
8014 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8015 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8017 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8018 if (tem)
8019 return fold_build2_loc (loc, code, type, arg0, tem);
8022 /* Check for the possibility of merging component references. If our
8023 lhs is another similar operation, try to merge its rhs with our
8024 rhs. Then try to merge our lhs and rhs. */
8025 if (TREE_CODE (arg0) == code
8026 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8027 TREE_OPERAND (arg0, 1), arg1)))
8028 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8030 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8031 return tem;
8033 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8034 && !flag_sanitize_coverage
8035 && (code == TRUTH_AND_EXPR
8036 || code == TRUTH_ANDIF_EXPR
8037 || code == TRUTH_OR_EXPR
8038 || code == TRUTH_ORIF_EXPR))
8040 enum tree_code ncode, icode;
8042 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8043 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8044 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8046 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8047 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8048 We don't want to pack more than two leaves into a non-IF AND/OR
8049 expression.
8050 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8051 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8052 If the inner right-hand side of the left-hand operand has
8053 side effects, or isn't simple, then we can't add to it,
8054 as otherwise we might destroy the if-sequence. */
8055 if (TREE_CODE (arg0) == icode
8056 && simple_operand_p_2 (arg1)
8057 /* Needed for sequence points to handle trapping and
8058 side effects. */
8059 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8061 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8062 arg1);
8063 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8064 tem);
8066 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8067 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8068 else if (TREE_CODE (arg1) == icode
8069 && simple_operand_p_2 (arg0)
8070 /* Needed for sequence points to handle trapping and
8071 side effects. */
8072 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8074 tem = fold_build2_loc (loc, ncode, type,
8075 arg0, TREE_OPERAND (arg1, 0));
8076 return fold_build2_loc (loc, icode, type, tem,
8077 TREE_OPERAND (arg1, 1));
8079 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8080 into (A OR B).
8081 For sequence point consistency, we need to check for trapping
8082 and side effects. */
8083 else if (code == icode && simple_operand_p_2 (arg0)
8084 && simple_operand_p_2 (arg1))
8085 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8088 return NULL_TREE;
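/* A worked example (illustrative only): when
   LOGICAL_OP_NON_SHORT_CIRCUIT holds and a, b and c are simple,
   trap-free operands,

     (a && b) && c   is repacked as   a && (b AND c)
     a && b          becomes          a AND b

   where AND denotes the non-short-circuit TRUTH_AND_EXPR, trading
   branches for straight-line evaluation. */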
8091 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8092 by changing CODE to reduce the magnitude of constants involved in
8093 ARG0 of the comparison.
8094 Returns a canonicalized comparison tree if a simplification was
8095 possible, otherwise returns NULL_TREE.
8096 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8097 valid if signed overflow is undefined. */
8099 static tree
8100 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8101 tree arg0, tree arg1,
8102 bool *strict_overflow_p)
8104 enum tree_code code0 = TREE_CODE (arg0);
8105 tree t, cst0 = NULL_TREE;
8106 int sgn0;
8108 /* Match A +- CST code arg1. We can change this only if overflow
8109 is undefined. */
8110 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8111 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8112 /* In principle pointers also have undefined overflow behavior,
8113 but that causes problems elsewhere. */
8114 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8115 && (code0 == MINUS_EXPR
8116 || code0 == PLUS_EXPR)
8117 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8118 return NULL_TREE;
8120 /* Identify the constant in arg0 and its sign. */
8121 cst0 = TREE_OPERAND (arg0, 1);
8122 sgn0 = tree_int_cst_sgn (cst0);
8124 /* Overflowed constants and zero will cause problems. */
8125 if (integer_zerop (cst0)
8126 || TREE_OVERFLOW (cst0))
8127 return NULL_TREE;
8129 /* See if we can reduce the magnitude of the constant in
8130 arg0 by changing the comparison code. */
8131 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8132 if (code == LT_EXPR
8133 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8134 code = LE_EXPR;
8135 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8136 else if (code == GT_EXPR
8137 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8138 code = GE_EXPR;
8139 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8140 else if (code == LE_EXPR
8141 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8142 code = LT_EXPR;
8143 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8144 else if (code == GE_EXPR
8145 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8146 code = GT_EXPR;
8147 else
8148 return NULL_TREE;
8149 *strict_overflow_p = true;
8151 /* Now build the constant reduced in magnitude. But not if that
8152 would produce one outside of its type's range. */
8153 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8154 && ((sgn0 == 1
8155 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8156 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8157 || (sgn0 == -1
8158 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8159 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8160 return NULL_TREE;
8162 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8163 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8164 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8165 t = fold_convert (TREE_TYPE (arg1), t);
8167 return fold_build2_loc (loc, code, type, t, arg1);
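/* A worked example (illustrative only): for signed X where overflow
   is undefined,

     X - 5 < Y    is canonicalized to    X - 4 <= Y

   shrinking the magnitude of the constant from 5 to 4;
   *STRICT_OVERFLOW_P is set because the step is only valid if
   X - 5 did not wrap. */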
8170 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8171 overflow further. Try to decrease the magnitude of constants involved
8172 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8173 and put sole constants at the second argument position.
8174 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8176 static tree
8177 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8178 tree arg0, tree arg1)
8180 tree t;
8181 bool strict_overflow_p;
8182 const char * const warnmsg = G_("assuming signed overflow does not occur "
8183 "when reducing constant in comparison");
8185 /* Try canonicalization by simplifying arg0. */
8186 strict_overflow_p = false;
8187 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8188 &strict_overflow_p);
8189 if (t)
8191 if (strict_overflow_p)
8192 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8193 return t;
8196 /* Try canonicalization by simplifying arg1 using the swapped
8197 comparison. */
8198 code = swap_tree_comparison (code);
8199 strict_overflow_p = false;
8200 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8201 &strict_overflow_p);
8202 if (t && strict_overflow_p)
8203 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8204 return t;
8207 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8208 space. This is used to avoid issuing overflow warnings for
8209 expressions like &p->x which cannot wrap. */
8211 static bool
8212 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8214 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8215 return true;
8217 if (bitpos < 0)
8218 return true;
8220 wide_int wi_offset;
8221 int precision = TYPE_PRECISION (TREE_TYPE (base));
8222 if (offset == NULL_TREE)
8223 wi_offset = wi::zero (precision);
8224 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8225 return true;
8226 else
8227 wi_offset = wi::to_wide (offset);
8229 bool overflow;
8230 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8231 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8232 if (overflow)
8233 return true;
8235 if (!wi::fits_uhwi_p (total))
8236 return true;
8238 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8239 if (size <= 0)
8240 return true;
8242 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8243 array. */
8244 if (TREE_CODE (base) == ADDR_EXPR)
8246 HOST_WIDE_INT base_size;
8248 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8249 if (base_size > 0 && size < base_size)
8250 size = base_size;
8253 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
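/* A worked example (illustrative only): for char a[8], the address
   &a[4] arrives as BASE &a, OFFSET NULL_TREE and BITPOS 32, giving
   TOTAL = 4; since 4 does not exceed the 8-byte object the function
   returns false, while an offset past the end of the object would
   conservatively return true. */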
8256 /* Return a positive integer when the symbol DECL is known to have
8257 a nonzero address, zero when it's known not to (e.g., it's a weak
8258 symbol), and a negative integer when the symbol is not yet in the
8259 symbol table and so whether or not its address is zero is unknown.
8260 For function-local objects, always return a positive integer. */
8261 static int
8262 maybe_nonzero_address (tree decl)
8264 if (DECL_P (decl) && decl_in_symtab_p (decl))
8265 if (struct symtab_node *symbol = symtab_node::get_create (decl))
8266 return symbol->nonzero_address ();
8268 /* Function local objects are never NULL. */
8269 if (DECL_P (decl)
8270 && (DECL_CONTEXT (decl)
8271 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8272 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8273 return 1;
8275 return -1;
8278 /* Subroutine of fold_binary. This routine performs all of the
8279 transformations that are common to the equality/inequality
8280 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8281 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8282 fold_binary should call fold_binary, not this routine directly. Fold a comparison with
8283 tree code CODE and type TYPE with operands OP0 and OP1. Return
8284 the folded comparison or NULL_TREE. */
8286 static tree
8287 fold_comparison (location_t loc, enum tree_code code, tree type,
8288 tree op0, tree op1)
8290 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8291 tree arg0, arg1, tem;
8293 arg0 = op0;
8294 arg1 = op1;
8296 STRIP_SIGN_NOPS (arg0);
8297 STRIP_SIGN_NOPS (arg1);
8299 /* For comparisons of pointers we can decompose it to a compile time
8300 comparison of the base objects and the offsets into the object.
8301 This requires at least one operand being an ADDR_EXPR or a
8302 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8303 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8304 && (TREE_CODE (arg0) == ADDR_EXPR
8305 || TREE_CODE (arg1) == ADDR_EXPR
8306 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8307 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8309 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8310 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8311 machine_mode mode;
8312 int volatilep, reversep, unsignedp;
8313 bool indirect_base0 = false, indirect_base1 = false;
8315 /* Get base and offset for the access. Strip ADDR_EXPR for
8316 get_inner_reference, but put it back by stripping INDIRECT_REF
8317 off the base object if possible. indirect_baseN will be true
8318 if baseN is not an address but refers to the object itself. */
8319 base0 = arg0;
8320 if (TREE_CODE (arg0) == ADDR_EXPR)
8322 base0
8323 = get_inner_reference (TREE_OPERAND (arg0, 0),
8324 &bitsize, &bitpos0, &offset0, &mode,
8325 &unsignedp, &reversep, &volatilep);
8326 if (TREE_CODE (base0) == INDIRECT_REF)
8327 base0 = TREE_OPERAND (base0, 0);
8328 else
8329 indirect_base0 = true;
8331 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8333 base0 = TREE_OPERAND (arg0, 0);
8334 STRIP_SIGN_NOPS (base0);
8335 if (TREE_CODE (base0) == ADDR_EXPR)
8337 base0
8338 = get_inner_reference (TREE_OPERAND (base0, 0),
8339 &bitsize, &bitpos0, &offset0, &mode,
8340 &unsignedp, &reversep, &volatilep);
8341 if (TREE_CODE (base0) == INDIRECT_REF)
8342 base0 = TREE_OPERAND (base0, 0);
8343 else
8344 indirect_base0 = true;
8346 if (offset0 == NULL_TREE || integer_zerop (offset0))
8347 offset0 = TREE_OPERAND (arg0, 1);
8348 else
8349 offset0 = size_binop (PLUS_EXPR, offset0,
8350 TREE_OPERAND (arg0, 1));
8351 if (TREE_CODE (offset0) == INTEGER_CST)
8353 offset_int tem = wi::sext (wi::to_offset (offset0),
8354 TYPE_PRECISION (sizetype));
8355 tem <<= LOG2_BITS_PER_UNIT;
8356 tem += bitpos0;
8357 if (wi::fits_shwi_p (tem))
8359 bitpos0 = tem.to_shwi ();
8360 offset0 = NULL_TREE;
8365 base1 = arg1;
8366 if (TREE_CODE (arg1) == ADDR_EXPR)
8368 base1
8369 = get_inner_reference (TREE_OPERAND (arg1, 0),
8370 &bitsize, &bitpos1, &offset1, &mode,
8371 &unsignedp, &reversep, &volatilep);
8372 if (TREE_CODE (base1) == INDIRECT_REF)
8373 base1 = TREE_OPERAND (base1, 0);
8374 else
8375 indirect_base1 = true;
8377 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8379 base1 = TREE_OPERAND (arg1, 0);
8380 STRIP_SIGN_NOPS (base1);
8381 if (TREE_CODE (base1) == ADDR_EXPR)
8383 base1
8384 = get_inner_reference (TREE_OPERAND (base1, 0),
8385 &bitsize, &bitpos1, &offset1, &mode,
8386 &unsignedp, &reversep, &volatilep);
8387 if (TREE_CODE (base1) == INDIRECT_REF)
8388 base1 = TREE_OPERAND (base1, 0);
8389 else
8390 indirect_base1 = true;
8392 if (offset1 == NULL_TREE || integer_zerop (offset1))
8393 offset1 = TREE_OPERAND (arg1, 1);
8394 else
8395 offset1 = size_binop (PLUS_EXPR, offset1,
8396 TREE_OPERAND (arg1, 1));
8397 if (TREE_CODE (offset1) == INTEGER_CST)
8399 offset_int tem = wi::sext (wi::to_offset (offset1),
8400 TYPE_PRECISION (sizetype));
8401 tem <<= LOG2_BITS_PER_UNIT;
8402 tem += bitpos1;
8403 if (wi::fits_shwi_p (tem))
8405 bitpos1 = tem.to_shwi ();
8406 offset1 = NULL_TREE;
8411 /* If we have equivalent bases we might be able to simplify. */
8412 if (indirect_base0 == indirect_base1
8413 && operand_equal_p (base0, base1,
8414 indirect_base0 ? OEP_ADDRESS_OF : 0))
8416 /* We can fold this expression to a constant if the non-constant
8417 offset parts are equal. */
8418 if (offset0 == offset1
8419 || (offset0 && offset1
8420 && operand_equal_p (offset0, offset1, 0)))
8422 if (!equality_code
8423 && bitpos0 != bitpos1
8424 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8425 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8426 fold_overflow_warning (("assuming pointer wraparound does not "
8427 "occur when comparing P +- C1 with "
8428 "P +- C2"),
8429 WARN_STRICT_OVERFLOW_CONDITIONAL);
8431 switch (code)
8433 case EQ_EXPR:
8434 return constant_boolean_node (bitpos0 == bitpos1, type);
8435 case NE_EXPR:
8436 return constant_boolean_node (bitpos0 != bitpos1, type);
8437 case LT_EXPR:
8438 return constant_boolean_node (bitpos0 < bitpos1, type);
8439 case LE_EXPR:
8440 return constant_boolean_node (bitpos0 <= bitpos1, type);
8441 case GE_EXPR:
8442 return constant_boolean_node (bitpos0 >= bitpos1, type);
8443 case GT_EXPR:
8444 return constant_boolean_node (bitpos0 > bitpos1, type);
8445 default:;
8448 /* We can simplify the comparison to a comparison of the variable
8449 offset parts if the constant offset parts are equal.
8450 Be careful to use signed sizetype here because otherwise we
8451 mess with array offsets in the wrong way. This is possible
8452 because pointer arithmetic is restricted to remain within an
8453 object and overflow on pointer differences is undefined as of
8454 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8455 else if (bitpos0 == bitpos1)
8457 /* By converting to signed sizetype we cover middle-end pointer
8458 arithmetic which operates on unsigned pointer types of size
8459 type size and ARRAY_REF offsets which are properly sign or
8460 zero extended from their type in case it is narrower than
8461 sizetype. */
8462 if (offset0 == NULL_TREE)
8463 offset0 = build_int_cst (ssizetype, 0);
8464 else
8465 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8466 if (offset1 == NULL_TREE)
8467 offset1 = build_int_cst (ssizetype, 0);
8468 else
8469 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8471 if (!equality_code
8472 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8473 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8474 fold_overflow_warning (("assuming pointer wraparound does not "
8475 "occur when comparing P +- C1 with "
8476 "P +- C2"),
8477 WARN_STRICT_OVERFLOW_COMPARISON);
8479 return fold_build2_loc (loc, code, type, offset0, offset1);
8482 /* For equal offsets we can simplify to a comparison of the
8483 base addresses. */
8484 else if (bitpos0 == bitpos1
8485 && (indirect_base0
8486 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8487 && (indirect_base1
8488 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8489 && ((offset0 == offset1)
8490 || (offset0 && offset1
8491 && operand_equal_p (offset0, offset1, 0))))
8493 if (indirect_base0)
8494 base0 = build_fold_addr_expr_loc (loc, base0);
8495 if (indirect_base1)
8496 base1 = build_fold_addr_expr_loc (loc, base1);
8497 return fold_build2_loc (loc, code, type, base0, base1);
8499 /* Comparison between an ordinary (non-weak) symbol and a null
8500 pointer can be eliminated since such symbols must have a non
8501 null address. In C, relational expressions between pointers
8502 to objects and null pointers are undefined. The results
8503 below follow the C++ rules with the additional property that
8504 every object pointer compares greater than a null pointer. */
8506 else if (((DECL_P (base0)
8507 && maybe_nonzero_address (base0) > 0
8508 /* Avoid folding references to struct members at offset 0 to
8509 prevent tests like '&ptr->firstmember == 0' from getting
8510 eliminated. When ptr is null, although the -> expression
8511 is strictly speaking invalid, GCC retains it as a matter
8512 of QoI. See PR c/44555. */
8513 && (offset0 == NULL_TREE && bitpos0 != 0))
8514 || CONSTANT_CLASS_P (base0))
8515 && indirect_base0
8516 /* The caller guarantees that when one of the arguments is
8517 constant (i.e., null in this case) it is second. */
8518 && integer_zerop (arg1))
8520 switch (code)
8522 case EQ_EXPR:
8523 case LE_EXPR:
8524 case LT_EXPR:
8525 return constant_boolean_node (false, type);
8526 case GE_EXPR:
8527 case GT_EXPR:
8528 case NE_EXPR:
8529 return constant_boolean_node (true, type);
8530 default:
8531 gcc_unreachable ();
8536 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8537 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8538 the resulting offset is smaller in absolute value than the
8539 original one and has the same sign. */
8540 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8541 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8542 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8543 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8544 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8545 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8546 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8547 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8549 tree const1 = TREE_OPERAND (arg0, 1);
8550 tree const2 = TREE_OPERAND (arg1, 1);
8551 tree variable1 = TREE_OPERAND (arg0, 0);
8552 tree variable2 = TREE_OPERAND (arg1, 0);
8553 tree cst;
8554 const char * const warnmsg = G_("assuming signed overflow does not "
8555 "occur when combining constants around "
8556 "a comparison");
8558 /* Put the constant on the side where it doesn't overflow and is
8559 of lower absolute value and of the same sign as before. */
8560 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8561 ? MINUS_EXPR : PLUS_EXPR,
8562 const2, const1);
8563 if (!TREE_OVERFLOW (cst)
8564 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8565 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8567 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8568 return fold_build2_loc (loc, code, type,
8569 variable1,
8570 fold_build2_loc (loc, TREE_CODE (arg1),
8571 TREE_TYPE (arg1),
8572 variable2, cst));
8575 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8576 ? MINUS_EXPR : PLUS_EXPR,
8577 const1, const2);
8578 if (!TREE_OVERFLOW (cst)
8579 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8580 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8582 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8583 return fold_build2_loc (loc, code, type,
8584 fold_build2_loc (loc, TREE_CODE (arg0),
8585 TREE_TYPE (arg0),
8586 variable1, cst),
8587 variable2);
8591 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8592 if (tem)
8593 return tem;
8595 /* If we are comparing an expression that just has comparisons
8596 of two integer values, arithmetic expressions of those comparisons,
8597 and constants, we can simplify it. There are only three cases
8598 to check: the two values can either be equal, the first can be
8599 greater, or the second can be greater. Fold the expression for
8600 those three values. Since each value must be 0 or 1, we have
8601 eight possibilities, each of which corresponds to the constant 0
8602 or 1 or one of the six possible comparisons.
8604 This handles common cases like (a > b) == 0 but also handles
8605 expressions like ((x > y) - (y > x)) > 0, which supposedly
8606 occur in macroized code. */
8608 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8610 tree cval1 = 0, cval2 = 0;
8611 int save_p = 0;
8613 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8614 /* Don't handle degenerate cases here; they should already
8615 have been handled anyway. */
8616 && cval1 != 0 && cval2 != 0
8617 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8618 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8619 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8620 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8621 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8622 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8623 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8625 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8626 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8628 /* We can't just pass T to eval_subst in case cval1 or cval2
8629 was the same as ARG1. */
8631 tree high_result
8632 = fold_build2_loc (loc, code, type,
8633 eval_subst (loc, arg0, cval1, maxval,
8634 cval2, minval),
8635 arg1);
8636 tree equal_result
8637 = fold_build2_loc (loc, code, type,
8638 eval_subst (loc, arg0, cval1, maxval,
8639 cval2, maxval),
8640 arg1);
8641 tree low_result
8642 = fold_build2_loc (loc, code, type,
8643 eval_subst (loc, arg0, cval1, minval,
8644 cval2, maxval),
8645 arg1);
8647 /* All three of these results should be 0 or 1. Confirm they are.
8648 Then use those values to select the proper code to use. */
8650 if (TREE_CODE (high_result) == INTEGER_CST
8651 && TREE_CODE (equal_result) == INTEGER_CST
8652 && TREE_CODE (low_result) == INTEGER_CST)
8654 /* Make a 3-bit mask with the high-order bit being the
8655 value for `>', the next for '=', and the low for '<'. */
8656 switch ((integer_onep (high_result) * 4)
8657 + (integer_onep (equal_result) * 2)
8658 + integer_onep (low_result))
8660 case 0:
8661 /* Always false. */
8662 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8663 case 1:
8664 code = LT_EXPR;
8665 break;
8666 case 2:
8667 code = EQ_EXPR;
8668 break;
8669 case 3:
8670 code = LE_EXPR;
8671 break;
8672 case 4:
8673 code = GT_EXPR;
8674 break;
8675 case 5:
8676 code = NE_EXPR;
8677 break;
8678 case 6:
8679 code = GE_EXPR;
8680 break;
8681 case 7:
8682 /* Always true. */
8683 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8686 if (save_p)
8688 tem = save_expr (build2 (code, type, cval1, cval2));
8689 protected_set_expr_location (tem, loc);
8690 return tem;
8692 return fold_build2_loc (loc, code, type, cval1, cval2);
8697 return NULL_TREE;
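/* Two worked examples (illustrative only): for

     struct S { int a, b; } s;

   the comparison &s.b > &s.a decomposes into equal bases with
   BITPOS0 32 and BITPOS1 0 (32-bit int), folding to constant true;
   and for signed X, Y with undefined overflow,

     X + 2 < Y + 1    folds to    X + 1 < Y

   since moving the constants reduces their magnitude without
   changing the result. */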
8701 /* Subroutine of fold_binary. Optimize complex multiplications of the
8702 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8703 argument EXPR represents the expression "z" of type TYPE. */
8705 static tree
8706 fold_mult_zconjz (location_t loc, tree type, tree expr)
8708 tree itype = TREE_TYPE (type);
8709 tree rpart, ipart, tem;
8711 if (TREE_CODE (expr) == COMPLEX_EXPR)
8713 rpart = TREE_OPERAND (expr, 0);
8714 ipart = TREE_OPERAND (expr, 1);
8716 else if (TREE_CODE (expr) == COMPLEX_CST)
8718 rpart = TREE_REALPART (expr);
8719 ipart = TREE_IMAGPART (expr);
8721 else
8723 expr = save_expr (expr);
8724 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8725 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8728 rpart = save_expr (rpart);
8729 ipart = save_expr (ipart);
8730 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8731 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8732 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8733 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8734 build_zero_cst (itype));
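/* A worked example (illustrative only): for z = 3 + 4i,

     z * conj(z) = (3 + 4i) * (3 - 4i) = 3*3 + 4*4 = 25

   so the folded result is the complex value 25 + 0i, exactly
   rpart*rpart + ipart*ipart with a zero imaginary part as built
   above. */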
8738 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8739 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
8740 true if successful. */
8742 static bool
8743 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
8745 unsigned int i;
8747 if (TREE_CODE (arg) == VECTOR_CST)
8749 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8750 elts[i] = VECTOR_CST_ELT (arg, i);
8752 else if (TREE_CODE (arg) == CONSTRUCTOR)
8754 constructor_elt *elt;
8756 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8757 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8758 return false;
8759 else
8760 elts[i] = elt->value;
8762 else
8763 return false;
8764 for (; i < nelts; i++)
8765 elts[i]
8766 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8767 return true;
8770 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8771 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8772 NULL_TREE otherwise. */
8774 static tree
8775 fold_vec_perm (tree type, tree arg0, tree arg1, vec_perm_indices sel)
8777 unsigned int i;
8778 bool need_ctor = false;
8780 unsigned int nelts = sel.length ();
8781 gcc_assert (TYPE_VECTOR_SUBPARTS (type) == nelts
8782 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8783 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8784 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8785 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8786 return NULL_TREE;
8788 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
8789 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
8790 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
8791 return NULL_TREE;
8793 tree_vector_builder out_elts (type, nelts, 1);
8794 for (i = 0; i < nelts; i++)
8796 if (!CONSTANT_CLASS_P (in_elts[sel[i]]))
8797 need_ctor = true;
8798 out_elts.quick_push (unshare_expr (in_elts[sel[i]]));
8801 if (need_ctor)
8803 vec<constructor_elt, va_gc> *v;
8804 vec_alloc (v, nelts);
8805 for (i = 0; i < nelts; i++)
8806 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
8807 return build_constructor (type, v);
8809 else
8810 return out_elts.build ();
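/* A worked example (illustrative only): for four-element vectors

     ARG0 = { a0, a1, a2, a3 }, ARG1 = { b0, b1, b2, b3 }

   the selector { 0, 4, 1, 5 } indexes the eight-element concatenation
   of ARG0 and ARG1, producing { a0, b0, a1, b1 } -- an interleave of
   the low halves. The result is a VECTOR_CST unless some selected
   element is non-constant, in which case a CONSTRUCTOR is built. */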
8813 /* Try to fold a pointer difference of type TYPE between two address
8814 expressions of array references AREF0 and AREF1 using location LOC. Return a
8815 simplified expression for the difference or NULL_TREE. */
8817 static tree
8818 fold_addr_of_array_ref_difference (location_t loc, tree type,
8819 tree aref0, tree aref1,
8820 bool use_pointer_diff)
8822 tree base0 = TREE_OPERAND (aref0, 0);
8823 tree base1 = TREE_OPERAND (aref1, 0);
8824 tree base_offset = build_int_cst (type, 0);
8826 /* If the bases are array references as well, recurse. If the bases
8827 are pointer indirections compute the difference of the pointers.
8828 If the bases are equal, we are set. */
8829 if ((TREE_CODE (base0) == ARRAY_REF
8830 && TREE_CODE (base1) == ARRAY_REF
8831 && (base_offset
8832 = fold_addr_of_array_ref_difference (loc, type, base0, base1,
8833 use_pointer_diff)))
8834 || (INDIRECT_REF_P (base0)
8835 && INDIRECT_REF_P (base1)
8836 && (base_offset
8837 = use_pointer_diff
8838 ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
8839 TREE_OPERAND (base0, 0),
8840 TREE_OPERAND (base1, 0))
8841 : fold_binary_loc (loc, MINUS_EXPR, type,
8842 fold_convert (type,
8843 TREE_OPERAND (base0, 0)),
8844 fold_convert (type,
8845 TREE_OPERAND (base1, 0)))))
8846 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8848 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8849 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8850 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8851 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
8852 return fold_build2_loc (loc, PLUS_EXPR, type,
8853 base_offset,
8854 fold_build2_loc (loc, MULT_EXPR, type,
8855 diff, esz));
8857 return NULL_TREE;
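/* A worked example (illustrative only): for int a[10], the byte
   difference

     &a[7] - &a[2]

   has equal bases, so it folds to (7 - 2) * sizeof (int), i.e. 20 on
   a target with 4-byte int; the recursion additionally handles nested
   references such as &m[3][7] - &m[3][2]. */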
8860 /* If the real or vector real constant CST of type TYPE has an exact
8861 inverse, return it, else return NULL. */
8863 tree
8864 exact_inverse (tree type, tree cst)
8866 REAL_VALUE_TYPE r;
8867 tree unit_type;
8868 machine_mode mode;
8870 switch (TREE_CODE (cst))
8872 case REAL_CST:
8873 r = TREE_REAL_CST (cst);
8875 if (exact_real_inverse (TYPE_MODE (type), &r))
8876 return build_real (type, r);
8878 return NULL_TREE;
8880 case VECTOR_CST:
8882 unit_type = TREE_TYPE (type);
8883 mode = TYPE_MODE (unit_type);
8885 tree_vector_builder elts;
8886 if (!elts.new_unary_operation (type, cst, false))
8887 return NULL_TREE;
8888 unsigned int count = elts.encoded_nelts ();
8889 for (unsigned int i = 0; i < count; ++i)
8891 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8892 if (!exact_real_inverse (mode, &r))
8893 return NULL_TREE;
8894 elts.quick_push (build_real (unit_type, r));
8897 return elts.build ();
8900 default:
8901 return NULL_TREE;
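/* For example (illustrative only): 2.0 yields the exact inverse 0.5
   and the vector { 2.0, 4.0 } yields { 0.5, 0.25 }, while 3.0 yields
   NULL_TREE because 1.0/3.0 has no exact binary floating-point
   representation. */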
8905 /* Mask out the tz least significant bits of X of type TYPE where
8906 tz is the number of trailing zeroes in Y. */
8907 static wide_int
8908 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8910 int tz = wi::ctz (y);
8911 if (tz > 0)
8912 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8913 return x;
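/* A worked example (illustrative only): for X = 0b1111 and
   Y = 0b1000, Y has three trailing zeroes, so the result is
   X & ~0b111 = 0b1000 -- the low bits of X that a bitwise AND with Y
   would force to zero are discarded up front. */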
8916 /* Return true when T is an address and is known to be nonzero.
8917 For floating point we further ensure that T is not denormal.
8918 Similar logic is present in nonzero_address in rtlanal.h.
8920 If the return value is based on the assumption that signed overflow
8921 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8922 change *STRICT_OVERFLOW_P. */
8924 static bool
8925 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8927 tree type = TREE_TYPE (t);
8928 enum tree_code code;
8930 /* Doing something useful for floating point would need more work. */
8931 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8932 return false;
8934 code = TREE_CODE (t);
8935 switch (TREE_CODE_CLASS (code))
8937 case tcc_unary:
8938 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8939 strict_overflow_p);
8940 case tcc_binary:
8941 case tcc_comparison:
8942 return tree_binary_nonzero_warnv_p (code, type,
8943 TREE_OPERAND (t, 0),
8944 TREE_OPERAND (t, 1),
8945 strict_overflow_p);
8946 case tcc_constant:
8947 case tcc_declaration:
8948 case tcc_reference:
8949 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8951 default:
8952 break;
8955 switch (code)
8957 case TRUTH_NOT_EXPR:
8958 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8959 strict_overflow_p);
8961 case TRUTH_AND_EXPR:
8962 case TRUTH_OR_EXPR:
8963 case TRUTH_XOR_EXPR:
8964 return tree_binary_nonzero_warnv_p (code, type,
8965 TREE_OPERAND (t, 0),
8966 TREE_OPERAND (t, 1),
8967 strict_overflow_p);
8969 case COND_EXPR:
8970 case CONSTRUCTOR:
8971 case OBJ_TYPE_REF:
8972 case ASSERT_EXPR:
8973 case ADDR_EXPR:
8974 case WITH_SIZE_EXPR:
8975 case SSA_NAME:
8976 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8978 case COMPOUND_EXPR:
8979 case MODIFY_EXPR:
8980 case BIND_EXPR:
8981 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
8982 strict_overflow_p);
8984 case SAVE_EXPR:
8985 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
8986 strict_overflow_p);
8988 case CALL_EXPR:
8990 tree fndecl = get_callee_fndecl (t);
8991 if (!fndecl) return false;
8992 if (flag_delete_null_pointer_checks && !flag_check_new
8993 && DECL_IS_OPERATOR_NEW (fndecl)
8994 && !TREE_NOTHROW (fndecl))
8995 return true;
8996 if (flag_delete_null_pointer_checks
8997 && lookup_attribute ("returns_nonnull",
8998 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
8999 return true;
9000 return alloca_call_p (t);
9003 default:
9004 break;
9006 return false;
9009 /* Return true when T is an address and is known to be nonzero.
9010 Handle warnings about undefined signed overflow. */
9012 bool
9013 tree_expr_nonzero_p (tree t)
9015 bool ret, strict_overflow_p;
9017 strict_overflow_p = false;
9018 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9019 if (strict_overflow_p)
9020 fold_overflow_warning (("assuming signed overflow does not occur when "
9021 "determining that expression is always "
9022 "non-zero"),
9023 WARN_STRICT_OVERFLOW_MISC);
9024 return ret;
9027 /* Return true if T is known not to be equal to an integer W. */
9029 bool
9030 expr_not_equal_to (tree t, const wide_int &w)
9032 wide_int min, max, nz;
9033 value_range_type rtype;
9034 switch (TREE_CODE (t))
9036 case INTEGER_CST:
9037 return wi::to_wide (t) != w;
9039 case SSA_NAME:
9040 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9041 return false;
9042 rtype = get_range_info (t, &min, &max);
9043 if (rtype == VR_RANGE)
9045 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9046 return true;
9047 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9048 return true;
9050 else if (rtype == VR_ANTI_RANGE
9051 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9052 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9053 return true;
9054 /* If T has some known zero bits and W has any of those bits set,
9055 then T is known not to be equal to W. */
9056 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9057 TYPE_PRECISION (TREE_TYPE (t))), 0))
9058 return true;
9059 return false;
9061 default:
9062 return false;
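/* For example (illustrative only): if SSA name T has the recorded
   value range [10, 20], then expr_not_equal_to (T, 5) is true; and if
   T's nonzero-bits mask is 0xfe (T is known to be even), then
   expr_not_equal_to (T, 3) is true, since W = 3 has bit 0 set where T
   is known to be zero. */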
9066 /* Fold a binary expression of code CODE and type TYPE with operands
9067 OP0 and OP1. LOC is the location of the resulting expression.
9068 Return the folded expression if folding is successful. Otherwise,
9069 return NULL_TREE. */
9071 tree
9072 fold_binary_loc (location_t loc,
9073 enum tree_code code, tree type, tree op0, tree op1)
9075 enum tree_code_class kind = TREE_CODE_CLASS (code);
9076 tree arg0, arg1, tem;
9077 tree t1 = NULL_TREE;
9078 bool strict_overflow_p;
9079 unsigned int prec;
9081 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9082 && TREE_CODE_LENGTH (code) == 2
9083 && op0 != NULL_TREE
9084 && op1 != NULL_TREE);
9086 arg0 = op0;
9087 arg1 = op1;
9089 /* Strip any conversions that don't change the mode. This is
9090 safe for every expression, except for a comparison expression
9091 because its signedness is derived from its operands. So, in
9092 the latter case, only strip conversions that don't change the
9093 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9094 preserved.
9096 Note that this is done as an internal manipulation within the
9097 constant folder, in order to find the simplest representation
9098 of the arguments so that their form can be studied. In any
9099 case, the appropriate type conversions should be put back in
9100 the tree that will get out of the constant folder. */
9102 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9104 STRIP_SIGN_NOPS (arg0);
9105 STRIP_SIGN_NOPS (arg1);
9107 else
9109 STRIP_NOPS (arg0);
9110 STRIP_NOPS (arg1);
9113 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9114 constant but we can't do arithmetic on them. */
9115 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9117 tem = const_binop (code, type, arg0, arg1);
9118 if (tem != NULL_TREE)
9120 if (TREE_TYPE (tem) != type)
9121 tem = fold_convert_loc (loc, type, tem);
9122 return tem;
9126 /* If this is a commutative operation, and ARG0 is a constant, move it
9127 to ARG1 to reduce the number of tests below. */
9128 if (commutative_tree_code (code)
9129 && tree_swap_operands_p (arg0, arg1))
9130 return fold_build2_loc (loc, code, type, op1, op0);
9132 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9133 to ARG1 to reduce the number of tests below. */
9134 if (kind == tcc_comparison
9135 && tree_swap_operands_p (arg0, arg1))
9136 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9138 tem = generic_simplify (loc, code, type, op0, op1);
9139 if (tem)
9140 return tem;
9142 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9144 First check for cases where an arithmetic operation is applied to a
9145 compound, conditional, or comparison operation. Push the arithmetic
9146 operation inside the compound or conditional to see if any folding
9147 can then be done. Convert comparison to conditional for this purpose.
9148 This also optimizes non-constant cases that used to be done in
9149 expand_expr.
9151 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9152 where one of the operands is a comparison and the other is a comparison, a
9153 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9154 code below would make the expression more complex. Change it to a
9155 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9156 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9158 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9159 || code == EQ_EXPR || code == NE_EXPR)
9160 && TREE_CODE (type) != VECTOR_TYPE
9161 && ((truth_value_p (TREE_CODE (arg0))
9162 && (truth_value_p (TREE_CODE (arg1))
9163 || (TREE_CODE (arg1) == BIT_AND_EXPR
9164 && integer_onep (TREE_OPERAND (arg1, 1)))))
9165 || (truth_value_p (TREE_CODE (arg1))
9166 && (truth_value_p (TREE_CODE (arg0))
9167 || (TREE_CODE (arg0) == BIT_AND_EXPR
9168 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9170 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9171 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9172 : TRUTH_XOR_EXPR,
9173 boolean_type_node,
9174 fold_convert_loc (loc, boolean_type_node, arg0),
9175 fold_convert_loc (loc, boolean_type_node, arg1));
9177 if (code == EQ_EXPR)
9178 tem = invert_truthvalue_loc (loc, tem);
9180 return fold_convert_loc (loc, type, tem);
9183 if (TREE_CODE_CLASS (code) == tcc_binary
9184 || TREE_CODE_CLASS (code) == tcc_comparison)
9186 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9188 tem = fold_build2_loc (loc, code, type,
9189 fold_convert_loc (loc, TREE_TYPE (op0),
9190 TREE_OPERAND (arg0, 1)), op1);
9191 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9192 tem);
9194 if (TREE_CODE (arg1) == COMPOUND_EXPR)
9196 tem = fold_build2_loc (loc, code, type, op0,
9197 fold_convert_loc (loc, TREE_TYPE (op1),
9198 TREE_OPERAND (arg1, 1)));
9199 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9200 tem);
9203 if (TREE_CODE (arg0) == COND_EXPR
9204 || TREE_CODE (arg0) == VEC_COND_EXPR
9205 || COMPARISON_CLASS_P (arg0))
9207 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9208 arg0, arg1,
9209 /*cond_first_p=*/1);
9210 if (tem != NULL_TREE)
9211 return tem;
9214 if (TREE_CODE (arg1) == COND_EXPR
9215 || TREE_CODE (arg1) == VEC_COND_EXPR
9216 || COMPARISON_CLASS_P (arg1))
9218 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9219 arg1, arg0,
9220 /*cond_first_p=*/0);
9221 if (tem != NULL_TREE)
9222 return tem;
9226 switch (code)
9228 case MEM_REF:
9229 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9230 if (TREE_CODE (arg0) == ADDR_EXPR
9231 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9233 tree iref = TREE_OPERAND (arg0, 0);
9234 return fold_build2 (MEM_REF, type,
9235 TREE_OPERAND (iref, 0),
9236 int_const_binop (PLUS_EXPR, arg1,
9237 TREE_OPERAND (iref, 1)));
9240 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9241 if (TREE_CODE (arg0) == ADDR_EXPR
9242 && handled_component_p (TREE_OPERAND (arg0, 0)))
9244 tree base;
9245 HOST_WIDE_INT coffset;
9246 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9247 &coffset);
9248 if (!base)
9249 return NULL_TREE;
9250 return fold_build2 (MEM_REF, type,
9251 build_fold_addr_expr (base),
9252 int_const_binop (PLUS_EXPR, arg1,
9253 size_int (coffset)));
9256 return NULL_TREE;
9258 case POINTER_PLUS_EXPR:
9259 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9260 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9261 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9262 return fold_convert_loc (loc, type,
9263 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9264 fold_convert_loc (loc, sizetype,
9265 arg1),
9266 fold_convert_loc (loc, sizetype,
9267 arg0)));
9269 return NULL_TREE;
9271 case PLUS_EXPR:
9272 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9274 /* X + (X / CST) * -CST is X % CST. */
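	/* E.g. with truncating integer division,
	   X + (X / 4) * -4 == X - (X / 4) * 4 == X % 4.  */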
9275 if (TREE_CODE (arg1) == MULT_EXPR
9276 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9277 && operand_equal_p (arg0,
9278 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9280 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9281 tree cst1 = TREE_OPERAND (arg1, 1);
9282 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9283 cst1, cst0);
9284 if (sum && integer_zerop (sum))
9285 return fold_convert_loc (loc, type,
9286 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9287 TREE_TYPE (arg0), arg0,
9288 cst0));
9292 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9293 one. Make sure the type is not saturating and has the signedness of
9294 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9295 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9296 if ((TREE_CODE (arg0) == MULT_EXPR
9297 || TREE_CODE (arg1) == MULT_EXPR)
9298 && !TYPE_SATURATING (type)
9299 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9300 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9301 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9303 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9304 if (tem)
9305 return tem;
9308 if (! FLOAT_TYPE_P (type))
9310 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9311 (plus (plus (mult) (mult)) (foo)) so that we can
9312 take advantage of the factoring cases below. */
9313 if (ANY_INTEGRAL_TYPE_P (type)
9314 && TYPE_OVERFLOW_WRAPS (type)
9315 && (((TREE_CODE (arg0) == PLUS_EXPR
9316 || TREE_CODE (arg0) == MINUS_EXPR)
9317 && TREE_CODE (arg1) == MULT_EXPR)
9318 || ((TREE_CODE (arg1) == PLUS_EXPR
9319 || TREE_CODE (arg1) == MINUS_EXPR)
9320 && TREE_CODE (arg0) == MULT_EXPR)))
9322 tree parg0, parg1, parg, marg;
9323 enum tree_code pcode;
9325 if (TREE_CODE (arg1) == MULT_EXPR)
9326 parg = arg0, marg = arg1;
9327 else
9328 parg = arg1, marg = arg0;
9329 pcode = TREE_CODE (parg);
9330 parg0 = TREE_OPERAND (parg, 0);
9331 parg1 = TREE_OPERAND (parg, 1);
9332 STRIP_NOPS (parg0);
9333 STRIP_NOPS (parg1);
9335 if (TREE_CODE (parg0) == MULT_EXPR
9336 && TREE_CODE (parg1) != MULT_EXPR)
9337 return fold_build2_loc (loc, pcode, type,
9338 fold_build2_loc (loc, PLUS_EXPR, type,
9339 fold_convert_loc (loc, type,
9340 parg0),
9341 fold_convert_loc (loc, type,
9342 marg)),
9343 fold_convert_loc (loc, type, parg1));
9344 if (TREE_CODE (parg0) != MULT_EXPR
9345 && TREE_CODE (parg1) == MULT_EXPR)
9346 return
9347 fold_build2_loc (loc, PLUS_EXPR, type,
9348 fold_convert_loc (loc, type, parg0),
9349 fold_build2_loc (loc, pcode, type,
9350 fold_convert_loc (loc, type, marg),
9351 fold_convert_loc (loc, type,
9352 parg1)));
9355 else
9357 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9358 to __complex__ ( x, y ). This is not the same for SNaNs or
9359 if signed zeros are involved. */
9360 if (!HONOR_SNANS (element_mode (arg0))
9361 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9362 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9364 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9365 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9366 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9367 bool arg0rz = false, arg0iz = false;
9368 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9369 || (arg0i && (arg0iz = real_zerop (arg0i))))
9371 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9372 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9373 if (arg0rz && arg1i && real_zerop (arg1i))
9375 tree rp = arg1r ? arg1r
9376 : build1 (REALPART_EXPR, rtype, arg1);
9377 tree ip = arg0i ? arg0i
9378 : build1 (IMAGPART_EXPR, rtype, arg0);
9379 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9381 else if (arg0iz && arg1r && real_zerop (arg1r))
9383 tree rp = arg0r ? arg0r
9384 : build1 (REALPART_EXPR, rtype, arg0);
9385 tree ip = arg1i ? arg1i
9386 : build1 (IMAGPART_EXPR, rtype, arg1);
9387 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9392 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9393 We associate floats only if the user has specified
9394 -fassociative-math. */
9395 if (flag_associative_math
9396 && TREE_CODE (arg1) == PLUS_EXPR
9397 && TREE_CODE (arg0) != MULT_EXPR)
9399 tree tree10 = TREE_OPERAND (arg1, 0);
9400 tree tree11 = TREE_OPERAND (arg1, 1);
9401 if (TREE_CODE (tree11) == MULT_EXPR
9402 && TREE_CODE (tree10) == MULT_EXPR)
9404 tree tree0;
9405 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9406 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9409 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9410 We associate floats only if the user has specified
9411 -fassociative-math. */
9412 if (flag_associative_math
9413 && TREE_CODE (arg0) == PLUS_EXPR
9414 && TREE_CODE (arg1) != MULT_EXPR)
9416 tree tree00 = TREE_OPERAND (arg0, 0);
9417 tree tree01 = TREE_OPERAND (arg0, 1);
9418 if (TREE_CODE (tree01) == MULT_EXPR
9419 && TREE_CODE (tree00) == MULT_EXPR)
9421 tree tree0;
9422 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9423 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9428 bit_rotate:
9429 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9430 is a rotate of A by C1 bits. */
9431 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9432 is a rotate of A by B bits.
9433 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
9434 though in this case CODE must be | and not + or ^, otherwise
9435 it doesn't return A when B is 0. */
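      /* For illustration, with a 32-bit unsigned A:
	   (A << 8) + (A >> 24) is A lrotate 8 (8 + 24 == 32), and
	   (A << B) | (A >> (-B & 31)) is A lrotate B; the | form still
	   yields A when B is 0 because (A << 0) | (A >> 0) == A.  */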
9437 enum tree_code code0, code1;
9438 tree rtype;
9439 code0 = TREE_CODE (arg0);
9440 code1 = TREE_CODE (arg1);
9441 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9442 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9443 && operand_equal_p (TREE_OPERAND (arg0, 0),
9444 TREE_OPERAND (arg1, 0), 0)
9445 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9446 TYPE_UNSIGNED (rtype))
9447 /* Only create rotates in complete modes. Other cases are not
9448 expanded properly. */
9449 && (element_precision (rtype)
9450 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9452 tree tree01, tree11;
9453 tree orig_tree01, orig_tree11;
9454 enum tree_code code01, code11;
9456 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
9457 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
9458 STRIP_NOPS (tree01);
9459 STRIP_NOPS (tree11);
9460 code01 = TREE_CODE (tree01);
9461 code11 = TREE_CODE (tree11);
9462 if (code11 != MINUS_EXPR
9463 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
9465 std::swap (code0, code1);
9466 std::swap (code01, code11);
9467 std::swap (tree01, tree11);
9468 std::swap (orig_tree01, orig_tree11);
9470 if (code01 == INTEGER_CST
9471 && code11 == INTEGER_CST
9472 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9473 == element_precision (rtype)))
9475 tem = build2_loc (loc, LROTATE_EXPR,
9476 rtype, TREE_OPERAND (arg0, 0),
9477 code0 == LSHIFT_EXPR
9478 ? orig_tree01 : orig_tree11);
9479 return fold_convert_loc (loc, type, tem);
9481 else if (code11 == MINUS_EXPR)
9483 tree tree110, tree111;
9484 tree110 = TREE_OPERAND (tree11, 0);
9485 tree111 = TREE_OPERAND (tree11, 1);
9486 STRIP_NOPS (tree110);
9487 STRIP_NOPS (tree111);
9488 if (TREE_CODE (tree110) == INTEGER_CST
9489 && 0 == compare_tree_int (tree110,
9490 element_precision (rtype))
9491 && operand_equal_p (tree01, tree111, 0))
9493 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9494 ? LROTATE_EXPR : RROTATE_EXPR),
9495 rtype, TREE_OPERAND (arg0, 0),
9496 orig_tree01);
9497 return fold_convert_loc (loc, type, tem);
9500 else if (code == BIT_IOR_EXPR
9501 && code11 == BIT_AND_EXPR
9502 && pow2p_hwi (element_precision (rtype)))
9504 tree tree110, tree111;
9505 tree110 = TREE_OPERAND (tree11, 0);
9506 tree111 = TREE_OPERAND (tree11, 1);
9507 STRIP_NOPS (tree110);
9508 STRIP_NOPS (tree111);
9509 if (TREE_CODE (tree110) == NEGATE_EXPR
9510 && TREE_CODE (tree111) == INTEGER_CST
9511 && 0 == compare_tree_int (tree111,
9512 element_precision (rtype) - 1)
9513 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
9515 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9516 ? LROTATE_EXPR : RROTATE_EXPR),
9517 rtype, TREE_OPERAND (arg0, 0),
9518 orig_tree01);
9519 return fold_convert_loc (loc, type, tem);
9525 associate:
9526 /* In most languages, we can't associate operations on floats through
9527 parentheses. Rather than remember where the parentheses were, we
9528 don't associate floats at all, unless the user has specified
9529 -fassociative-math.
9530 Also, we need to make sure the type is not saturating. */
9532 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9533 && !TYPE_SATURATING (type))
9535 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
9536 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
9537 tree atype = type;
9538 bool ok = true;
9540 /* Split both trees into variables, constants, and literals. Then
9541 associate each group together, the constants with literals,
9542 then the result with variables. This increases the chances of
9543 literals being recombined later and of generating relocatable
9544 expressions for the sum of a constant and literal. */
9545 var0 = split_tree (arg0, type, code,
9546 &minus_var0, &con0, &minus_con0,
9547 &lit0, &minus_lit0, 0);
9548 var1 = split_tree (arg1, type, code,
9549 &minus_var1, &con1, &minus_con1,
9550 &lit1, &minus_lit1, code == MINUS_EXPR);
9552 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9553 if (code == MINUS_EXPR)
9554 code = PLUS_EXPR;
9556 /* With undefined overflow prefer doing association in a type
9557 which wraps on overflow, if that is one of the operand types. */
9558 if (POINTER_TYPE_P (type)
9559 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9561 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9562 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9563 atype = TREE_TYPE (arg0);
9564 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9565 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9566 atype = TREE_TYPE (arg1);
9567 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9570 /* With undefined overflow we can only associate constants with one
9571 variable, and constants whose association doesn't overflow. */
9572 if (POINTER_TYPE_P (atype)
9573 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9575 if ((var0 && var1) || (minus_var0 && minus_var1))
9577 /* ??? If split_tree would handle NEGATE_EXPR we could
9578 simply reject these cases and the allowed cases would
9579 be the var0/minus_var1 ones. */
9580 tree tmp0 = var0 ? var0 : minus_var0;
9581 tree tmp1 = var1 ? var1 : minus_var1;
9582 bool one_neg = false;
9584 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9586 tmp0 = TREE_OPERAND (tmp0, 0);
9587 one_neg = !one_neg;
9589 if (CONVERT_EXPR_P (tmp0)
9590 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9591 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9592 <= TYPE_PRECISION (atype)))
9593 tmp0 = TREE_OPERAND (tmp0, 0);
9594 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9596 tmp1 = TREE_OPERAND (tmp1, 0);
9597 one_neg = !one_neg;
9599 if (CONVERT_EXPR_P (tmp1)
9600 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9601 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9602 <= TYPE_PRECISION (atype)))
9603 tmp1 = TREE_OPERAND (tmp1, 0);
9604 /* The only case we can still associate with two variables
9605 is if they cancel out. */
9606 if (!one_neg
9607 || !operand_equal_p (tmp0, tmp1, 0))
9608 ok = false;
9610 else if ((var0 && minus_var1
9611 && ! operand_equal_p (var0, minus_var1, 0))
9612 || (minus_var0 && var1
9613 && ! operand_equal_p (minus_var0, var1, 0)))
9614 ok = false;
9617 /* Only do something if we found more than two objects. Otherwise,
9618 nothing has changed and we risk infinite recursion. */
9619 if (ok
9620 && (2 < ((var0 != 0) + (var1 != 0)
9621 + (minus_var0 != 0) + (minus_var1 != 0)
9622 + (con0 != 0) + (con1 != 0)
9623 + (minus_con0 != 0) + (minus_con1 != 0)
9624 + (lit0 != 0) + (lit1 != 0)
9625 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9627 var0 = associate_trees (loc, var0, var1, code, atype);
9628 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
9629 code, atype);
9630 con0 = associate_trees (loc, con0, con1, code, atype);
9631 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
9632 code, atype);
9633 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9634 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9635 code, atype);
9637 if (minus_var0 && var0)
9639 var0 = associate_trees (loc, var0, minus_var0,
9640 MINUS_EXPR, atype);
9641 minus_var0 = 0;
9643 if (minus_con0 && con0)
9645 con0 = associate_trees (loc, con0, minus_con0,
9646 MINUS_EXPR, atype);
9647 minus_con0 = 0;
9650 /* Preserve the MINUS_EXPR if the negative part of the literal is
9651 greater than the positive part. Otherwise, the multiplicative
9652 folding code (i.e. extract_muldiv) may be fooled when
9653 unsigned constants are subtracted, as in the following
9654 example: ((X*2 + 4) - 8U)/2. */
9655 if (minus_lit0 && lit0)
9657 if (TREE_CODE (lit0) == INTEGER_CST
9658 && TREE_CODE (minus_lit0) == INTEGER_CST
9659 && tree_int_cst_lt (lit0, minus_lit0)
9660 /* But avoid ending up with only negated parts. */
9661 && (var0 || con0))
9663 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9664 MINUS_EXPR, atype);
9665 lit0 = 0;
9667 else
9669 lit0 = associate_trees (loc, lit0, minus_lit0,
9670 MINUS_EXPR, atype);
9671 minus_lit0 = 0;
9675 /* Don't introduce overflows through reassociation. */
9676 if ((lit0 && TREE_OVERFLOW_P (lit0))
9677 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
9678 return NULL_TREE;
9680 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
9681 con0 = associate_trees (loc, con0, lit0, code, atype);
9682 lit0 = 0;
9683 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
9684 code, atype);
9685 minus_lit0 = 0;
9687 /* Eliminate minus_con0. */
9688 if (minus_con0)
9690 if (con0)
9691 con0 = associate_trees (loc, con0, minus_con0,
9692 MINUS_EXPR, atype);
9693 else if (var0)
9694 var0 = associate_trees (loc, var0, minus_con0,
9695 MINUS_EXPR, atype);
9696 else
9697 gcc_unreachable ();
9698 minus_con0 = 0;
9701 /* Eliminate minus_var0. */
9702 if (minus_var0)
9704 if (con0)
9705 con0 = associate_trees (loc, con0, minus_var0,
9706 MINUS_EXPR, atype);
9707 else
9708 gcc_unreachable ();
9709 minus_var0 = 0;
9712 return
9713 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9714 code, atype));
9718 return NULL_TREE;
9720 case POINTER_DIFF_EXPR:
9721 case MINUS_EXPR:
9722 /* Fold &a[i] - &a[j] to i-j. */
9723 if (TREE_CODE (arg0) == ADDR_EXPR
9724 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9725 && TREE_CODE (arg1) == ADDR_EXPR
9726 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9728 tree tem = fold_addr_of_array_ref_difference (loc, type,
9729 TREE_OPERAND (arg0, 0),
9730 TREE_OPERAND (arg1, 0),
9731 code
9732 == POINTER_DIFF_EXPR);
9733 if (tem)
9734 return tem;
9737 /* Further transformations are not for pointers. */
9738 if (code == POINTER_DIFF_EXPR)
9739 return NULL_TREE;
9741 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9742 if (TREE_CODE (arg0) == NEGATE_EXPR
9743 && negate_expr_p (op1))
9744 return fold_build2_loc (loc, MINUS_EXPR, type,
9745 negate_expr (op1),
9746 fold_convert_loc (loc, type,
9747 TREE_OPERAND (arg0, 0)));
9749 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9750 __complex__ ( x, -y ). This is not the same for SNaNs or if
9751 signed zeros are involved. */
9752 if (!HONOR_SNANS (element_mode (arg0))
9753 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9754 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9756 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9757 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9758 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9759 bool arg0rz = false, arg0iz = false;
9760 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9761 || (arg0i && (arg0iz = real_zerop (arg0i))))
9763 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9764 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9765 if (arg0rz && arg1i && real_zerop (arg1i))
9767 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9768 arg1r ? arg1r
9769 : build1 (REALPART_EXPR, rtype, arg1));
9770 tree ip = arg0i ? arg0i
9771 : build1 (IMAGPART_EXPR, rtype, arg0);
9772 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9774 else if (arg0iz && arg1r && real_zerop (arg1r))
9776 tree rp = arg0r ? arg0r
9777 : build1 (REALPART_EXPR, rtype, arg0);
9778 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9779 arg1i ? arg1i
9780 : build1 (IMAGPART_EXPR, rtype, arg1));
9781 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9786 /* A - B -> A + (-B) if B is easily negatable. */
9787 if (negate_expr_p (op1)
9788 && ! TYPE_OVERFLOW_SANITIZED (type)
9789 && ((FLOAT_TYPE_P (type)
9790 /* Avoid this transformation if B is a positive REAL_CST. */
9791 && (TREE_CODE (op1) != REAL_CST
9792 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9793 || INTEGRAL_TYPE_P (type)))
9794 return fold_build2_loc (loc, PLUS_EXPR, type,
9795 fold_convert_loc (loc, type, arg0),
9796 negate_expr (op1));
9798 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9799 one. Make sure the type is not saturating and has the signedness of
9800 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9801 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9802 if ((TREE_CODE (arg0) == MULT_EXPR
9803 || TREE_CODE (arg1) == MULT_EXPR)
9804 && !TYPE_SATURATING (type)
9805 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9806 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9807 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9809 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9810 if (tem)
9811 return tem;
9814 goto associate;
9816 case MULT_EXPR:
9817 if (! FLOAT_TYPE_P (type))
9819 /* Transform x * -C into -x * C if x is easily negatable. */
9820 if (TREE_CODE (op1) == INTEGER_CST
9821 && tree_int_cst_sgn (op1) == -1
9822 && negate_expr_p (op0)
9823 && negate_expr_p (op1)
9824 && (tem = negate_expr (op1)) != op1
9825 && ! TREE_OVERFLOW (tem))
9826 return fold_build2_loc (loc, MULT_EXPR, type,
9827 fold_convert_loc (loc, type,
9828 negate_expr (op0)), tem);
9830 strict_overflow_p = false;
9831 if (TREE_CODE (arg1) == INTEGER_CST
9832 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9833 &strict_overflow_p)))
9835 if (strict_overflow_p)
9836 fold_overflow_warning (("assuming signed overflow does not "
9837 "occur when simplifying "
9838 "multiplication"),
9839 WARN_STRICT_OVERFLOW_MISC);
9840 return fold_convert_loc (loc, type, tem);
9843 /* Optimize z * conj(z) for integer complex numbers. */
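	  /* E.g. for z == a + bi, z * conj(z) == (a + bi)(a - bi)
	     == a*a + b*b, a complex value with zero imaginary part.  */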
9844 if (TREE_CODE (arg0) == CONJ_EXPR
9845 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9846 return fold_mult_zconjz (loc, type, arg1);
9847 if (TREE_CODE (arg1) == CONJ_EXPR
9848 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9849 return fold_mult_zconjz (loc, type, arg0);
9851 else
9853 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9854 This is not the same for NaNs or if signed zeros are
9855 involved. */
9856 if (!HONOR_NANS (arg0)
9857 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9858 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9859 && TREE_CODE (arg1) == COMPLEX_CST
9860 && real_zerop (TREE_REALPART (arg1)))
9862 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9863 if (real_onep (TREE_IMAGPART (arg1)))
9864 return
9865 fold_build2_loc (loc, COMPLEX_EXPR, type,
9866 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9867 rtype, arg0)),
9868 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9869 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9870 return
9871 fold_build2_loc (loc, COMPLEX_EXPR, type,
9872 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9873 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9874 rtype, arg0)));
9877 /* Optimize z * conj(z) for floating point complex numbers.
9878 Guarded by flag_unsafe_math_optimizations as non-finite
9879 imaginary components don't produce scalar results. */
9880 if (flag_unsafe_math_optimizations
9881 && TREE_CODE (arg0) == CONJ_EXPR
9882 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9883 return fold_mult_zconjz (loc, type, arg1);
9884 if (flag_unsafe_math_optimizations
9885 && TREE_CODE (arg1) == CONJ_EXPR
9886 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9887 return fold_mult_zconjz (loc, type, arg0);
9889 goto associate;
9891 case BIT_IOR_EXPR:
9892 /* Canonicalize (X & C1) | C2. */
9893 if (TREE_CODE (arg0) == BIT_AND_EXPR
9894 && TREE_CODE (arg1) == INTEGER_CST
9895 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9897 int width = TYPE_PRECISION (type), w;
9898 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
9899 wide_int c2 = wi::to_wide (arg1);
9901 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
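	  /* E.g. (X & 0x0F) | 0xFF is simply 0xFF, since every bit of
	     C1 == 0x0F is already set in C2 == 0xFF; only the side
	     effects of X survive.  */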
9902 if ((c1 & c2) == c1)
9903 return omit_one_operand_loc (loc, type, arg1,
9904 TREE_OPERAND (arg0, 0));
9906 wide_int msk = wi::mask (width, false,
9907 TYPE_PRECISION (TREE_TYPE (arg1)));
9909 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
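	  /* E.g. for 8-bit operands, (X & 0xF0) | 0x0F becomes X | 0x0F:
	     the bits cleared by C1 are exactly the bits forced on by C2.  */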
9910 if (wi::bit_and_not (msk, c1 | c2) == 0)
9912 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9913 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9916 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9917 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9918 mode which allows further optimizations. */
9919 c1 &= msk;
9920 c2 &= msk;
9921 wide_int c3 = wi::bit_and_not (c1, c2);
9922 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9924 wide_int mask = wi::mask (w, false,
9925 TYPE_PRECISION (type));
9926 if (((c1 | c2) & mask) == mask
9927 && wi::bit_and_not (c1, mask) == 0)
9929 c3 = mask;
9930 break;
9934 if (c3 != c1)
9936 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9937 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
9938 wide_int_to_tree (type, c3));
9939 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9943 /* See if this can be simplified into a rotate first. If that
9944 is unsuccessful, continue in the association code. */
9945 goto bit_rotate;
9947 case BIT_XOR_EXPR:
9948 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9949 if (TREE_CODE (arg0) == BIT_AND_EXPR
9950 && INTEGRAL_TYPE_P (type)
9951 && integer_onep (TREE_OPERAND (arg0, 1))
9952 && integer_onep (arg1))
9953 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9954 build_zero_cst (TREE_TYPE (arg0)));
9956 /* See if this can be simplified into a rotate first. If that
9957 is unsuccessful, continue in the association code. */
9958 goto bit_rotate;
9960 case BIT_AND_EXPR:
9961 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9962 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9963 && INTEGRAL_TYPE_P (type)
9964 && integer_onep (TREE_OPERAND (arg0, 1))
9965 && integer_onep (arg1))
9967 tree tem2;
9968 tem = TREE_OPERAND (arg0, 0);
9969 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9970 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9971 tem, tem2);
9972 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9973 build_zero_cst (TREE_TYPE (tem)));
9975 /* Fold ~X & 1 as (X & 1) == 0. */
9976 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9977 && INTEGRAL_TYPE_P (type)
9978 && integer_onep (arg1))
9980 tree tem2;
9981 tem = TREE_OPERAND (arg0, 0);
9982 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9983 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9984 tem, tem2);
9985 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9986 build_zero_cst (TREE_TYPE (tem)));
9988 /* Fold !X & 1 as X == 0. */
9989 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9990 && integer_onep (arg1))
9992 tem = TREE_OPERAND (arg0, 0);
9993 return fold_build2_loc (loc, EQ_EXPR, type, tem,
9994 build_zero_cst (TREE_TYPE (tem)));
9997 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
9998 multiple of 1 << CST. */
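      /* E.g. (X * 8) & -8 folds to X * 8: -8 == -(1 << 3) only clears
	 the three low bits, which are already zero in any multiple of 8.  */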
9999 if (TREE_CODE (arg1) == INTEGER_CST)
10001 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10002 wide_int ncst1 = -cst1;
10003 if ((cst1 & ncst1) == ncst1
10004 && multiple_of_p (type, arg0,
10005 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10006 return fold_convert_loc (loc, type, arg0);
10009 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10010 bits from CST2. */
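      /* E.g. (X * 4) & 0xFE: the product always has its two low bits
	 clear, so this becomes (X * 4) & 0xFC; a full mask such as 0xFF
	 is deliberately left alone by the popcount check below.  */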
10011 if (TREE_CODE (arg1) == INTEGER_CST
10012 && TREE_CODE (arg0) == MULT_EXPR
10013 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10015 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
10016 wide_int masked
10017 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
10019 if (masked == 0)
10020 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10021 arg0, arg1);
10022 else if (masked != warg1)
10024 /* Avoid the transform if arg1 is a mask of some
10025 mode which allows further optimizations. */
10026 int pop = wi::popcount (warg1);
10027 if (!(pop >= BITS_PER_UNIT
10028 && pow2p_hwi (pop)
10029 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10030 return fold_build2_loc (loc, code, type, op0,
10031 wide_int_to_tree (type, masked));
10035 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10036 ((A & N) + B) & M -> (A + B) & M
10037 Similarly if (N & M) == 0,
10038 ((A | N) + B) & M -> (A + B) & M
10039 and for - instead of + (or unary - instead of +)
10040 and/or ^ instead of |.
10041 If B is constant and (B & M) == 0, fold into A & M. */
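      /* For instance (illustrative values): with M == 0xFF and N == 0xFF,
	 ((A & 0xFF) + B) & 0xFF folds to (A + B) & 0xFF, because the low
	 eight bits of a sum depend only on the low eight bits of its
	 addends.  */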
10042 if (TREE_CODE (arg1) == INTEGER_CST)
10044 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10045 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10046 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10047 && (TREE_CODE (arg0) == PLUS_EXPR
10048 || TREE_CODE (arg0) == MINUS_EXPR
10049 || TREE_CODE (arg0) == NEGATE_EXPR)
10050 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10051 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10053 tree pmop[2];
10054 int which = 0;
10055 wide_int cst0;
10057 /* Now we know that arg0 is (C + D) or (C - D) or
10058 -C and arg1 (M) is (1LL << cst) - 1.
10059 Store C into PMOP[0] and D into PMOP[1]. */
10060 pmop[0] = TREE_OPERAND (arg0, 0);
10061 pmop[1] = NULL;
10062 if (TREE_CODE (arg0) != NEGATE_EXPR)
10064 pmop[1] = TREE_OPERAND (arg0, 1);
10065 which = 1;
10068 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10069 which = -1;
10071 for (; which >= 0; which--)
10072 switch (TREE_CODE (pmop[which]))
10074 case BIT_AND_EXPR:
10075 case BIT_IOR_EXPR:
10076 case BIT_XOR_EXPR:
10077 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10078 != INTEGER_CST)
10079 break;
10080 cst0 = wi::to_wide (TREE_OPERAND (pmop[which], 1)) & cst1;
10081 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10083 if (cst0 != cst1)
10084 break;
10086 else if (cst0 != 0)
10087 break;
10088 /* If C or D is of the form (A & N) where
10089 (N & M) == M, or of the form (A | N) or
10090 (A ^ N) where (N & M) == 0, replace it with A. */
10091 pmop[which] = TREE_OPERAND (pmop[which], 0);
10092 break;
10093 case INTEGER_CST:
10094 /* If C or D is an N where (N & M) == 0, it can be
10095 omitted (assumed 0). */
10096 if ((TREE_CODE (arg0) == PLUS_EXPR
10097 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10098 && (cst1 & wi::to_wide (pmop[which])) == 0)
10099 pmop[which] = NULL;
10100 break;
10101 default:
10102 break;
10105 /* Only build anything new if we optimized one or both arguments
10106 above. */
10107 if (pmop[0] != TREE_OPERAND (arg0, 0)
10108 || (TREE_CODE (arg0) != NEGATE_EXPR
10109 && pmop[1] != TREE_OPERAND (arg0, 1)))
10111 tree utype = TREE_TYPE (arg0);
10112 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10114 /* Perform the operations in a type that has defined
10115 overflow behavior. */
10116 utype = unsigned_type_for (TREE_TYPE (arg0));
10117 if (pmop[0] != NULL)
10118 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10119 if (pmop[1] != NULL)
10120 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10123 if (TREE_CODE (arg0) == NEGATE_EXPR)
10124 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10125 else if (TREE_CODE (arg0) == PLUS_EXPR)
10127 if (pmop[0] != NULL && pmop[1] != NULL)
10128 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10129 pmop[0], pmop[1]);
10130 else if (pmop[0] != NULL)
10131 tem = pmop[0];
10132 else if (pmop[1] != NULL)
10133 tem = pmop[1];
10134 else
10135 return build_int_cst (type, 0);
10137 else if (pmop[0] == NULL)
10138 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10139 else
10140 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10141 pmop[0], pmop[1]);
10142 /* TEM is now the new binary +, - or unary - replacement. */
10143 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10144 fold_convert_loc (loc, utype, arg1));
10145 return fold_convert_loc (loc, type, tem);
10150 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10151 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10152 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10154 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10156 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
10157 if (mask == -1)
10158 return
10159 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10162 goto associate;
10164 case RDIV_EXPR:
10165 /* Don't touch a floating-point divide by zero unless the mode
10166 of the constant can represent infinity. */
10167 if (TREE_CODE (arg1) == REAL_CST
10168 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10169 && real_zerop (arg1))
10170 return NULL_TREE;
10172 /* (-A) / (-B) -> A / B */
10173 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10174 return fold_build2_loc (loc, RDIV_EXPR, type,
10175 TREE_OPERAND (arg0, 0),
10176 negate_expr (arg1));
10177 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10178 return fold_build2_loc (loc, RDIV_EXPR, type,
10179 negate_expr (arg0),
10180 TREE_OPERAND (arg1, 0));
10181 return NULL_TREE;
10183 case TRUNC_DIV_EXPR:
10184 /* Fall through */
10186 case FLOOR_DIV_EXPR:
10187 /* Simplify A / (B << N) where A and B are positive and B is
10188 a power of 2, to A >> (N + log2(B)). */
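      /* E.g. for unsigned A, A / (4 << N) becomes A >> (N + 2),
	 since log2 (4) == 2.  */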
10189 strict_overflow_p = false;
10190 if (TREE_CODE (arg1) == LSHIFT_EXPR
10191 && (TYPE_UNSIGNED (type)
10192 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10194 tree sval = TREE_OPERAND (arg1, 0);
10195 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10197 tree sh_cnt = TREE_OPERAND (arg1, 1);
10198 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10199 wi::exact_log2 (wi::to_wide (sval)));
10201 if (strict_overflow_p)
10202 fold_overflow_warning (("assuming signed overflow does not "
10203 "occur when simplifying A / (B << N)"),
10204 WARN_STRICT_OVERFLOW_MISC);
10206 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10207 sh_cnt, pow2);
10208 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10209 fold_convert_loc (loc, type, arg0), sh_cnt);
10213 /* Fall through */
10215 case ROUND_DIV_EXPR:
10216 case CEIL_DIV_EXPR:
10217 case EXACT_DIV_EXPR:
10218 if (integer_zerop (arg1))
10219 return NULL_TREE;
10221 /* Convert -A / -B to A / B when the type is signed and overflow is
10222 undefined. */
10223 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10224 && TREE_CODE (op0) == NEGATE_EXPR
10225 && negate_expr_p (op1))
10227 if (INTEGRAL_TYPE_P (type))
10228 fold_overflow_warning (("assuming signed overflow does not occur "
10229 "when distributing negation across "
10230 "division"),
10231 WARN_STRICT_OVERFLOW_MISC);
10232 return fold_build2_loc (loc, code, type,
10233 fold_convert_loc (loc, type,
10234 TREE_OPERAND (arg0, 0)),
10235 negate_expr (op1));
10237 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10238 && TREE_CODE (arg1) == NEGATE_EXPR
10239 && negate_expr_p (op0))
10241 if (INTEGRAL_TYPE_P (type))
10242 fold_overflow_warning (("assuming signed overflow does not occur "
10243 "when distributing negation across "
10244 "division"),
10245 WARN_STRICT_OVERFLOW_MISC);
10246 return fold_build2_loc (loc, code, type,
10247 negate_expr (op0),
10248 fold_convert_loc (loc, type,
10249 TREE_OPERAND (arg1, 0)));
10252 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10253 operation, EXACT_DIV_EXPR.
10255 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10256 At one time others generated faster code; it's not clear if they do
10257 after the last round of changes to the DIV code in expmed.c. */
10258 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10259 && multiple_of_p (type, arg0, arg1))
10260 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10261 fold_convert (type, arg0),
10262 fold_convert (type, arg1));
10264 strict_overflow_p = false;
10265 if (TREE_CODE (arg1) == INTEGER_CST
10266 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10267 &strict_overflow_p)))
10269 if (strict_overflow_p)
10270 fold_overflow_warning (("assuming signed overflow does not occur "
10271 "when simplifying division"),
10272 WARN_STRICT_OVERFLOW_MISC);
10273 return fold_convert_loc (loc, type, tem);
10276 return NULL_TREE;
10278 case CEIL_MOD_EXPR:
10279 case FLOOR_MOD_EXPR:
10280 case ROUND_MOD_EXPR:
10281 case TRUNC_MOD_EXPR:
10282 strict_overflow_p = false;
10283 if (TREE_CODE (arg1) == INTEGER_CST
10284 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10285 &strict_overflow_p)))
10287 if (strict_overflow_p)
10288 fold_overflow_warning (("assuming signed overflow does not occur "
10289 "when simplifying modulus"),
10290 WARN_STRICT_OVERFLOW_MISC);
10291 return fold_convert_loc (loc, type, tem);
10294 return NULL_TREE;
10296 case LROTATE_EXPR:
10297 case RROTATE_EXPR:
10298 case RSHIFT_EXPR:
10299 case LSHIFT_EXPR:
10300 /* Since negative shift count is not well-defined,
10301 don't try to compute it in the compiler. */
10302 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10303 return NULL_TREE;
10305 prec = element_precision (type);
10307 /* If we have a rotate of a bit operation with the rotate count and
10308 the second operand of the bit operation both constant,
10309 permute the two operations. */
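      /* A rotate only permutes bit positions, so it distributes over the
	 bitwise operation: e.g. for 32-bit operands,
	 (X & 0xFF) rrotate 8 == (X rrotate 8) & (0xFF rrotate 8)
	 == (X rrotate 8) & 0xFF000000.  */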
10310 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10311 && (TREE_CODE (arg0) == BIT_AND_EXPR
10312 || TREE_CODE (arg0) == BIT_IOR_EXPR
10313 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10314 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10316 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10317 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10318 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10319 fold_build2_loc (loc, code, type,
10320 arg00, arg1),
10321 fold_build2_loc (loc, code, type,
10322 arg01, arg1));
10325 /* Two consecutive rotates adding up to some integer
10326 multiple of the precision of the type can be ignored. */
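      /* E.g. for a 32-bit X, (X rrotate 12) rrotate 20 is just X,
	 since 12 + 20 == 32, the precision of the type.  */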
10327 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10328 && TREE_CODE (arg0) == RROTATE_EXPR
10329 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10330 && wi::umod_trunc (wi::to_wide (arg1)
10331 + wi::to_wide (TREE_OPERAND (arg0, 1)),
10332 prec) == 0)
10333 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10335 return NULL_TREE;
10337 case MIN_EXPR:
10338 case MAX_EXPR:
10339 goto associate;
10341 case TRUTH_ANDIF_EXPR:
10342 /* Note that the operands of this must be ints
10343 and their values must be 0 or 1.
10344 ("true" is a fixed value perhaps depending on the language.) */
10345 /* If first arg is constant zero, return it. */
10346 if (integer_zerop (arg0))
10347 return fold_convert_loc (loc, type, arg0);
10348 /* FALLTHRU */
10349 case TRUTH_AND_EXPR:
10350 /* If either arg is constant true, drop it. */
10351 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10352 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10353 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10354 /* Preserve sequence points. */
10355 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10356 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10357 /* If second arg is constant zero, result is zero, but first arg
10358 must be evaluated. */
10359 if (integer_zerop (arg1))
10360 return omit_one_operand_loc (loc, type, arg1, arg0);
10361 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10362 case will be handled here. */
10363 if (integer_zerop (arg0))
10364 return omit_one_operand_loc (loc, type, arg0, arg1);
10366 /* !X && X is always false. */
10367 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10368 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10369 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10370 /* X && !X is always false. */
10371 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10372 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10373 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10375 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10376 means A >= Y && A != MAX, but in this case we know that
10377 A < X <= MAX. */
10379 if (!TREE_SIDE_EFFECTS (arg0)
10380 && !TREE_SIDE_EFFECTS (arg1))
10382 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10383 if (tem && !operand_equal_p (tem, arg0, 0))
10384 return fold_build2_loc (loc, code, type, tem, arg1);
10386 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10387 if (tem && !operand_equal_p (tem, arg1, 0))
10388 return fold_build2_loc (loc, code, type, arg0, tem);
10391 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10392 != NULL_TREE)
10393 return tem;
10395 return NULL_TREE;
10397 case TRUTH_ORIF_EXPR:
10398 /* Note that the operands of this must be ints
10399 and their values must be 0 or true.
10400 ("true" is a fixed value perhaps depending on the language.) */
10401 /* If first arg is constant true, return it. */
10402 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10403 return fold_convert_loc (loc, type, arg0);
10404 /* FALLTHRU */
10405 case TRUTH_OR_EXPR:
10406 /* If either arg is constant zero, drop it. */
10407 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10408 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10409 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10410 /* Preserve sequence points. */
10411 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10412 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10413 /* If second arg is constant true, result is true, but we must
10414 evaluate first arg. */
10415 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10416 return omit_one_operand_loc (loc, type, arg1, arg0);
10417 /* Likewise for first arg, but note this only occurs here for
10418 TRUTH_OR_EXPR. */
10419 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10420 return omit_one_operand_loc (loc, type, arg0, arg1);
10422 /* !X || X is always true. */
10423 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10424 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10425 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10426 /* X || !X is always true. */
10427 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10428 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10429 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10431 /* (X && !Y) || (!X && Y) is X ^ Y */
10432 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10433 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10435 tree a0, a1, l0, l1, n0, n1;
10437 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10438 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10440 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10441 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10443 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10444 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10446 if ((operand_equal_p (n0, a0, 0)
10447 && operand_equal_p (n1, a1, 0))
10448 || (operand_equal_p (n0, a1, 0)
10449 && operand_equal_p (n1, a0, 0)))
10450 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10453 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10454 != NULL_TREE)
10455 return tem;
10457 return NULL_TREE;
10459 case TRUTH_XOR_EXPR:
10460 /* If the second arg is constant zero, drop it. */
10461 if (integer_zerop (arg1))
10462 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10463 /* If the second arg is constant true, this is a logical inversion. */
10464 if (integer_onep (arg1))
10466 tem = invert_truthvalue_loc (loc, arg0);
10467 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10469 /* Identical arguments cancel to zero. */
10470 if (operand_equal_p (arg0, arg1, 0))
10471 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10473 /* !X ^ X is always true. */
10474 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10475 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10476 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10478 /* X ^ !X is always true. */
10479 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10480 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10481 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10483 return NULL_TREE;
10485 case EQ_EXPR:
10486 case NE_EXPR:
10487 STRIP_NOPS (arg0);
10488 STRIP_NOPS (arg1);
10490 tem = fold_comparison (loc, code, type, op0, op1);
10491 if (tem != NULL_TREE)
10492 return tem;
10494 /* bool_var != 1 becomes !bool_var. */
10495 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10496 && code == NE_EXPR)
10497 return fold_convert_loc (loc, type,
10498 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10499 TREE_TYPE (arg0), arg0));
10501 /* bool_var == 0 becomes !bool_var. */
10502 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10503 && code == EQ_EXPR)
10504 return fold_convert_loc (loc, type,
10505 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10506 TREE_TYPE (arg0), arg0));
10508 /* !exp != 0 becomes !exp */
10509 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10510 && code == NE_EXPR)
10511 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10513 /* If this is an EQ or NE comparison with zero and ARG0 is
10514 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10515 two operations, but the latter can be done in one less insn
10516 on machines that have only two-operand insns or on which a
10517 constant cannot be the first operand. */
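      /* E.g. ((1 << n) & flags) != 0 and ((flags >> n) & 1) != 0 test the
	 same bit of flags; the second form needs no shifted constant as a
	 first operand.  */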
10518 if (TREE_CODE (arg0) == BIT_AND_EXPR
10519 && integer_zerop (arg1))
10521 tree arg00 = TREE_OPERAND (arg0, 0);
10522 tree arg01 = TREE_OPERAND (arg0, 1);
10523 if (TREE_CODE (arg00) == LSHIFT_EXPR
10524 && integer_onep (TREE_OPERAND (arg00, 0)))
10526 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10527 arg01, TREE_OPERAND (arg00, 1));
10528 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10529 build_int_cst (TREE_TYPE (arg0), 1));
10530 return fold_build2_loc (loc, code, type,
10531 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10532 arg1);
10534 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10535 && integer_onep (TREE_OPERAND (arg01, 0)))
10537 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10538 arg00, TREE_OPERAND (arg01, 1));
10539 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10540 build_int_cst (TREE_TYPE (arg0), 1));
10541 return fold_build2_loc (loc, code, type,
10542 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10543 arg1);
10547 /* If this is an NE or EQ comparison of zero against the result of a
10548 signed MOD operation whose second operand is a power of 2, make
10549 the MOD operation unsigned since it is simpler and equivalent. */
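      /* E.g. for int X, X % 4 == 0 iff (unsigned) X % 4U == 0:
	 divisibility by a power of two does not depend on the sign, and
	 only the comparison against zero is preserved here.  */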
10550 if (integer_zerop (arg1)
10551 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10552 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10553 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10554 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10555 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10556 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10558 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10559 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10560 fold_convert_loc (loc, newtype,
10561 TREE_OPERAND (arg0, 0)),
10562 fold_convert_loc (loc, newtype,
10563 TREE_OPERAND (arg0, 1)));
10565 return fold_build2_loc (loc, code, type, newmod,
10566 fold_convert_loc (loc, newtype, arg1));
10569 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10570 C1 is a valid shift constant, and C2 is a power of two, i.e.
10571 a single bit. */
10572 if (TREE_CODE (arg0) == BIT_AND_EXPR
10573 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10574 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10575 == INTEGER_CST
10576 && integer_pow2p (TREE_OPERAND (arg0, 1))
10577 && integer_zerop (arg1))
10579 tree itype = TREE_TYPE (arg0);
10580 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10581 prec = TYPE_PRECISION (itype);
10583 /* Check for a valid shift count. */
10584 if (wi::ltu_p (wi::to_wide (arg001), prec))
10586 tree arg01 = TREE_OPERAND (arg0, 1);
10587 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10588 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10589 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10590 can be rewritten as (X & (C2 << C1)) != 0. */
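	      /* E.g. ((X >> 4) & 1) != 0 becomes (X & 0x10) != 0,
		 testing bit 4 of X directly.  */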
10591 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10593 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10594 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10595 return fold_build2_loc (loc, code, type, tem,
10596 fold_convert_loc (loc, itype, arg1));
10598 /* Otherwise, for signed (arithmetic) shifts,
10599 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10600 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10601 else if (!TYPE_UNSIGNED (itype))
10602 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10603 arg000, build_int_cst (itype, 0));
10604 /* Otherwise, for unsigned (logical) shifts,
10605 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10606 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10607 else
10608 return omit_one_operand_loc (loc, type,
10609 code == EQ_EXPR ? integer_one_node
10610 : integer_zero_node,
10611 arg000);
10615 /* If this is a comparison of a field, we may be able to simplify it. */
10616 if ((TREE_CODE (arg0) == COMPONENT_REF
10617 || TREE_CODE (arg0) == BIT_FIELD_REF)
10618 /* Handle the constant case even without -O
10619 to make sure the warnings are given. */
10620 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10622 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10623 if (t1)
10624 return t1;
10627 /* Optimize comparisons of strlen vs zero to a compare of the
10628 first character of the string vs zero. To wit,
10629 strlen(ptr) == 0 => *ptr == 0
10630 strlen(ptr) != 0 => *ptr != 0
10631 Other cases should reduce to one of these two (or a constant)
10632 due to the return value of strlen being unsigned. */
10633 if (TREE_CODE (arg0) == CALL_EXPR
10634 && integer_zerop (arg1))
10636 tree fndecl = get_callee_fndecl (arg0);
10638 if (fndecl
10639 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10640 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10641 && call_expr_nargs (arg0) == 1
10642 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10644 tree iref = build_fold_indirect_ref_loc (loc,
10645 CALL_EXPR_ARG (arg0, 0));
10646 return fold_build2_loc (loc, code, type, iref,
10647 build_int_cst (TREE_TYPE (iref), 0));
10651 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10652 of X. Similarly fold (X >> C) == 0 into X >= 0. */
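      /* E.g. for a 32-bit int X, X >> 31 is 0 or -1 under an arithmetic
	 shift, so (X >> 31) != 0 is exactly X < 0; unsigned operands are
	 first converted to the signed type below.  */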
10653 if (TREE_CODE (arg0) == RSHIFT_EXPR
10654 && integer_zerop (arg1)
10655 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10657 tree arg00 = TREE_OPERAND (arg0, 0);
10658 tree arg01 = TREE_OPERAND (arg0, 1);
10659 tree itype = TREE_TYPE (arg00);
10660 if (wi::to_wide (arg01) == element_precision (itype) - 1)
10662 if (TYPE_UNSIGNED (itype))
10664 itype = signed_type_for (itype);
10665 arg00 = fold_convert_loc (loc, itype, arg00);
10667 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10668 type, arg00, build_zero_cst (itype));
10672 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10673 (X & C) == 0 when C is a single bit. */
10674 if (TREE_CODE (arg0) == BIT_AND_EXPR
10675 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10676 && integer_zerop (arg1)
10677 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10679 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10680 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10681 TREE_OPERAND (arg0, 1));
10682 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10683 type, tem,
10684 fold_convert_loc (loc, TREE_TYPE (arg0),
10685 arg1));
10688 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10689 constant C is a power of two, i.e. a single bit. */
10690 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10691 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10692 && integer_zerop (arg1)
10693 && integer_pow2p (TREE_OPERAND (arg0, 1))
10694 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10695 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10697 tree arg00 = TREE_OPERAND (arg0, 0);
10698 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10699 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10702 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10703 when C is a power of two, i.e. a single bit. */
10704 if (TREE_CODE (arg0) == BIT_AND_EXPR
10705 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10706 && integer_zerop (arg1)
10707 && integer_pow2p (TREE_OPERAND (arg0, 1))
10708 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10709 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10711 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10712 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10713 arg000, TREE_OPERAND (arg0, 1));
10714 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10715 tem, build_int_cst (TREE_TYPE (tem), 0));
10718 if (integer_zerop (arg1)
10719 && tree_expr_nonzero_p (arg0))
10721 tree res = constant_boolean_node (code == NE_EXPR, type);
10722 return omit_one_operand_loc (loc, type, res, arg0);
10725 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
10726 if (TREE_CODE (arg0) == BIT_AND_EXPR
10727 && TREE_CODE (arg1) == BIT_AND_EXPR)
10729 tree arg00 = TREE_OPERAND (arg0, 0);
10730 tree arg01 = TREE_OPERAND (arg0, 1);
10731 tree arg10 = TREE_OPERAND (arg1, 0);
10732 tree arg11 = TREE_OPERAND (arg1, 1);
10733 tree itype = TREE_TYPE (arg0);
10735 if (operand_equal_p (arg01, arg11, 0))
10737 tem = fold_convert_loc (loc, itype, arg10);
10738 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10739 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10740 return fold_build2_loc (loc, code, type, tem,
10741 build_zero_cst (itype));
10743 if (operand_equal_p (arg01, arg10, 0))
10745 tem = fold_convert_loc (loc, itype, arg11);
10746 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10747 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10748 return fold_build2_loc (loc, code, type, tem,
10749 build_zero_cst (itype));
10751 if (operand_equal_p (arg00, arg11, 0))
10753 tem = fold_convert_loc (loc, itype, arg10);
10754 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10755 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10756 return fold_build2_loc (loc, code, type, tem,
10757 build_zero_cst (itype));
10759 if (operand_equal_p (arg00, arg10, 0))
10761 tem = fold_convert_loc (loc, itype, arg11);
10762 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10763 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10764 return fold_build2_loc (loc, code, type, tem,
10765 build_zero_cst (itype));
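	  /* For example: (X & 7) == (Y & 7) folds to
	     ((X ^ Y) & 7) == 0, i.e. X and Y agree in their low three
	     bits exactly when X ^ Y has none of them set.  */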
10769 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10770 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10772 tree arg00 = TREE_OPERAND (arg0, 0);
10773 tree arg01 = TREE_OPERAND (arg0, 1);
10774 tree arg10 = TREE_OPERAND (arg1, 0);
10775 tree arg11 = TREE_OPERAND (arg1, 1);
10776 tree itype = TREE_TYPE (arg0);
10778 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10779 operand_equal_p guarantees no side-effects so we don't need
10780 to use omit_one_operand on Z. */
10781 if (operand_equal_p (arg01, arg11, 0))
10782 return fold_build2_loc (loc, code, type, arg00,
10783 fold_convert_loc (loc, TREE_TYPE (arg00),
10784 arg10));
10785 if (operand_equal_p (arg01, arg10, 0))
10786 return fold_build2_loc (loc, code, type, arg00,
10787 fold_convert_loc (loc, TREE_TYPE (arg00),
10788 arg11));
10789 if (operand_equal_p (arg00, arg11, 0))
10790 return fold_build2_loc (loc, code, type, arg01,
10791 fold_convert_loc (loc, TREE_TYPE (arg01),
10792 arg10));
10793 if (operand_equal_p (arg00, arg10, 0))
10794 return fold_build2_loc (loc, code, type, arg01,
10795 fold_convert_loc (loc, TREE_TYPE (arg01),
10796 arg11));
10798 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10799 if (TREE_CODE (arg01) == INTEGER_CST
10800 && TREE_CODE (arg11) == INTEGER_CST)
10802 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10803 fold_convert_loc (loc, itype, arg11));
10804 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10805 return fold_build2_loc (loc, code, type, tem,
10806 fold_convert_loc (loc, itype, arg10));
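	  /* For example: (X ^ 5) == (Y ^ 3) folds to
	     (X ^ (5 ^ 3)) == Y, i.e. (X ^ 6) == Y.  */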
10810 /* Attempt to simplify equality/inequality comparisons of complex
10811 values. Only lower the comparison if the result is known or
10812 can be simplified to a single scalar comparison. */
10813 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10814 || TREE_CODE (arg0) == COMPLEX_CST)
10815 && (TREE_CODE (arg1) == COMPLEX_EXPR
10816 || TREE_CODE (arg1) == COMPLEX_CST))
10818 tree real0, imag0, real1, imag1;
10819 tree rcond, icond;
10821 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10823 real0 = TREE_OPERAND (arg0, 0);
10824 imag0 = TREE_OPERAND (arg0, 1);
10826 else
10828 real0 = TREE_REALPART (arg0);
10829 imag0 = TREE_IMAGPART (arg0);
10832 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10834 real1 = TREE_OPERAND (arg1, 0);
10835 imag1 = TREE_OPERAND (arg1, 1);
10837 else
10839 real1 = TREE_REALPART (arg1);
10840 imag1 = TREE_IMAGPART (arg1);
10843 rcond = fold_binary_loc (loc, code, type, real0, real1);
10844 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10846 if (integer_zerop (rcond))
10848 if (code == EQ_EXPR)
10849 return omit_two_operands_loc (loc, type, boolean_false_node,
10850 imag0, imag1);
10851 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10853 else
10855 if (code == NE_EXPR)
10856 return omit_two_operands_loc (loc, type, boolean_true_node,
10857 imag0, imag1);
10858 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10862 icond = fold_binary_loc (loc, code, type, imag0, imag1);
10863 if (icond && TREE_CODE (icond) == INTEGER_CST)
10865 if (integer_zerop (icond))
10867 if (code == EQ_EXPR)
10868 return omit_two_operands_loc (loc, type, boolean_false_node,
10869 real0, real1);
10870 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10872 else
10874 if (code == NE_EXPR)
10875 return omit_two_operands_loc (loc, type, boolean_true_node,
10876 real0, real1);
10877 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
10882 return NULL_TREE;
10884 case LT_EXPR:
10885 case GT_EXPR:
10886 case LE_EXPR:
10887 case GE_EXPR:
10888 tem = fold_comparison (loc, code, type, op0, op1);
10889 if (tem != NULL_TREE)
10890 return tem;
10892 /* Transform comparisons of the form X +- C CMP X. */
10893 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10894 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10895 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10896 && !HONOR_SNANS (arg0))
10898 tree arg01 = TREE_OPERAND (arg0, 1);
10899 enum tree_code code0 = TREE_CODE (arg0);
10900 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10902 /* (X - c) > X becomes false. */
10903 if (code == GT_EXPR
10904 && ((code0 == MINUS_EXPR && is_positive >= 0)
10905 || (code0 == PLUS_EXPR && is_positive <= 0)))
10906 return constant_boolean_node (0, type);
10908 /* Likewise (X + c) < X becomes false. */
10909 if (code == LT_EXPR
10910 && ((code0 == PLUS_EXPR && is_positive >= 0)
10911 || (code0 == MINUS_EXPR && is_positive <= 0)))
10912 return constant_boolean_node (0, type);
10914 /* Convert (X - c) <= X to true. */
10915 if (!HONOR_NANS (arg1)
10916 && code == LE_EXPR
10917 && ((code0 == MINUS_EXPR && is_positive >= 0)
10918 || (code0 == PLUS_EXPR && is_positive <= 0)))
10919 return constant_boolean_node (1, type);
10921 /* Convert (X + c) >= X to true. */
10922 if (!HONOR_NANS (arg1)
10923 && code == GE_EXPR
10924 && ((code0 == PLUS_EXPR && is_positive >= 0)
10925 || (code0 == MINUS_EXPR && is_positive <= 0)))
10926 return constant_boolean_node (1, type);
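	  /* For example, with c == 1.0: (X - 1.0) > X folds to false
	     even when NaNs are honored, since a comparison involving a
	     NaN is false either way; (X - 1.0) <= X folds to true only
	     when NaNs need not be honored, as it would be false for a
	     NaN X.  */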
10929 /* If we are comparing an ABS_EXPR with a constant, we can
10930 convert all the cases into explicit comparisons, but they may
10931 well not be faster than doing the ABS and one comparison.
10932 But ABS (X) <= C is a range comparison, which becomes a subtraction
10933 and a comparison, and is probably faster. */
10934 if (code == LE_EXPR
10935 && TREE_CODE (arg1) == INTEGER_CST
10936 && TREE_CODE (arg0) == ABS_EXPR
10937 && ! TREE_SIDE_EFFECTS (arg0)
10938 && (0 != (tem = negate_expr (arg1)))
10939 && TREE_CODE (tem) == INTEGER_CST
10940 && !TREE_OVERFLOW (tem))
10941 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
10942 build2 (GE_EXPR, type,
10943 TREE_OPERAND (arg0, 0), tem),
10944 build2 (LE_EXPR, type,
10945 TREE_OPERAND (arg0, 0), arg1));
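      /* For example: ABS (X) <= 5 becomes X >= -5 && X <= 5, a range
	 check that avoids materializing the absolute value.  */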
10947 /* Convert ABS_EXPR<x> >= 0 to true. */
10948 strict_overflow_p = false;
10949 if (code == GE_EXPR
10950 && (integer_zerop (arg1)
10951 || (! HONOR_NANS (arg0)
10952 && real_zerop (arg1)))
10953 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
10955 if (strict_overflow_p)
10956 fold_overflow_warning (("assuming signed overflow does not occur "
10957 "when simplifying comparison of "
10958 "absolute value and zero"),
10959 WARN_STRICT_OVERFLOW_CONDITIONAL);
10960 return omit_one_operand_loc (loc, type,
10961 constant_boolean_node (true, type),
10962 arg0);
10965 /* Convert ABS_EXPR<x> < 0 to false. */
10966 strict_overflow_p = false;
10967 if (code == LT_EXPR
10968 && (integer_zerop (arg1) || real_zerop (arg1))
10969 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
10971 if (strict_overflow_p)
10972 fold_overflow_warning (("assuming signed overflow does not occur "
10973 "when simplifying comparison of "
10974 "absolute value and zero"),
10975 WARN_STRICT_OVERFLOW_CONDITIONAL);
10976 return omit_one_operand_loc (loc, type,
10977 constant_boolean_node (false, type),
10978 arg0);
10981 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
10982 and similarly for >= into !=. */
10983 if ((code == LT_EXPR || code == GE_EXPR)
10984 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10985 && TREE_CODE (arg1) == LSHIFT_EXPR
10986 && integer_onep (TREE_OPERAND (arg1, 0)))
10987 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10988 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10989 TREE_OPERAND (arg1, 1)),
10990 build_zero_cst (TREE_TYPE (arg0)));
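      /* For example, for unsigned X: X < (1 << Y) folds to
	 (X >> Y) == 0, and X >= (1 << Y) folds to (X >> Y) != 0.  */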
10992 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
10993 otherwise Y might be >= # of bits in X's type and thus e.g.
10994 (unsigned char) (1 << Y) for Y == 15 might be 0.
10995 If the cast is widening, then 1 << Y should have unsigned type,
10996 otherwise if Y is the number of bits in the signed shift type minus 1,
10997 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
10998 Y == 31 might be 0xffffffff80000000. */
10999 if ((code == LT_EXPR || code == GE_EXPR)
11000 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11001 && CONVERT_EXPR_P (arg1)
11002 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11003 && (element_precision (TREE_TYPE (arg1))
11004 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11005 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11006 || (element_precision (TREE_TYPE (arg1))
11007 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11008 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11010 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11011 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11012 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11013 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11014 build_zero_cst (TREE_TYPE (arg0)));
11017 return NULL_TREE;
11019 case UNORDERED_EXPR:
11020 case ORDERED_EXPR:
11021 case UNLT_EXPR:
11022 case UNLE_EXPR:
11023 case UNGT_EXPR:
11024 case UNGE_EXPR:
11025 case UNEQ_EXPR:
11026 case LTGT_EXPR:
11027 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11029 tree targ0 = strip_float_extensions (arg0);
11030 tree targ1 = strip_float_extensions (arg1);
11031 tree newtype = TREE_TYPE (targ0);
11033 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11034 newtype = TREE_TYPE (targ1);
11036 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11037 return fold_build2_loc (loc, code, type,
11038 fold_convert_loc (loc, newtype, targ0),
11039 fold_convert_loc (loc, newtype, targ1));
11042 return NULL_TREE;
11044 case COMPOUND_EXPR:
11045 /* When pedantic, a compound expression can be neither an lvalue
11046 nor an integer constant expression. */
11047 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11048 return NULL_TREE;
11049 /* Don't let (0, 0) be a null pointer constant. */
11050 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11051 : fold_convert_loc (loc, type, arg1);
11052 return pedantic_non_lvalue_loc (loc, tem);
11054 case ASSERT_EXPR:
11055 /* An ASSERT_EXPR should never be passed to fold_binary. */
11056 gcc_unreachable ();
11058 default:
11059 return NULL_TREE;
11060 } /* switch (code) */
11063 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11064 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11065 of GOTO_EXPR. */
11067 static tree
11068 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11070 switch (TREE_CODE (*tp))
11072 case LABEL_EXPR:
11073 return *tp;
11075 case GOTO_EXPR:
11076 *walk_subtrees = 0;
11078 /* fall through */
11080 default:
11081 return NULL_TREE;
11085 /* Return whether the sub-tree ST contains a label which is accessible from
11086 outside the sub-tree. */
11088 static bool
11089 contains_label_p (tree st)
11091 return
11092 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
11095 /* Fold a ternary expression of code CODE and type TYPE with operands
11096 OP0, OP1, and OP2. Return the folded expression if folding is
11097 successful. Otherwise, return NULL_TREE. */
11099 tree
11100 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11101 tree op0, tree op1, tree op2)
11103 tree tem;
11104 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11105 enum tree_code_class kind = TREE_CODE_CLASS (code);
11107 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11108 && TREE_CODE_LENGTH (code) == 3);
11110 /* If this is a commutative operation, and OP0 is a constant, move it
11111 to OP1 to reduce the number of tests below. */
11112 if (commutative_ternary_tree_code (code)
11113 && tree_swap_operands_p (op0, op1))
11114 return fold_build3_loc (loc, code, type, op1, op0, op2);
11116 tem = generic_simplify (loc, code, type, op0, op1, op2);
11117 if (tem)
11118 return tem;
11120 /* Strip any conversions that don't change the mode. This is safe
11121 for every expression, except for a comparison expression because
11122 its signedness is derived from its operands. So, in the latter
11123 case, only strip conversions that don't change the signedness.
11125 Note that this is done as an internal manipulation within the
11126 constant folder, in order to find the simplest representation of
11127 the arguments so that their form can be studied. In any case,
11128 the appropriate type conversions should be put back in the tree
11129 that will get out of the constant folder. */
11130 if (op0)
11132 arg0 = op0;
11133 STRIP_NOPS (arg0);
11136 if (op1)
11138 arg1 = op1;
11139 STRIP_NOPS (arg1);
11142 if (op2)
11144 arg2 = op2;
11145 STRIP_NOPS (arg2);
11148 switch (code)
11150 case COMPONENT_REF:
11151 if (TREE_CODE (arg0) == CONSTRUCTOR
11152 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11154 unsigned HOST_WIDE_INT idx;
11155 tree field, value;
11156 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11157 if (field == arg1)
11158 return value;
11160 return NULL_TREE;
11162 case COND_EXPR:
11163 case VEC_COND_EXPR:
11164 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11165 so all simple results must be passed through pedantic_non_lvalue. */
11166 if (TREE_CODE (arg0) == INTEGER_CST)
11168 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11169 tem = integer_zerop (arg0) ? op2 : op1;
11170 /* Only optimize constant conditions when the selected branch
11171 has the same type as the COND_EXPR. This avoids optimizing
11172 away "c ? x : throw", where the throw has a void type.
10673 Avoid throwing away an operand that contains a label. */
11174 if ((!TREE_SIDE_EFFECTS (unused_op)
11175 || !contains_label_p (unused_op))
11176 && (! VOID_TYPE_P (TREE_TYPE (tem))
11177 || VOID_TYPE_P (type)))
11178 return pedantic_non_lvalue_loc (loc, tem);
11179 return NULL_TREE;
11181 else if (TREE_CODE (arg0) == VECTOR_CST)
11183 if ((TREE_CODE (arg1) == VECTOR_CST
11184 || TREE_CODE (arg1) == CONSTRUCTOR)
11185 && (TREE_CODE (arg2) == VECTOR_CST
11186 || TREE_CODE (arg2) == CONSTRUCTOR))
11188 unsigned int nelts = VECTOR_CST_NELTS (arg0), i;
11189 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
11190 auto_vec_perm_indices sel (nelts);
11191 for (i = 0; i < nelts; i++)
11193 tree val = VECTOR_CST_ELT (arg0, i);
11194 if (integer_all_onesp (val))
11195 sel.quick_push (i);
11196 else if (integer_zerop (val))
11197 sel.quick_push (nelts + i);
11198 else /* Currently unreachable. */
11199 return NULL_TREE;
11201 tree t = fold_vec_perm (type, arg1, arg2, sel);
11202 if (t != NULL_TREE)
11203 return t;
11207 /* If we have A op B ? A : C, we may be able to convert this to a
11208 simpler expression, depending on the operation and the values
11209 of B and C. Signed zeros prevent all of these transformations,
11210 for reasons given above each one.
11212 Also try swapping the arguments and inverting the conditional. */
11213 if (COMPARISON_CLASS_P (arg0)
11214 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
11215 && !HONOR_SIGNED_ZEROS (element_mode (op1)))
11217 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11218 if (tem)
11219 return tem;
11222 if (COMPARISON_CLASS_P (arg0)
11223 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
11224 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11226 location_t loc0 = expr_location_or (arg0, loc);
11227 tem = fold_invert_truthvalue (loc0, arg0);
11228 if (tem && COMPARISON_CLASS_P (tem))
11230 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11231 if (tem)
11232 return tem;
11236 /* If the second operand is simpler than the third, swap them
11237 since that produces better jump optimization results. */
11238 if (truth_value_p (TREE_CODE (arg0))
11239 && tree_swap_operands_p (op1, op2))
11241 location_t loc0 = expr_location_or (arg0, loc);
11242 /* See if this can be inverted. If it can't, possibly because
11243 it was a floating-point inequality comparison, don't do
11244 anything. */
11245 tem = fold_invert_truthvalue (loc0, arg0);
11246 if (tem)
11247 return fold_build3_loc (loc, code, type, tem, op2, op1);
11250 /* Convert A ? 1 : 0 to simply A. */
11251 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11252 : (integer_onep (op1)
11253 && !VECTOR_TYPE_P (type)))
11254 && integer_zerop (op2)
11255 /* If we try to convert OP0 to our type, the
11256 call to fold will try to move the conversion inside
11257 a COND, which will recurse. In that case, the COND_EXPR
11258 is probably the best choice, so leave it alone. */
11259 && type == TREE_TYPE (arg0))
11260 return pedantic_non_lvalue_loc (loc, arg0);
11262 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11263 over COND_EXPR in cases such as floating point comparisons. */
11264 if (integer_zerop (op1)
11265 && code == COND_EXPR
11266 && integer_onep (op2)
11267 && !VECTOR_TYPE_P (type)
11268 && truth_value_p (TREE_CODE (arg0)))
11269 return pedantic_non_lvalue_loc (loc,
11270 fold_convert_loc (loc, type,
11271 invert_truthvalue_loc (loc,
11272 arg0)));
11274 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11275 if (TREE_CODE (arg0) == LT_EXPR
11276 && integer_zerop (TREE_OPERAND (arg0, 1))
11277 && integer_zerop (op2)
11278 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11280 /* sign_bit_p looks through both zero and sign extensions,
11281 but for this optimization only sign extensions are
11282 usable. */
11283 tree tem2 = TREE_OPERAND (arg0, 0);
11284 while (tem != tem2)
11286 if (TREE_CODE (tem2) != NOP_EXPR
11287 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11289 tem = NULL_TREE;
11290 break;
11292 tem2 = TREE_OPERAND (tem2, 0);
11294 /* sign_bit_p only checks ARG1 bits within A's precision.
11295 If <sign bit of A> has wider type than A, bits outside
11296 of A's precision in <sign bit of A> need to be checked.
11297 If they are all 0, this optimization needs to be done
11298 in unsigned A's type; if they are all 1, in signed A's type;
11299 otherwise this can't be done. */
11300 if (tem
11301 && TYPE_PRECISION (TREE_TYPE (tem))
11302 < TYPE_PRECISION (TREE_TYPE (arg1))
11303 && TYPE_PRECISION (TREE_TYPE (tem))
11304 < TYPE_PRECISION (type))
11306 int inner_width, outer_width;
11307 tree tem_type;
11309 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11310 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11311 if (outer_width > TYPE_PRECISION (type))
11312 outer_width = TYPE_PRECISION (type);
11314 wide_int mask = wi::shifted_mask
11315 (inner_width, outer_width - inner_width, false,
11316 TYPE_PRECISION (TREE_TYPE (arg1)));
11318 wide_int common = mask & wi::to_wide (arg1);
11319 if (common == mask)
11321 tem_type = signed_type_for (TREE_TYPE (tem));
11322 tem = fold_convert_loc (loc, tem_type, tem);
11324 else if (common == 0)
11326 tem_type = unsigned_type_for (TREE_TYPE (tem));
11327 tem = fold_convert_loc (loc, tem_type, tem);
11329 else
11330 tem = NULL;
11333 if (tem)
11334 return
11335 fold_convert_loc (loc, type,
11336 fold_build2_loc (loc, BIT_AND_EXPR,
11337 TREE_TYPE (tem), tem,
11338 fold_convert_loc (loc,
11339 TREE_TYPE (tem),
11340 arg1)));
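	  /* A concrete instance (illustrative, assuming a 32-bit signed
	     X): X < 0 ? 0x80000000 : 0 folds to X & 0x80000000, masking
	     out everything but the sign bit.  */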
11343 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11344 already handled above. */
11345 if (TREE_CODE (arg0) == BIT_AND_EXPR
11346 && integer_onep (TREE_OPERAND (arg0, 1))
11347 && integer_zerop (op2)
11348 && integer_pow2p (arg1))
11350 tree tem = TREE_OPERAND (arg0, 0);
11351 STRIP_NOPS (tem);
11352 if (TREE_CODE (tem) == RSHIFT_EXPR
11353 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11354 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11355 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11356 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11357 fold_convert_loc (loc, type,
11358 TREE_OPERAND (tem, 0)),
11359 op1);
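	  /* For example: ((A >> 3) & 1) ? 8 : 0 folds to A & 8, testing
	     and reproducing bit 3 with a single mask operation.  */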
11362 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11363 is probably obsolete because the first operand should be a
11364 truth value (that's why we have the two cases above), but let's
11365 leave it in until we can confirm this for all front-ends. */
11366 if (integer_zerop (op2)
11367 && TREE_CODE (arg0) == NE_EXPR
11368 && integer_zerop (TREE_OPERAND (arg0, 1))
11369 && integer_pow2p (arg1)
11370 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11371 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11372 arg1, OEP_ONLY_CONST))
11373 return pedantic_non_lvalue_loc (loc,
11374 fold_convert_loc (loc, type,
11375 TREE_OPERAND (arg0, 0)));
11377 /* Disable the transformations below for vectors, since
11378 fold_binary_op_with_conditional_arg may undo them immediately,
11379 yielding an infinite loop. */
11380 if (code == VEC_COND_EXPR)
11381 return NULL_TREE;
11383 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11384 if (integer_zerop (op2)
11385 && truth_value_p (TREE_CODE (arg0))
11386 && truth_value_p (TREE_CODE (arg1))
11387 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11388 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11389 : TRUTH_ANDIF_EXPR,
11390 type, fold_convert_loc (loc, type, arg0), op1);
11392 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11393 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11394 && truth_value_p (TREE_CODE (arg0))
11395 && truth_value_p (TREE_CODE (arg1))
11396 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11398 location_t loc0 = expr_location_or (arg0, loc);
11399 /* Only perform transformation if ARG0 is easily inverted. */
11400 tem = fold_invert_truthvalue (loc0, arg0);
11401 if (tem)
11402 return fold_build2_loc (loc, code == VEC_COND_EXPR
11403 ? BIT_IOR_EXPR
11404 : TRUTH_ORIF_EXPR,
11405 type, fold_convert_loc (loc, type, tem),
11406 op1);
11409 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11410 if (integer_zerop (arg1)
11411 && truth_value_p (TREE_CODE (arg0))
11412 && truth_value_p (TREE_CODE (op2))
11413 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11415 location_t loc0 = expr_location_or (arg0, loc);
11416 /* Only perform transformation if ARG0 is easily inverted. */
11417 tem = fold_invert_truthvalue (loc0, arg0);
11418 if (tem)
11419 return fold_build2_loc (loc, code == VEC_COND_EXPR
11420 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11421 type, fold_convert_loc (loc, type, tem),
11422 op2);
11425 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11426 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11427 && truth_value_p (TREE_CODE (arg0))
11428 && truth_value_p (TREE_CODE (op2))
11429 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11430 return fold_build2_loc (loc, code == VEC_COND_EXPR
11431 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11432 type, fold_convert_loc (loc, type, arg0), op2);
11434 return NULL_TREE;
11436 case CALL_EXPR:
11437 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11438 of fold_ternary on them. */
11439 gcc_unreachable ();
11441 case BIT_FIELD_REF:
11442 if (TREE_CODE (arg0) == VECTOR_CST
11443 && (type == TREE_TYPE (TREE_TYPE (arg0))
11444 || (TREE_CODE (type) == VECTOR_TYPE
11445 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11447 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11448 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11449 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11450 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11452 if (n != 0
11453 && (idx % width) == 0
11454 && (n % width) == 0
11455 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11457 idx = idx / width;
11458 n = n / width;
11460 if (TREE_CODE (arg0) == VECTOR_CST)
11462 if (n == 1)
11463 return VECTOR_CST_ELT (arg0, idx);
11465 tree_vector_builder vals (type, n, 1);
11466 for (unsigned i = 0; i < n; ++i)
11467 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
11468 return vals.build ();
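	      /* For example (illustrative): extracting 64 bits at bit
		 offset 64 from a 4-element vector of 32-bit constants
		 yields a 2-element vector built from elements 2 and 3.  */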
11473 /* On constants we can use native encode/interpret to constant
11474 fold (nearly) all BIT_FIELD_REFs. */
11475 if (CONSTANT_CLASS_P (arg0)
11476 && can_native_interpret_type_p (type)
11477 && BITS_PER_UNIT == 8)
11479 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11480 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11481 /* Limit us to a reasonable amount of work. To relax the
11482 other limitations we need bit-shifting of the buffer
11483 and rounding up the size. */
11484 if (bitpos % BITS_PER_UNIT == 0
11485 && bitsize % BITS_PER_UNIT == 0
11486 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11488 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11489 unsigned HOST_WIDE_INT len
11490 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11491 bitpos / BITS_PER_UNIT);
11492 if (len > 0
11493 && len * BITS_PER_UNIT >= bitsize)
11495 tree v = native_interpret_expr (type, b,
11496 bitsize / BITS_PER_UNIT);
11497 if (v)
11498 return v;
11503 return NULL_TREE;
11505 case FMA_EXPR:
11506 /* For integers we can decompose the FMA if possible. */
11507 if (TREE_CODE (arg0) == INTEGER_CST
11508 && TREE_CODE (arg1) == INTEGER_CST)
11509 return fold_build2_loc (loc, PLUS_EXPR, type,
11510 const_binop (MULT_EXPR, arg0, arg1), arg2);
11511 if (integer_zerop (arg2))
11512 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
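      /* For example: FMA_EXPR <3, 4, 5> on integer constants folds to
	 3 * 4 + 5 == 17, and FMA_EXPR <X, Y, 0> folds to X * Y.  */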
11514 return fold_fma (loc, type, arg0, arg1, arg2);
11516 case VEC_PERM_EXPR:
11517 if (TREE_CODE (arg2) == VECTOR_CST)
11519 unsigned int nelts = VECTOR_CST_NELTS (arg2), i, mask, mask2;
11520 bool need_mask_canon = false;
11521 bool need_mask_canon2 = false;
11522 bool all_in_vec0 = true;
11523 bool all_in_vec1 = true;
11524 bool maybe_identity = true;
11525 bool single_arg = (op0 == op1);
11526 bool changed = false;
11528 mask2 = 2 * nelts - 1;
11529 mask = single_arg ? (nelts - 1) : mask2;
11530 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
11531 auto_vec_perm_indices sel (nelts);
11532 auto_vec_perm_indices sel2 (nelts);
11533 for (i = 0; i < nelts; i++)
11535 tree val = VECTOR_CST_ELT (arg2, i);
11536 if (TREE_CODE (val) != INTEGER_CST)
11537 return NULL_TREE;
11539 /* Make sure that the perm value is in an acceptable
11540 range. */
11541 wi::tree_to_wide_ref t = wi::to_wide (val);
11542 need_mask_canon |= wi::gtu_p (t, mask);
11543 need_mask_canon2 |= wi::gtu_p (t, mask2);
11544 unsigned int elt = t.to_uhwi () & mask;
11545 unsigned int elt2 = t.to_uhwi () & mask2;
11547 if (elt < nelts)
11548 all_in_vec1 = false;
11549 else
11550 all_in_vec0 = false;
11552 if ((elt & (nelts - 1)) != i)
11553 maybe_identity = false;
11555 sel.quick_push (elt);
11556 sel2.quick_push (elt2);
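	  /* For example, with nelts == 4 (two-operand selector range
	     0..7): a selector value of 9 is out of range and is reduced
	     to 9 & 7 == 1, with need_mask_canon recording that the mask
	     must be rewritten.  */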
11559 if (maybe_identity)
11561 if (all_in_vec0)
11562 return op0;
11563 if (all_in_vec1)
11564 return op1;
11567 if (all_in_vec0)
11568 op1 = op0;
11569 else if (all_in_vec1)
11571 op0 = op1;
11572 for (i = 0; i < nelts; i++)
11573 sel[i] -= nelts;
11574 need_mask_canon = true;
11577 if ((TREE_CODE (op0) == VECTOR_CST
11578 || TREE_CODE (op0) == CONSTRUCTOR)
11579 && (TREE_CODE (op1) == VECTOR_CST
11580 || TREE_CODE (op1) == CONSTRUCTOR))
11582 tree t = fold_vec_perm (type, op0, op1, sel);
11583 if (t != NULL_TREE)
11584 return t;
11587 if (op0 == op1 && !single_arg)
11588 changed = true;
11590 /* Some targets are deficient and fail to expand a single
11591 argument permutation while still allowing an equivalent
11592 2-argument version. */
11593 if (need_mask_canon && arg2 == op2
11594 && !can_vec_perm_p (TYPE_MODE (type), false, &sel)
11595 && can_vec_perm_p (TYPE_MODE (type), false, &sel2))
11597 need_mask_canon = need_mask_canon2;
11598 sel = sel2;
11601 if (need_mask_canon && arg2 == op2)
11603 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11604 tree_vector_builder tsel (TREE_TYPE (arg2), nelts, 1);
11605 for (i = 0; i < nelts; i++)
11606 tsel.quick_push (build_int_cst (eltype, sel[i]));
11607 op2 = tsel.build ();
11608 changed = true;
11611 if (changed)
11612 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11614 return NULL_TREE;
11616 case BIT_INSERT_EXPR:
11617 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
11618 if (TREE_CODE (arg0) == INTEGER_CST
11619 && TREE_CODE (arg1) == INTEGER_CST)
11621 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11622 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11623 wide_int tem = (wi::to_wide (arg0)
11624 & wi::shifted_mask (bitpos, bitsize, true,
11625 TYPE_PRECISION (type)));
11626 wide_int tem2
11627 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11628 bitsize), bitpos);
11629 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
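	  /* Worked example (illustrative): inserting the 8-bit value
	     0xab at bit position 8 of the 32-bit constant 0x12345678
	     first clears bits 8-15, giving 0x12340078, then ORs in
	     0xab << 8, yielding 0x1234ab78.  */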
11631 else if (TREE_CODE (arg0) == VECTOR_CST
11632 && CONSTANT_CLASS_P (arg1)
11633 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11634 TREE_TYPE (arg1)))
11636 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11637 unsigned HOST_WIDE_INT elsize
11638 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11639 if (bitpos % elsize == 0)
11641 unsigned k = bitpos / elsize;
11642 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11643 return arg0;
11644 else
11646 unsigned int nelts = VECTOR_CST_NELTS (arg0);
11647 tree_vector_builder elts (type, nelts, 1);
11648 elts.quick_grow (nelts);
11649 for (unsigned int i = 0; i < nelts; ++i)
11650 elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
11651 return elts.build ();
11655 return NULL_TREE;
11657 default:
11658 return NULL_TREE;
11659 } /* switch (code) */
11662 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11663 of an array (or vector). */
11665 tree
11666 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11668 tree index_type = NULL_TREE;
11669 offset_int low_bound = 0;
11671 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11673 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11674 if (domain_type && TYPE_MIN_VALUE (domain_type))
11676 /* Static constructors for variably sized objects make no sense. */
11677 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11678 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11679 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11683 if (index_type)
11684 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11685 TYPE_SIGN (index_type));
11687 offset_int index = low_bound - 1;
11688 if (index_type)
11689 index = wi::ext (index, TYPE_PRECISION (index_type),
11690 TYPE_SIGN (index_type));
11692 offset_int max_index;
11693 unsigned HOST_WIDE_INT cnt;
11694 tree cfield, cval;
11696 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11698 /* Array constructor might explicitly set index, or specify a range,
11699 or leave the index NULL, meaning that it is the next index after
11700 the previous one. */
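	 /* For example, given the C initializer { [2] = 7, 9 }, the
	    element 9 has a NULL index and occupies index 3, the next
	    index after the previous one.  */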
11701 if (cfield)
11703 if (TREE_CODE (cfield) == INTEGER_CST)
11704 max_index = index = wi::to_offset (cfield);
11705 else
11707 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11708 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11709 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11712 else
11714 index += 1;
11715 if (index_type)
11716 index = wi::ext (index, TYPE_PRECISION (index_type),
11717 TYPE_SIGN (index_type));
11718 max_index = index;
11721 /* Do we have a match? */
11722 if (wi::cmpu (access_index, index) >= 0
11723 && wi::cmpu (access_index, max_index) <= 0)
11724 return cval;
11726 return NULL_TREE;
11729 /* Perform constant folding and related simplification of EXPR.
11730 The related simplifications include x*1 => x, x*0 => 0, etc.,
11731 and application of the associative law.
11732 NOP_EXPR conversions may be removed freely (as long as we
11733 are careful not to change the type of the overall expression).
11734 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11735 but we can constant-fold them if they have constant operands. */
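/* For example (an illustrative sketch), folding the constant sum 2 + 3:

     tree sum = build2 (PLUS_EXPR, integer_type_node,
			build_int_cst (integer_type_node, 2),
			build_int_cst (integer_type_node, 3));
     sum = fold (sum);

   after which SUM is an INTEGER_CST node with value 5.  */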
11737 #ifdef ENABLE_FOLD_CHECKING
11738 # define fold(x) fold_1 (x)
11739 static tree fold_1 (tree);
11740 static
11741 #endif
11742 tree
11743 fold (tree expr)
11745 const tree t = expr;
11746 enum tree_code code = TREE_CODE (t);
11747 enum tree_code_class kind = TREE_CODE_CLASS (code);
11748 tree tem;
11749 location_t loc = EXPR_LOCATION (expr);
11751 /* Return right away if a constant. */
11752 if (kind == tcc_constant)
11753 return t;
11755 /* CALL_EXPR-like objects with variable numbers of operands are
11756 treated specially. */
11757 if (kind == tcc_vl_exp)
11759 if (code == CALL_EXPR)
11761 tem = fold_call_expr (loc, expr, false);
11762 return tem ? tem : expr;
11764 return expr;
11767 if (IS_EXPR_CODE_CLASS (kind))
11769 tree type = TREE_TYPE (t);
11770 tree op0, op1, op2;
11772 switch (TREE_CODE_LENGTH (code))
11774 case 1:
11775 op0 = TREE_OPERAND (t, 0);
11776 tem = fold_unary_loc (loc, code, type, op0);
11777 return tem ? tem : expr;
11778 case 2:
11779 op0 = TREE_OPERAND (t, 0);
11780 op1 = TREE_OPERAND (t, 1);
11781 tem = fold_binary_loc (loc, code, type, op0, op1);
11782 return tem ? tem : expr;
11783 case 3:
11784 op0 = TREE_OPERAND (t, 0);
11785 op1 = TREE_OPERAND (t, 1);
11786 op2 = TREE_OPERAND (t, 2);
11787 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11788 return tem ? tem : expr;
11789 default:
11790 break;
11794 switch (code)
11796 case ARRAY_REF:
11798 tree op0 = TREE_OPERAND (t, 0);
11799 tree op1 = TREE_OPERAND (t, 1);
11801 if (TREE_CODE (op1) == INTEGER_CST
11802 && TREE_CODE (op0) == CONSTRUCTOR
11803 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11805 tree val = get_array_ctor_element_at_index (op0,
11806 wi::to_offset (op1));
11807 if (val)
11808 return val;
11811 return t;
11814 /* Return a VECTOR_CST if possible. */
11815 case CONSTRUCTOR:
11817 tree type = TREE_TYPE (t);
11818 if (TREE_CODE (type) != VECTOR_TYPE)
11819 return t;
11821 unsigned i;
11822 tree val;
11823 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
11824 if (! CONSTANT_CLASS_P (val))
11825 return t;
11827 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
11830 case CONST_DECL:
11831 return fold (DECL_INITIAL (t));
11833 default:
11834 return t;
11835 } /* switch (code) */
11838 #ifdef ENABLE_FOLD_CHECKING
11839 #undef fold
11841 static void fold_checksum_tree (const_tree, struct md5_ctx *,
11842 hash_table<nofree_ptr_hash<const tree_node> > *);
11843 static void fold_check_failed (const_tree, const_tree);
11844 void print_fold_checksum (const_tree);
11846 /* When --enable-checking=fold, compute a digest of expr before
11847 and after the actual fold call to see whether fold accidentally
11848 changed the original expr. */
11850 tree
11851 fold (tree expr)
11853 tree ret;
11854 struct md5_ctx ctx;
11855 unsigned char checksum_before[16], checksum_after[16];
11856 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
11858 md5_init_ctx (&ctx);
11859 fold_checksum_tree (expr, &ctx, &ht);
11860 md5_finish_ctx (&ctx, checksum_before);
11861 ht.empty ();
11863 ret = fold_1 (expr);
11865 md5_init_ctx (&ctx);
11866 fold_checksum_tree (expr, &ctx, &ht);
11867 md5_finish_ctx (&ctx, checksum_after);
11869 if (memcmp (checksum_before, checksum_after, 16))
11870 fold_check_failed (expr, ret);
11872 return ret;
11875 void
11876 print_fold_checksum (const_tree expr)
11878 struct md5_ctx ctx;
11879 unsigned char checksum[16], cnt;
11880 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
11882 md5_init_ctx (&ctx);
11883 fold_checksum_tree (expr, &ctx, &ht);
11884 md5_finish_ctx (&ctx, checksum);
11885 for (cnt = 0; cnt < 16; ++cnt)
11886 fprintf (stderr, "%02x", checksum[cnt]);
11887 putc ('\n', stderr);
11890 static void
11891 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
11893 internal_error ("fold check: original tree changed by fold");
11896 static void
11897 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
11898 hash_table<nofree_ptr_hash <const tree_node> > *ht)
11900 const tree_node **slot;
11901 enum tree_code code;
11902 union tree_node buf;
11903 int i, len;
11905 recursive_label:
11906 if (expr == NULL)
11907 return;
11908 slot = ht->find_slot (expr, INSERT);
11909 if (*slot != NULL)
11910 return;
11911 *slot = expr;
11912 code = TREE_CODE (expr);
11913 if (TREE_CODE_CLASS (code) == tcc_declaration
11914 && HAS_DECL_ASSEMBLER_NAME_P (expr))
11916 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
11917 memcpy ((char *) &buf, expr, tree_size (expr));
11918 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
11919 buf.decl_with_vis.symtab_node = NULL;
11920 expr = (tree) &buf;
11922 else if (TREE_CODE_CLASS (code) == tcc_type
11923 && (TYPE_POINTER_TO (expr)
11924 || TYPE_REFERENCE_TO (expr)
11925 || TYPE_CACHED_VALUES_P (expr)
11926 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
11927 || TYPE_NEXT_VARIANT (expr)
11928 || TYPE_ALIAS_SET_KNOWN_P (expr)))
11930 /* Allow these fields to be modified. */
11931 tree tmp;
11932 memcpy ((char *) &buf, expr, tree_size (expr));
11933 expr = tmp = (tree) &buf;
11934 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
11935 TYPE_POINTER_TO (tmp) = NULL;
11936 TYPE_REFERENCE_TO (tmp) = NULL;
11937 TYPE_NEXT_VARIANT (tmp) = NULL;
11938 TYPE_ALIAS_SET (tmp) = -1;
11939 if (TYPE_CACHED_VALUES_P (tmp))
11941 TYPE_CACHED_VALUES_P (tmp) = 0;
11942 TYPE_CACHED_VALUES (tmp) = NULL;
11945 md5_process_bytes (expr, tree_size (expr), ctx);
11946 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
11947 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11948 if (TREE_CODE_CLASS (code) != tcc_type
11949 && TREE_CODE_CLASS (code) != tcc_declaration
11950 && code != TREE_LIST
11951 && code != SSA_NAME
11952 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
11953 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11954 switch (TREE_CODE_CLASS (code))
11956 case tcc_constant:
11957 switch (code)
11959 case STRING_CST:
11960 md5_process_bytes (TREE_STRING_POINTER (expr),
11961 TREE_STRING_LENGTH (expr), ctx);
11962 break;
11963 case COMPLEX_CST:
11964 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11965 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11966 break;
11967 case VECTOR_CST:
11968 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
11969 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
11970 break;
11971 default:
11972 break;
11974 break;
11975 case tcc_exceptional:
11976 switch (code)
11978 case TREE_LIST:
11979 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11980 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11981 expr = TREE_CHAIN (expr);
11982 goto recursive_label;
11983 break;
11984 case TREE_VEC:
11985 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11986 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11987 break;
11988 default:
11989 break;
11991 break;
11992 case tcc_expression:
11993 case tcc_reference:
11994 case tcc_comparison:
11995 case tcc_unary:
11996 case tcc_binary:
11997 case tcc_statement:
11998 case tcc_vl_exp:
11999 len = TREE_OPERAND_LENGTH (expr);
12000 for (i = 0; i < len; ++i)
12001 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12002 break;
12003 case tcc_declaration:
12004 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12005 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12006 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12008 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12009 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12010 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12011 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12012 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12015 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12017 if (TREE_CODE (expr) == FUNCTION_DECL)
12019 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12020 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12022 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12024 break;
12025 case tcc_type:
12026 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12027 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12028 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12029 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12030 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12031 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12032 if (INTEGRAL_TYPE_P (expr)
12033 || SCALAR_FLOAT_TYPE_P (expr))
12035 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12036 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12038 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12039 if (TREE_CODE (expr) == RECORD_TYPE
12040 || TREE_CODE (expr) == UNION_TYPE
12041 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12042 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12043 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12044 break;
12045 default:
12046 break;
12050 /* Helper function for outputting the checksum of a tree T. When
12051 debugging with gdb, you can "define mynext" to be "next" followed
12052 by "call debug_fold_checksum (op0)", then just trace down till the
12053 outputs differ. */
12055 DEBUG_FUNCTION void
12056 debug_fold_checksum (const_tree t)
12058 int i;
12059 unsigned char checksum[16];
12060 struct md5_ctx ctx;
12061 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12063 md5_init_ctx (&ctx);
12064 fold_checksum_tree (t, &ctx, &ht);
12065 md5_finish_ctx (&ctx, checksum);
12066 ht.empty ();
12068 for (i = 0; i < 16; i++)
12069 fprintf (stderr, "%d ", checksum[i]);
12071 fprintf (stderr, "\n");
12074 #endif
12076 /* Fold a unary tree expression with code CODE of type TYPE with an
12077 operand OP0. LOC is the location of the resulting expression.
12078 Return a folded expression if successful. Otherwise, return a tree
12079 expression with code CODE of type TYPE with an operand OP0. */
12081 tree
12082 fold_build1_loc (location_t loc,
12083 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12085 tree tem;
12086 #ifdef ENABLE_FOLD_CHECKING
12087 unsigned char checksum_before[16], checksum_after[16];
12088 struct md5_ctx ctx;
12089 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12091 md5_init_ctx (&ctx);
12092 fold_checksum_tree (op0, &ctx, &ht);
12093 md5_finish_ctx (&ctx, checksum_before);
12094 ht.empty ();
12095 #endif
12097 tem = fold_unary_loc (loc, code, type, op0);
12098 if (!tem)
12099 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
12101 #ifdef ENABLE_FOLD_CHECKING
12102 md5_init_ctx (&ctx);
12103 fold_checksum_tree (op0, &ctx, &ht);
12104 md5_finish_ctx (&ctx, checksum_after);
12106 if (memcmp (checksum_before, checksum_after, 16))
12107 fold_check_failed (op0, tem);
12108 #endif
12109 return tem;
12112 /* Fold a binary tree expression with code CODE of type TYPE with
12113 operands OP0 and OP1. LOC is the location of the resulting
12114 expression. Return a folded expression if successful. Otherwise,
12115 return a tree expression with code CODE of type TYPE with operands
12116 OP0 and OP1. */
12118 tree
12119 fold_build2_loc (location_t loc,
12120 enum tree_code code, tree type, tree op0, tree op1
12121 MEM_STAT_DECL)
12123 tree tem;
12124 #ifdef ENABLE_FOLD_CHECKING
12125 unsigned char checksum_before_op0[16],
12126 checksum_before_op1[16],
12127 checksum_after_op0[16],
12128 checksum_after_op1[16];
12129 struct md5_ctx ctx;
12130 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12132 md5_init_ctx (&ctx);
12133 fold_checksum_tree (op0, &ctx, &ht);
12134 md5_finish_ctx (&ctx, checksum_before_op0);
12135 ht.empty ();
12137 md5_init_ctx (&ctx);
12138 fold_checksum_tree (op1, &ctx, &ht);
12139 md5_finish_ctx (&ctx, checksum_before_op1);
12140 ht.empty ();
12141 #endif
12143 tem = fold_binary_loc (loc, code, type, op0, op1);
12144 if (!tem)
12145 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12147 #ifdef ENABLE_FOLD_CHECKING
12148 md5_init_ctx (&ctx);
12149 fold_checksum_tree (op0, &ctx, &ht);
12150 md5_finish_ctx (&ctx, checksum_after_op0);
12151 ht.empty ();
12153 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12154 fold_check_failed (op0, tem);
12156 md5_init_ctx (&ctx);
12157 fold_checksum_tree (op1, &ctx, &ht);
12158 md5_finish_ctx (&ctx, checksum_after_op1);
12160 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12161 fold_check_failed (op1, tem);
12162 #endif
12163 return tem;
12166 /* Fold a ternary tree expression with code CODE of type TYPE with
12167 operands OP0, OP1, and OP2. Return a folded expression if
12168 successful. Otherwise, return a tree expression with code CODE of
12169 type TYPE with operands OP0, OP1, and OP2. */
12171 tree
12172 fold_build3_loc (location_t loc, enum tree_code code, tree type,
12173 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12175 tree tem;
12176 #ifdef ENABLE_FOLD_CHECKING
12177 unsigned char checksum_before_op0[16],
12178 checksum_before_op1[16],
12179 checksum_before_op2[16],
12180 checksum_after_op0[16],
12181 checksum_after_op1[16],
12182 checksum_after_op2[16];
12183 struct md5_ctx ctx;
12184 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12186 md5_init_ctx (&ctx);
12187 fold_checksum_tree (op0, &ctx, &ht);
12188 md5_finish_ctx (&ctx, checksum_before_op0);
12189 ht.empty ();
12191 md5_init_ctx (&ctx);
12192 fold_checksum_tree (op1, &ctx, &ht);
12193 md5_finish_ctx (&ctx, checksum_before_op1);
12194 ht.empty ();
12196 md5_init_ctx (&ctx);
12197 fold_checksum_tree (op2, &ctx, &ht);
12198 md5_finish_ctx (&ctx, checksum_before_op2);
12199 ht.empty ();
12200 #endif
12202 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12203 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12204 if (!tem)
12205 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12207 #ifdef ENABLE_FOLD_CHECKING
12208 md5_init_ctx (&ctx);
12209 fold_checksum_tree (op0, &ctx, &ht);
12210 md5_finish_ctx (&ctx, checksum_after_op0);
12211 ht.empty ();
12213 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12214 fold_check_failed (op0, tem);
12216 md5_init_ctx (&ctx);
12217 fold_checksum_tree (op1, &ctx, &ht);
12218 md5_finish_ctx (&ctx, checksum_after_op1);
12219 ht.empty ();
12221 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12222 fold_check_failed (op1, tem);
12224 md5_init_ctx (&ctx);
12225 fold_checksum_tree (op2, &ctx, &ht);
12226 md5_finish_ctx (&ctx, checksum_after_op2);
12228 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12229 fold_check_failed (op2, tem);
12230 #endif
12231 return tem;
12234 /* Fold a CALL_EXPR expression of type TYPE with callee FN and NARGS
12235 arguments in ARGARRAY, and a null static chain.
12236 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12237 of type TYPE from the given operands as constructed by build_call_array. */
12239 tree
12240 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12241 int nargs, tree *argarray)
12243 tree tem;
12244 #ifdef ENABLE_FOLD_CHECKING
12245 unsigned char checksum_before_fn[16],
12246 checksum_before_arglist[16],
12247 checksum_after_fn[16],
12248 checksum_after_arglist[16];
12249 struct md5_ctx ctx;
12250 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12251 int i;
12253 md5_init_ctx (&ctx);
12254 fold_checksum_tree (fn, &ctx, &ht);
12255 md5_finish_ctx (&ctx, checksum_before_fn);
12256 ht.empty ();
12258 md5_init_ctx (&ctx);
12259 for (i = 0; i < nargs; i++)
12260 fold_checksum_tree (argarray[i], &ctx, &ht);
12261 md5_finish_ctx (&ctx, checksum_before_arglist);
12262 ht.empty ();
12263 #endif
12265 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12266 if (!tem)
12267 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12269 #ifdef ENABLE_FOLD_CHECKING
12270 md5_init_ctx (&ctx);
12271 fold_checksum_tree (fn, &ctx, &ht);
12272 md5_finish_ctx (&ctx, checksum_after_fn);
12273 ht.empty ();
12275 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12276 fold_check_failed (fn, tem);
12278 md5_init_ctx (&ctx);
12279 for (i = 0; i < nargs; i++)
12280 fold_checksum_tree (argarray[i], &ctx, &ht);
12281 md5_finish_ctx (&ctx, checksum_after_arglist);
12283 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12284 fold_check_failed (NULL_TREE, tem);
12285 #endif
12286 return tem;
12289 /* Perform constant folding and related simplification of initializer
12290 expression EXPR. These behave identically to "fold_buildN" but ignore
12291 potential run-time traps and exceptions that fold must preserve. */
12293 #define START_FOLD_INIT \
12294 int saved_signaling_nans = flag_signaling_nans;\
12295 int saved_trapping_math = flag_trapping_math;\
12296 int saved_rounding_math = flag_rounding_math;\
12297 int saved_trapv = flag_trapv;\
12298 int saved_folding_initializer = folding_initializer;\
12299 flag_signaling_nans = 0;\
12300 flag_trapping_math = 0;\
12301 flag_rounding_math = 0;\
12302 flag_trapv = 0;\
12303 folding_initializer = 1;
12305 #define END_FOLD_INIT \
12306 flag_signaling_nans = saved_signaling_nans;\
12307 flag_trapping_math = saved_trapping_math;\
12308 flag_rounding_math = saved_rounding_math;\
12309 flag_trapv = saved_trapv;\
12310 folding_initializer = saved_folding_initializer;
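/* For example (illustrative):

     tree t = fold_build2_initializer_loc (loc, PLUS_EXPR, type, a, b);

   folds a + b with flag_trapv, flag_trapping_math and friends
   temporarily cleared, since a static initializer must be evaluated at
   compile time regardless of run-time trapping semantics.  */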
12312 tree
12313 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12314 tree type, tree op)
12316 tree result;
12317 START_FOLD_INIT;
12319 result = fold_build1_loc (loc, code, type, op);
12321 END_FOLD_INIT;
12322 return result;
12325 tree
12326 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12327 tree type, tree op0, tree op1)
12329 tree result;
12330 START_FOLD_INIT;
12332 result = fold_build2_loc (loc, code, type, op0, op1);
12334 END_FOLD_INIT;
12335 return result;
12338 tree
12339 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12340 int nargs, tree *argarray)
12342 tree result;
12343 START_FOLD_INIT;
12345 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12347 END_FOLD_INIT;
12348 return result;
12351 #undef START_FOLD_INIT
12352 #undef END_FOLD_INIT
12354 /* Determine if the first argument is a multiple of the second argument.
12355 Return 0 if it is not, or if we cannot easily determine it to be.
12357 An example of the sort of thing we care about (at this point; this routine
12358 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12359 fold cases do now) is discovering that
12361 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12363 is a multiple of
12365 SAVE_EXPR (J * 8)
12367 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12369 This code also handles discovering that
12371 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12373 is a multiple of 8 so we don't have to worry about dealing with a
12374 possible remainder.
12376 Note that we *look* inside a SAVE_EXPR only to determine how it was
12377 calculated; it is not safe for fold to do much of anything else with the
12378 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12379 at run time. For example, the latter example above *cannot* be implemented
12380 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12381 evaluation time of the original SAVE_EXPR is not necessarily the same at
12382 the time the new expression is evaluated. The only optimization of this
12383 sort that would be valid is changing
12385 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12387 divided by 8 to
12389 SAVE_EXPR (I) * SAVE_EXPR (J)
12391 (where the same SAVE_EXPR (J) is used in the original and the
12392 transformed version). */
12394 int
12395 multiple_of_p (tree type, const_tree top, const_tree bottom)
12397 gimple *stmt;
12398 tree t1, op1, op2;
12400 if (operand_equal_p (top, bottom, 0))
12401 return 1;
12403 if (TREE_CODE (type) != INTEGER_TYPE)
12404 return 0;
12406 switch (TREE_CODE (top))
12408 case BIT_AND_EXPR:
12409 /* Bitwise and provides a power of two multiple. If the mask is
12410 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12411 if (!integer_pow2p (bottom))
12412 return 0;
12413 /* FALLTHRU */
12415 case MULT_EXPR:
12416 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12417 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12419 case MINUS_EXPR:
12420 /* It is impossible to prove precisely whether op0 - op1 is a multiple
12421 of bottom, so be conservative here and check whether both op0 and
12422 op1 are multiples of bottom. Note we check the second operand first
12423 since it's usually simpler. */
12424 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12425 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12427 case PLUS_EXPR:
12428 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12429 as op0 - 3 if the expression has unsigned type. For example,
12430 (X / 3) + 0xfffffffd is a multiple of 3, but 0xfffffffd is not. */
12431 op1 = TREE_OPERAND (top, 1);
12432 if (TYPE_UNSIGNED (type)
12433 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
12434 op1 = fold_build1 (NEGATE_EXPR, type, op1);
12435 return (multiple_of_p (type, op1, bottom)
12436 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12438 case LSHIFT_EXPR:
12439 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12441 op1 = TREE_OPERAND (top, 1);
12442 /* const_binop may not detect overflow correctly,
12443 so check for it explicitly here. */
12444 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
12445 wi::to_wide (op1))
12446 && 0 != (t1 = fold_convert (type,
12447 const_binop (LSHIFT_EXPR,
12448 size_one_node,
12449 op1)))
12450 && !TREE_OVERFLOW (t1))
12451 return multiple_of_p (type, t1, bottom);
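	  /* For example: TOP = X << 4 equals X * 16, so TOP is a
	     multiple of any BOTTOM that 16 is a multiple of, e.g. 8.  */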
12453 return 0;
12455 case NOP_EXPR:
12456 /* Can't handle conversions from non-integral or wider integral types. */
12457 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12458 || (TYPE_PRECISION (type)
12459 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12460 return 0;
12462 /* fall through */
12464 case SAVE_EXPR:
12465 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12467 case COND_EXPR:
12468 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12469 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12471 case INTEGER_CST:
12472 if (TREE_CODE (bottom) != INTEGER_CST
12473 || integer_zerop (bottom)
12474 || (TYPE_UNSIGNED (type)
12475 && (tree_int_cst_sgn (top) < 0
12476 || tree_int_cst_sgn (bottom) < 0)))
12477 return 0;
12478 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12479 SIGNED);
12481 case SSA_NAME:
12482 if (TREE_CODE (bottom) == INTEGER_CST
12483 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
12484 && gimple_code (stmt) == GIMPLE_ASSIGN)
12486 enum tree_code code = gimple_assign_rhs_code (stmt);
12488 /* Check for special cases to see if top is defined as multiple
12489 of bottom:
12491 top = (X & ~(bottom - 1)) ; bottom is a power of 2
12493 or
12495 Y = X % bottom
12496 top = X - Y. */
12497 if (code == BIT_AND_EXPR
12498 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12499 && TREE_CODE (op2) == INTEGER_CST
12500 && integer_pow2p (bottom)
12501 && wi::multiple_of_p (wi::to_widest (op2),
12502 wi::to_widest (bottom), UNSIGNED))
12503 return 1;
12505 op1 = gimple_assign_rhs1 (stmt);
12506 if (code == MINUS_EXPR
12507 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12508 && TREE_CODE (op2) == SSA_NAME
12509 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
12510 && gimple_code (stmt) == GIMPLE_ASSIGN
12511 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
12512 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
12513 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
12514 return 1;
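	  /* This matches e.g.  Y = X % 8; top = X - Y;  where TOP is X
	     rounded down to a multiple of 8.  */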
12517 /* fall through */
12519 default:
12520 return 0;
12524 #define tree_expr_nonnegative_warnv_p(X, Y) \
12525 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12527 #define RECURSE(X) \
12528 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
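
/* An illustrative note (not part of GCC) on the poisoning trick above,
   shown in miniature.  A function-like macro only expands when the
   macro name is immediately followed by '(', so the parenthesized form
   used by RECURSE bypasses the poisoned definition:

     static bool helper (tree, bool *, int);
     #define helper(X) _Pragma ("GCC error \"use RECURSE\"") 0
     #define RECURSE(X) ((helper) (X, strict_overflow_p, depth + 1))

   Any direct call helper (...) now fails to compile, while RECURSE
   still reaches the real function.  */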
12530 /* Return true if CODE or TYPE is known to be non-negative. */
12532 static bool
12533 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12535 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12536 && truth_value_p (code))
12537 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12538 have a signed:1 type (where the values are -1 and 0). */
12539 return true;
12540 return false;
12543 /* Return true if (CODE OP0) is known to be non-negative. If the return
12544 value is based on the assumption that signed overflow is undefined,
12545 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12546 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12548 bool
12549 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12550 bool *strict_overflow_p, int depth)
12552 if (TYPE_UNSIGNED (type))
12553 return true;
12555 switch (code)
12557 case ABS_EXPR:
12558 /* We can't return 1 if flag_wrapv is set because
12559 ABS_EXPR<INT_MIN> = INT_MIN. */
12560 if (!ANY_INTEGRAL_TYPE_P (type))
12561 return true;
12562 if (TYPE_OVERFLOW_UNDEFINED (type))
12564 *strict_overflow_p = true;
12565 return true;
12567 break;
12569 case NON_LVALUE_EXPR:
12570 case FLOAT_EXPR:
12571 case FIX_TRUNC_EXPR:
12572 return RECURSE (op0);
12574 CASE_CONVERT:
12576 tree inner_type = TREE_TYPE (op0);
12577 tree outer_type = type;
12579 if (TREE_CODE (outer_type) == REAL_TYPE)
12581 if (TREE_CODE (inner_type) == REAL_TYPE)
12582 return RECURSE (op0);
12583 if (INTEGRAL_TYPE_P (inner_type))
12585 if (TYPE_UNSIGNED (inner_type))
12586 return true;
12587 return RECURSE (op0);
12590 else if (INTEGRAL_TYPE_P (outer_type))
12592 if (TREE_CODE (inner_type) == REAL_TYPE)
12593 return RECURSE (op0);
12594 if (INTEGRAL_TYPE_P (inner_type))
12595 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12596 && TYPE_UNSIGNED (inner_type);
12599 break;
12601 default:
12602 return tree_simple_nonnegative_warnv_p (code, type);
12605 /* We don't know the sign of `t', so be conservative and return false. */
12606 return false;
12609 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12610 value is based on the assumption that signed overflow is undefined,
12611 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12612 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12614 bool
12615 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12616 tree op1, bool *strict_overflow_p,
12617 int depth)
12619 if (TYPE_UNSIGNED (type))
12620 return true;
12622 switch (code)
12624 case POINTER_PLUS_EXPR:
12625 case PLUS_EXPR:
12626 if (FLOAT_TYPE_P (type))
12627 return RECURSE (op0) && RECURSE (op1);
12629 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12630 both unsigned and at least 2 bits shorter than the result. */
12631 if (TREE_CODE (type) == INTEGER_TYPE
12632 && TREE_CODE (op0) == NOP_EXPR
12633 && TREE_CODE (op1) == NOP_EXPR)
12635 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12636 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12637 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12638 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12640 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12641 TYPE_PRECISION (inner2)) + 1;
12642 return prec < TYPE_PRECISION (type);
12645 break;
12647 case MULT_EXPR:
12648 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12650 /* x * x is always non-negative for floating point x,
12651 and for integer x when signed overflow is undefined. */
12652 if (operand_equal_p (op0, op1, 0)
12653 || (RECURSE (op0) && RECURSE (op1)))
12655 if (ANY_INTEGRAL_TYPE_P (type)
12656 && TYPE_OVERFLOW_UNDEFINED (type))
12657 *strict_overflow_p = true;
12658 return true;
12662 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12663 both unsigned and their combined width is less than the result's. */
12664 if (TREE_CODE (type) == INTEGER_TYPE
12665 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12666 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12668 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12669 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12670 : TREE_TYPE (op0);
12671 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12672 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12673 : TREE_TYPE (op1);
12675 bool unsigned0 = TYPE_UNSIGNED (inner0);
12676 bool unsigned1 = TYPE_UNSIGNED (inner1);
12678 if (TREE_CODE (op0) == INTEGER_CST)
12679 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12681 if (TREE_CODE (op1) == INTEGER_CST)
12682 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12684 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12685 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12687 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12688 ? tree_int_cst_min_precision (op0, UNSIGNED)
12689 : TYPE_PRECISION (inner0);
12691 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12692 ? tree_int_cst_min_precision (op1, UNSIGNED)
12693 : TYPE_PRECISION (inner1);
12695 return precision0 + precision1 < TYPE_PRECISION (type);
12698 return false;
12700 case BIT_AND_EXPR:
12701 case MAX_EXPR:
12702 return RECURSE (op0) || RECURSE (op1);
12704 case BIT_IOR_EXPR:
12705 case BIT_XOR_EXPR:
12706 case MIN_EXPR:
12707 case RDIV_EXPR:
12708 case TRUNC_DIV_EXPR:
12709 case CEIL_DIV_EXPR:
12710 case FLOOR_DIV_EXPR:
12711 case ROUND_DIV_EXPR:
12712 return RECURSE (op0) && RECURSE (op1);
12714 case TRUNC_MOD_EXPR:
12715 return RECURSE (op0);
12717 case FLOOR_MOD_EXPR:
12718 return RECURSE (op1);
12720 case CEIL_MOD_EXPR:
12721 case ROUND_MOD_EXPR:
12722 default:
12723 return tree_simple_nonnegative_warnv_p (code, type);
12726 /* We don't know the sign of `t', so be conservative and return false. */
12727 return false;
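
/* An illustrative sketch (not from the GCC sources) of the
   zero-extension argument used in the PLUS_EXPR and MULT_EXPR cases
   above: two 8-bit values sum to less than 2^9, so in a 32-bit signed
   result the sign bit can never be set.  */

static int
zext_plus_sketch (unsigned char a, unsigned char b)
{
  int sum = (int) a + (int) b;	/* At most 255 + 255 = 510 < 2^31.  */
  return sum >= 0;		/* Always 1.  */
}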
12730 /* Return true if T is known to be non-negative. If the return
12731 value is based on the assumption that signed overflow is undefined,
12732 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12733 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12735 bool
12736 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12738 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12739 return true;
12741 switch (TREE_CODE (t))
12743 case INTEGER_CST:
12744 return tree_int_cst_sgn (t) >= 0;
12746 case REAL_CST:
12747 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12749 case FIXED_CST:
12750 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12752 case COND_EXPR:
12753 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12755 case SSA_NAME:
12756 /* Limit the depth of recursion to avoid quadratic behavior.
12757 This is expected to catch almost all occurrences in practice.
12758 If this code misses important cases that unbounded recursion
12759 would not, passes that need this information could be revised
12760 to provide it through dataflow propagation. */
12761 return (!name_registered_for_update_p (t)
12762 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12763 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12764 strict_overflow_p, depth));
12766 default:
12767 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12771 /* Return true if T is known to be non-negative. If the return
12772 value is based on the assumption that signed overflow is undefined,
12773 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12774 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12776 bool
12777 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
12778 bool *strict_overflow_p, int depth)
12780 switch (fn)
12782 CASE_CFN_ACOS:
12783 CASE_CFN_ACOSH:
12784 CASE_CFN_CABS:
12785 CASE_CFN_COSH:
12786 CASE_CFN_ERFC:
12787 CASE_CFN_EXP:
12788 CASE_CFN_EXP10:
12789 CASE_CFN_EXP2:
12790 CASE_CFN_FABS:
12791 CASE_CFN_FDIM:
12792 CASE_CFN_HYPOT:
12793 CASE_CFN_POW10:
12794 CASE_CFN_FFS:
12795 CASE_CFN_PARITY:
12796 CASE_CFN_POPCOUNT:
12797 CASE_CFN_CLZ:
12798 CASE_CFN_CLRSB:
12799 case CFN_BUILT_IN_BSWAP32:
12800 case CFN_BUILT_IN_BSWAP64:
12801 /* Always true. */
12802 return true;
12804 CASE_CFN_SQRT:
12805 CASE_CFN_SQRT_FN:
12806 /* sqrt(-0.0) is -0.0. */
12807 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12808 return true;
12809 return RECURSE (arg0);
12811 CASE_CFN_ASINH:
12812 CASE_CFN_ATAN:
12813 CASE_CFN_ATANH:
12814 CASE_CFN_CBRT:
12815 CASE_CFN_CEIL:
12816 CASE_CFN_ERF:
12817 CASE_CFN_EXPM1:
12818 CASE_CFN_FLOOR:
12819 CASE_CFN_FMOD:
12820 CASE_CFN_FREXP:
12821 CASE_CFN_ICEIL:
12822 CASE_CFN_IFLOOR:
12823 CASE_CFN_IRINT:
12824 CASE_CFN_IROUND:
12825 CASE_CFN_LCEIL:
12826 CASE_CFN_LDEXP:
12827 CASE_CFN_LFLOOR:
12828 CASE_CFN_LLCEIL:
12829 CASE_CFN_LLFLOOR:
12830 CASE_CFN_LLRINT:
12831 CASE_CFN_LLROUND:
12832 CASE_CFN_LRINT:
12833 CASE_CFN_LROUND:
12834 CASE_CFN_MODF:
12835 CASE_CFN_NEARBYINT:
12836 CASE_CFN_RINT:
12837 CASE_CFN_ROUND:
12838 CASE_CFN_SCALB:
12839 CASE_CFN_SCALBLN:
12840 CASE_CFN_SCALBN:
12841 CASE_CFN_SIGNBIT:
12842 CASE_CFN_SIGNIFICAND:
12843 CASE_CFN_SINH:
12844 CASE_CFN_TANH:
12845 CASE_CFN_TRUNC:
12846 /* True if the 1st argument is nonnegative. */
12847 return RECURSE (arg0);
12849 CASE_CFN_FMAX:
12850 CASE_CFN_FMAX_FN:
12851 /* True if the 1st OR 2nd arguments are nonnegative. */
12852 return RECURSE (arg0) || RECURSE (arg1);
12854 CASE_CFN_FMIN:
12855 CASE_CFN_FMIN_FN:
12856 /* True if the 1st AND 2nd arguments are nonnegative. */
12857 return RECURSE (arg0) && RECURSE (arg1);
12859 CASE_CFN_COPYSIGN:
12860 CASE_CFN_COPYSIGN_FN:
12861 /* True if the 2nd argument is nonnegative. */
12862 return RECURSE (arg1);
12864 CASE_CFN_POWI:
12865 /* True if the 1st argument is nonnegative or the second
12866 argument is an even integer. */
12867 if (TREE_CODE (arg1) == INTEGER_CST
12868 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
12869 return true;
12870 return RECURSE (arg0);
12872 CASE_CFN_POW:
12873 /* True if the 1st argument is nonnegative or the second
12874 argument is an even integer valued real. */
12875 if (TREE_CODE (arg1) == REAL_CST)
12877 REAL_VALUE_TYPE c;
12878 HOST_WIDE_INT n;
12880 c = TREE_REAL_CST (arg1);
12881 n = real_to_integer (&c);
12882 if ((n & 1) == 0)
12884 REAL_VALUE_TYPE cint;
12885 real_from_integer (&cint, VOIDmode, n, SIGNED);
12886 if (real_identical (&c, &cint))
12887 return true;
12890 return RECURSE (arg0);
12892 default:
12893 break;
12895 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
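
/* An illustrative sketch (not part of GCC) of the CASE_CFN_POW test
   above in plain C: the exponent is an even integer if converting it
   to an integer and back is lossless and the integer is even.  The
   sketch assumes a finite exponent whose value fits in long long
   (casting an out-of-range double is undefined); GCC's version uses
   REAL_VALUE_TYPE and real_to_integer instead.  */

static int
pow_even_exponent_sketch (double exponent)
{
  long long n = (long long) exponent;
  return (n & 1) == 0 && (double) n == exponent;
}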
12898 /* Return true if T is known to be non-negative. If the return
12899 value is based on the assumption that signed overflow is undefined,
12900 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12901 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12903 static bool
12904 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12906 enum tree_code code = TREE_CODE (t);
12907 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12908 return true;
12910 switch (code)
12912 case TARGET_EXPR:
12914 tree temp = TARGET_EXPR_SLOT (t);
12915 t = TARGET_EXPR_INITIAL (t);
12917 /* If the initializer is non-void, then it's a normal expression
12918 that will be assigned to the slot. */
12919 if (!VOID_TYPE_P (t))
12920 return RECURSE (t);
12922 /* Otherwise, the initializer sets the slot in some way. One common
12923 way is an assignment statement at the end of the initializer. */
12924 while (1)
12926 if (TREE_CODE (t) == BIND_EXPR)
12927 t = expr_last (BIND_EXPR_BODY (t));
12928 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12929 || TREE_CODE (t) == TRY_CATCH_EXPR)
12930 t = expr_last (TREE_OPERAND (t, 0));
12931 else if (TREE_CODE (t) == STATEMENT_LIST)
12932 t = expr_last (t);
12933 else
12934 break;
12936 if (TREE_CODE (t) == MODIFY_EXPR
12937 && TREE_OPERAND (t, 0) == temp)
12938 return RECURSE (TREE_OPERAND (t, 1));
12940 return false;
12943 case CALL_EXPR:
12945 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
12946 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
12948 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
12949 get_call_combined_fn (t),
12950 arg0,
12951 arg1,
12952 strict_overflow_p, depth);
12954 case COMPOUND_EXPR:
12955 case MODIFY_EXPR:
12956 return RECURSE (TREE_OPERAND (t, 1));
12958 case BIND_EXPR:
12959 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
12961 case SAVE_EXPR:
12962 return RECURSE (TREE_OPERAND (t, 0));
12964 default:
12965 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12969 #undef RECURSE
12970 #undef tree_expr_nonnegative_warnv_p
12972 /* Return true if T is known to be non-negative. If the return
12973 value is based on the assumption that signed overflow is undefined,
12974 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12975 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12977 bool
12978 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12980 enum tree_code code;
12981 if (t == error_mark_node)
12982 return false;
12984 code = TREE_CODE (t);
12985 switch (TREE_CODE_CLASS (code))
12987 case tcc_binary:
12988 case tcc_comparison:
12989 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
12990 TREE_TYPE (t),
12991 TREE_OPERAND (t, 0),
12992 TREE_OPERAND (t, 1),
12993 strict_overflow_p, depth);
12995 case tcc_unary:
12996 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
12997 TREE_TYPE (t),
12998 TREE_OPERAND (t, 0),
12999 strict_overflow_p, depth);
13001 case tcc_constant:
13002 case tcc_declaration:
13003 case tcc_reference:
13004 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13006 default:
13007 break;
13010 switch (code)
13012 case TRUTH_AND_EXPR:
13013 case TRUTH_OR_EXPR:
13014 case TRUTH_XOR_EXPR:
13015 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13016 TREE_TYPE (t),
13017 TREE_OPERAND (t, 0),
13018 TREE_OPERAND (t, 1),
13019 strict_overflow_p, depth);
13020 case TRUTH_NOT_EXPR:
13021 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13022 TREE_TYPE (t),
13023 TREE_OPERAND (t, 0),
13024 strict_overflow_p, depth);
13026 case COND_EXPR:
13027 case CONSTRUCTOR:
13028 case OBJ_TYPE_REF:
13029 case ASSERT_EXPR:
13030 case ADDR_EXPR:
13031 case WITH_SIZE_EXPR:
13032 case SSA_NAME:
13033 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13035 default:
13036 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13040 /* Return true if `t' is known to be non-negative. Handle warnings
13041 about undefined signed overflow. */
13043 bool
13044 tree_expr_nonnegative_p (tree t)
13046 bool ret, strict_overflow_p;
13048 strict_overflow_p = false;
13049 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13050 if (strict_overflow_p)
13051 fold_overflow_warning (("assuming signed overflow does not occur when "
13052 "determining that expression is always "
13053 "non-negative"),
13054 WARN_STRICT_OVERFLOW_MISC);
13055 return ret;
13059 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13060 For floating point we further ensure that T is not denormal.
13061 Similar logic is present in nonzero_address in rtlanal.h.
13063 If the return value is based on the assumption that signed overflow
13064 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13065 change *STRICT_OVERFLOW_P. */
13067 bool
13068 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13069 bool *strict_overflow_p)
13071 switch (code)
13073 case ABS_EXPR:
13074 return tree_expr_nonzero_warnv_p (op0,
13075 strict_overflow_p);
13077 case NOP_EXPR:
13079 tree inner_type = TREE_TYPE (op0);
13080 tree outer_type = type;
13082 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13083 && tree_expr_nonzero_warnv_p (op0,
13084 strict_overflow_p));
13086 break;
13088 case NON_LVALUE_EXPR:
13089 return tree_expr_nonzero_warnv_p (op0,
13090 strict_overflow_p);
13092 default:
13093 break;
13096 return false;
13099 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13100 For floating point we further ensure that T is not denormal.
13101 Similar logic is present in nonzero_address in rtlanal.h.
13103 If the return value is based on the assumption that signed overflow
13104 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13105 change *STRICT_OVERFLOW_P. */
13107 bool
13108 tree_binary_nonzero_warnv_p (enum tree_code code,
13109 tree type,
13110 tree op0,
13111 tree op1, bool *strict_overflow_p)
13113 bool sub_strict_overflow_p;
13114 switch (code)
13116 case POINTER_PLUS_EXPR:
13117 case PLUS_EXPR:
13118 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13120 /* In the presence of negative values it is hard
13121 to say anything. */
13122 sub_strict_overflow_p = false;
13123 if (!tree_expr_nonnegative_warnv_p (op0,
13124 &sub_strict_overflow_p)
13125 || !tree_expr_nonnegative_warnv_p (op1,
13126 &sub_strict_overflow_p))
13127 return false;
13128 /* One of the operands must be positive and the other non-negative. */
13129 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13130 overflows, on a two's-complement machine the sum of two
13131 nonnegative numbers can never be zero. */
13132 return (tree_expr_nonzero_warnv_p (op0,
13133 strict_overflow_p)
13134 || tree_expr_nonzero_warnv_p (op1,
13135 strict_overflow_p));
13137 break;
13139 case MULT_EXPR:
13140 if (TYPE_OVERFLOW_UNDEFINED (type))
13142 if (tree_expr_nonzero_warnv_p (op0,
13143 strict_overflow_p)
13144 && tree_expr_nonzero_warnv_p (op1,
13145 strict_overflow_p))
13147 *strict_overflow_p = true;
13148 return true;
13151 break;
13153 case MIN_EXPR:
13154 sub_strict_overflow_p = false;
13155 if (tree_expr_nonzero_warnv_p (op0,
13156 &sub_strict_overflow_p)
13157 && tree_expr_nonzero_warnv_p (op1,
13158 &sub_strict_overflow_p))
13160 if (sub_strict_overflow_p)
13161 *strict_overflow_p = true;
13163 break;
13165 case MAX_EXPR:
13166 sub_strict_overflow_p = false;
13167 if (tree_expr_nonzero_warnv_p (op0,
13168 &sub_strict_overflow_p))
13170 if (sub_strict_overflow_p)
13171 *strict_overflow_p = true;
13173 /* When both operands are nonzero, MAX must be too. */
13174 if (tree_expr_nonzero_warnv_p (op1,
13175 strict_overflow_p))
13176 return true;
13178 /* MAX where operand 0 is positive is positive. */
13179 return tree_expr_nonnegative_warnv_p (op0,
13180 strict_overflow_p);
13182 /* MAX where operand 1 is positive is positive. */
13183 else if (tree_expr_nonzero_warnv_p (op1,
13184 &sub_strict_overflow_p)
13185 && tree_expr_nonnegative_warnv_p (op1,
13186 &sub_strict_overflow_p))
13188 if (sub_strict_overflow_p)
13189 *strict_overflow_p = true;
13190 return true;
13192 break;
13194 case BIT_IOR_EXPR:
13195 return (tree_expr_nonzero_warnv_p (op1,
13196 strict_overflow_p)
13197 || tree_expr_nonzero_warnv_p (op0,
13198 strict_overflow_p));
13200 default:
13201 break;
13204 return false;
13207 /* Return true when T is an address and is known to be nonzero.
13208 For floating point we further ensure that T is not denormal.
13209 Similar logic is present in nonzero_address in rtlanal.h.
13211 If the return value is based on the assumption that signed overflow
13212 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13213 change *STRICT_OVERFLOW_P. */
13215 bool
13216 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13218 bool sub_strict_overflow_p;
13219 switch (TREE_CODE (t))
13221 case INTEGER_CST:
13222 return !integer_zerop (t);
13224 case ADDR_EXPR:
13226 tree base = TREE_OPERAND (t, 0);
13228 if (!DECL_P (base))
13229 base = get_base_address (base);
13231 if (base && TREE_CODE (base) == TARGET_EXPR)
13232 base = TARGET_EXPR_SLOT (base);
13234 if (!base)
13235 return false;
13237 /* For objects in the symbol table, check whether we know they are nonzero.
13238 Don't do anything for variables and functions before the symtab is built;
13239 it is quite possible that they will be declared weak later. */
13240 int nonzero_addr = maybe_nonzero_address (base);
13241 if (nonzero_addr >= 0)
13242 return nonzero_addr;
13244 /* Constants are never weak. */
13245 if (CONSTANT_CLASS_P (base))
13246 return true;
13248 return false;
13251 case COND_EXPR:
13252 sub_strict_overflow_p = false;
13253 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13254 &sub_strict_overflow_p)
13255 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13256 &sub_strict_overflow_p))
13258 if (sub_strict_overflow_p)
13259 *strict_overflow_p = true;
13260 return true;
13262 break;
13264 case SSA_NAME:
13265 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13266 break;
13267 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
13269 default:
13270 break;
13272 return false;
13275 #define integer_valued_real_p(X) \
13276 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13278 #define RECURSE(X) \
13279 ((integer_valued_real_p) (X, depth + 1))
13281 /* Return true if the floating point result of (CODE OP0) has an
13282 integer value. We also allow +Inf, -Inf and NaN to be considered
13283 integer values. Return false for signaling NaN.
13285 DEPTH is the current nesting depth of the query. */
13287 bool
13288 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13290 switch (code)
13292 case FLOAT_EXPR:
13293 return true;
13295 case ABS_EXPR:
13296 return RECURSE (op0);
13298 CASE_CONVERT:
13300 tree type = TREE_TYPE (op0);
13301 if (TREE_CODE (type) == INTEGER_TYPE)
13302 return true;
13303 if (TREE_CODE (type) == REAL_TYPE)
13304 return RECURSE (op0);
13305 break;
13308 default:
13309 break;
13311 return false;
13314 /* Return true if the floating point result of (CODE OP0 OP1) has an
13315 integer value. We also allow +Inf, -Inf and NaN to be considered
13316 integer values. Return false for signaling NaN.
13318 DEPTH is the current nesting depth of the query. */
13320 bool
13321 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13323 switch (code)
13325 case PLUS_EXPR:
13326 case MINUS_EXPR:
13327 case MULT_EXPR:
13328 case MIN_EXPR:
13329 case MAX_EXPR:
13330 return RECURSE (op0) && RECURSE (op1);
13332 default:
13333 break;
13335 return false;
13338 /* Return true if the floating point result of calling FN with arguments
13339 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13340 considered integer values. Return false for signaling NaN. If FN
13341 takes fewer than 2 arguments, the remaining ARGn are null.
13343 DEPTH is the current nesting depth of the query. */
13345 bool
13346 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13348 switch (fn)
13350 CASE_CFN_CEIL:
13351 CASE_CFN_FLOOR:
13352 CASE_CFN_NEARBYINT:
13353 CASE_CFN_RINT:
13354 CASE_CFN_ROUND:
13355 CASE_CFN_TRUNC:
13356 return true;
13358 CASE_CFN_FMIN:
13359 CASE_CFN_FMIN_FN:
13360 CASE_CFN_FMAX:
13361 CASE_CFN_FMAX_FN:
13362 return RECURSE (arg0) && RECURSE (arg1);
13364 default:
13365 break;
13367 return false;
13370 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13371 has an integer value. We also allow +Inf, -Inf and NaN to be
13372 considered integer values. Return false for signaling NaN.
13374 DEPTH is the current nesting depth of the query. */
13376 bool
13377 integer_valued_real_single_p (tree t, int depth)
13379 switch (TREE_CODE (t))
13381 case REAL_CST:
13382 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13384 case COND_EXPR:
13385 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13387 case SSA_NAME:
13388 /* Limit the depth of recursion to avoid quadratic behavior.
13389 This is expected to catch almost all occurrences in practice.
13390 If this code misses important cases that unbounded recursion
13391 would not, passes that need this information could be revised
13392 to provide it through dataflow propagation. */
13393 return (!name_registered_for_update_p (t)
13394 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13395 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13396 depth));
13398 default:
13399 break;
13401 return false;
13404 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13405 has an integer value. We also allow +Inf, -Inf and NaN to be
13406 considered integer values. Return false for signaling NaN.
13408 DEPTH is the current nesting depth of the query. */
13410 static bool
13411 integer_valued_real_invalid_p (tree t, int depth)
13413 switch (TREE_CODE (t))
13415 case COMPOUND_EXPR:
13416 case MODIFY_EXPR:
13417 case BIND_EXPR:
13418 return RECURSE (TREE_OPERAND (t, 1));
13420 case SAVE_EXPR:
13421 return RECURSE (TREE_OPERAND (t, 0));
13423 default:
13424 break;
13426 return false;
13429 #undef RECURSE
13430 #undef integer_valued_real_p
13432 /* Return true if the floating point expression T has an integer value.
13433 We also allow +Inf, -Inf and NaN to be considered integer values.
13434 Return false for signaling NaN.
13436 DEPTH is the current nesting depth of the query. */
13438 bool
13439 integer_valued_real_p (tree t, int depth)
13441 if (t == error_mark_node)
13442 return false;
13444 tree_code code = TREE_CODE (t);
13445 switch (TREE_CODE_CLASS (code))
13447 case tcc_binary:
13448 case tcc_comparison:
13449 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13450 TREE_OPERAND (t, 1), depth);
13452 case tcc_unary:
13453 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13455 case tcc_constant:
13456 case tcc_declaration:
13457 case tcc_reference:
13458 return integer_valued_real_single_p (t, depth);
13460 default:
13461 break;
13464 switch (code)
13466 case COND_EXPR:
13467 case SSA_NAME:
13468 return integer_valued_real_single_p (t, depth);
13470 case CALL_EXPR:
13472 tree arg0 = (call_expr_nargs (t) > 0
13473 ? CALL_EXPR_ARG (t, 0)
13474 : NULL_TREE);
13475 tree arg1 = (call_expr_nargs (t) > 1
13476 ? CALL_EXPR_ARG (t, 1)
13477 : NULL_TREE);
13478 return integer_valued_real_call_p (get_call_combined_fn (t),
13479 arg0, arg1, depth);
13482 default:
13483 return integer_valued_real_invalid_p (t, depth);
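
/* An illustrative sketch (not part of GCC) of the predicate the
   integer_valued_real_* family computes, for a plain double.  Quiet
   NaNs and infinities count as integer values, matching the comments
   above; a plain C sketch cannot distinguish signaling NaNs.  */

static int
integer_valued_double_sketch (double x)
{
  /* __builtin_trunc (+-Inf) == +-Inf, so infinities pass the second
     test; NaN fails it, so accept NaN explicitly via x != x.  */
  return x != x || __builtin_trunc (x) == x;
}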
13487 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13488 attempt to fold the expression to a constant without modifying TYPE,
13489 OP0 or OP1.
13491 If the expression could be simplified to a constant, then return
13492 the constant. If the expression would not be simplified to a
13493 constant, then return NULL_TREE. */
13495 tree
13496 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13498 tree tem = fold_binary (code, type, op0, op1);
13499 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13502 /* Given the components of a unary expression CODE, TYPE and OP0,
13503 attempt to fold the expression to a constant without modifying
13504 TYPE or OP0.
13506 If the expression could be simplified to a constant, then return
13507 the constant. If the expression would not be simplified to a
13508 constant, then return NULL_TREE. */
13510 tree
13511 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13513 tree tem = fold_unary (code, type, op0);
13514 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13517 /* If EXP represents referencing an element in a constant string
13518 (either via pointer arithmetic or array indexing), return the
13519 tree representing the value accessed, otherwise return NULL. */
13521 tree
13522 fold_read_from_constant_string (tree exp)
13524 if ((TREE_CODE (exp) == INDIRECT_REF
13525 || TREE_CODE (exp) == ARRAY_REF)
13526 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13528 tree exp1 = TREE_OPERAND (exp, 0);
13529 tree index;
13530 tree string;
13531 location_t loc = EXPR_LOCATION (exp);
13533 if (TREE_CODE (exp) == INDIRECT_REF)
13534 string = string_constant (exp1, &index);
13535 else
13537 tree low_bound = array_ref_low_bound (exp);
13538 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13540 /* Optimize the special case of a zero lower bound.
13542 We convert the low_bound to sizetype to avoid some problems
13543 with constant folding. (E.g. suppose the lower bound is 1,
13544 and its mode is QI. Without the conversion, (ARRAY + (INDEX
13545 - (unsigned char) 1)) becomes ((ARRAY + (-(unsigned char) 1))
13546 + INDEX), which becomes (ARRAY + 255 + INDEX). Oops! See the sketch after this function.) */
13547 if (! integer_zerop (low_bound))
13548 index = size_diffop_loc (loc, index,
13549 fold_convert_loc (loc, sizetype, low_bound));
13551 string = exp1;
13554 scalar_int_mode char_mode;
13555 if (string
13556 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13557 && TREE_CODE (string) == STRING_CST
13558 && TREE_CODE (index) == INTEGER_CST
13559 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13560 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
13561 &char_mode)
13562 && GET_MODE_SIZE (char_mode) == 1)
13563 return build_int_cst_type (TREE_TYPE (exp),
13564 (TREE_STRING_POINTER (string)
13565 [TREE_INT_CST_LOW (index)]));
13567 return NULL;
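
/* An illustrative sketch (not part of GCC) of the "Oops!" above:
   negating the lower bound in its narrow unsigned type and widening
   afterwards yields 255 rather than -1, which is why the bound is
   converted to sizetype before the subtraction.  */

static int
narrow_negate_sketch (void)
{
  unsigned char low_bound = 1;
  return (unsigned char) -low_bound;	/* 255, not -1.  */
}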
13570 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13571 an integer constant, real, or fixed-point constant.
13573 TYPE is the type of the result. */
13575 static tree
13576 fold_negate_const (tree arg0, tree type)
13578 tree t = NULL_TREE;
13580 switch (TREE_CODE (arg0))
13582 case INTEGER_CST:
13584 bool overflow;
13585 wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
13586 t = force_fit_type (type, val, 1,
13587 (overflow && ! TYPE_UNSIGNED (type))
13588 || TREE_OVERFLOW (arg0));
13589 break;
13592 case REAL_CST:
13593 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13594 break;
13596 case FIXED_CST:
13598 FIXED_VALUE_TYPE f;
13599 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13600 &(TREE_FIXED_CST (arg0)), NULL,
13601 TYPE_SATURATING (type));
13602 t = build_fixed (type, f);
13603 /* Propagate overflow flags. */
13604 if (overflow_p | TREE_OVERFLOW (arg0))
13605 TREE_OVERFLOW (t) = 1;
13606 break;
13609 default:
13610 gcc_unreachable ();
13613 return t;
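
/* An illustrative sketch (not part of GCC) of the overflow that
   wi::neg reports above: negating the most negative two's-complement
   value yields the value itself, because the true result 2^31 is not
   representable.  The sketch assumes a 32-bit int and uses unsigned
   arithmetic so the C stays well defined (the final conversion is
   implementation-defined; GCC defines it as modular).  */

static int
negate_overflow_sketch (int x)
{
  unsigned int ux = (unsigned int) x;
  return (int) (0u - ux);	/* For x == INT_MIN, returns INT_MIN.  */
}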
13616 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13617 an integer constant or real constant.
13619 TYPE is the type of the result. */
13621 tree
13622 fold_abs_const (tree arg0, tree type)
13624 tree t = NULL_TREE;
13626 switch (TREE_CODE (arg0))
13628 case INTEGER_CST:
13630 /* If the value is unsigned or non-negative, then the absolute value
13631 is the same as the ordinary value. */
13632 if (!wi::neg_p (wi::to_wide (arg0), TYPE_SIGN (type)))
13633 t = arg0;
13635 /* If the value is negative, then the absolute value is
13636 its negation. */
13637 else
13639 bool overflow;
13640 wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
13641 t = force_fit_type (type, val, -1,
13642 overflow | TREE_OVERFLOW (arg0));
13645 break;
13647 case REAL_CST:
13648 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13649 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13650 else
13651 t = arg0;
13652 break;
13654 default:
13655 gcc_unreachable ();
13658 return t;
13661 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13662 constant. TYPE is the type of the result. */
13664 static tree
13665 fold_not_const (const_tree arg0, tree type)
13667 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13669 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
13672 /* Given CODE, a relational operator, the target type, TYPE and two
13673 constant operands OP0 and OP1, return the result of the
13674 relational operation. If the result is not a compile time
13675 constant, then return NULL_TREE. */
13677 static tree
13678 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13680 int result, invert;
13682 /* From here on, the only cases we handle are when the result is
13683 known to be a constant. */
13685 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13687 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13688 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13690 /* Handle the cases where either operand is a NaN. */
13691 if (real_isnan (c0) || real_isnan (c1))
13693 switch (code)
13695 case EQ_EXPR:
13696 case ORDERED_EXPR:
13697 result = 0;
13698 break;
13700 case NE_EXPR:
13701 case UNORDERED_EXPR:
13702 case UNLT_EXPR:
13703 case UNLE_EXPR:
13704 case UNGT_EXPR:
13705 case UNGE_EXPR:
13706 case UNEQ_EXPR:
13707 result = 1;
13708 break;
13710 case LT_EXPR:
13711 case LE_EXPR:
13712 case GT_EXPR:
13713 case GE_EXPR:
13714 case LTGT_EXPR:
13715 if (flag_trapping_math)
13716 return NULL_TREE;
13717 result = 0;
13718 break;
13720 default:
13721 gcc_unreachable ();
13724 return constant_boolean_node (result, type);
13727 return constant_boolean_node (real_compare (code, c0, c1), type);
13730 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13732 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13733 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13734 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13737 /* Handle equality/inequality of complex constants. */
13738 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13740 tree rcond = fold_relational_const (code, type,
13741 TREE_REALPART (op0),
13742 TREE_REALPART (op1));
13743 tree icond = fold_relational_const (code, type,
13744 TREE_IMAGPART (op0),
13745 TREE_IMAGPART (op1));
13746 if (code == EQ_EXPR)
13747 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13748 else if (code == NE_EXPR)
13749 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13750 else
13751 return NULL_TREE;
13754 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13756 if (!VECTOR_TYPE_P (type))
13758 /* We have a vector comparison with a scalar boolean result. */
13759 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
13760 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
13761 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
13763 tree elem0 = VECTOR_CST_ELT (op0, i);
13764 tree elem1 = VECTOR_CST_ELT (op1, i);
13765 tree tmp = fold_relational_const (code, type, elem0, elem1);
13766 if (tmp == NULL_TREE)
13767 return NULL_TREE;
13768 if (integer_zerop (tmp))
13769 return constant_boolean_node (false, type);
13771 return constant_boolean_node (true, type);
13773 unsigned count = VECTOR_CST_NELTS (op0);
13774 gcc_assert (VECTOR_CST_NELTS (op1) == count
13775 && TYPE_VECTOR_SUBPARTS (type) == count);
13777 auto_vec<tree, 32> elts (count);
13778 for (unsigned i = 0; i < count; i++)
13780 tree elem_type = TREE_TYPE (type);
13781 tree elem0 = VECTOR_CST_ELT (op0, i);
13782 tree elem1 = VECTOR_CST_ELT (op1, i);
13784 tree tem = fold_relational_const (code, elem_type,
13785 elem0, elem1);
13787 if (tem == NULL_TREE)
13788 return NULL_TREE;
13790 elts.quick_push (build_int_cst (elem_type,
13791 integer_zerop (tem) ? 0 : -1));
13794 return build_vector (type, elts);
13797 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13799 To compute GT, swap the arguments and do LT.
13800 To compute GE, do LT and invert the result.
13801 To compute LE, swap the arguments, do LT and invert the result.
13802 To compute NE, do EQ and invert the result.
13804 Therefore, the code below must handle only EQ and LT. */
13806 if (code == LE_EXPR || code == GT_EXPR)
13808 std::swap (op0, op1);
13809 code = swap_tree_comparison (code);
13812 /* Note that it is safe to invert for real values here because we
13813 have already handled the one case that it matters. */
13815 invert = 0;
13816 if (code == NE_EXPR || code == GE_EXPR)
13818 invert = 1;
13819 code = invert_tree_comparison (code, false);
13822 /* Compute a result for LT or EQ if args permit;
13823 otherwise return NULL_TREE. */
13824 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13826 if (code == EQ_EXPR)
13827 result = tree_int_cst_equal (op0, op1);
13828 else
13829 result = tree_int_cst_lt (op0, op1);
13831 else
13832 return NULL_TREE;
13834 if (invert)
13835 result ^= 1;
13836 return constant_boolean_node (result, type);
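
/* An illustrative sketch (not part of GCC) of the NaN rows folded
   above, at the source level: ordered comparisons with a NaN operand
   are false, while NE and the unordered comparisons are true.  */

static int
nan_compare_sketch (void)
{
  double n = __builtin_nan ("");
  return (n == n) == 0		/* EQ_EXPR folds to 0.  */
	 && (n != n) == 1	/* NE_EXPR folds to 1.  */
	 && (n < n) == 0;	/* LT_EXPR folds to 0 (may trap).  */
}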
13839 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13840 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13841 itself. */
13843 tree
13844 fold_build_cleanup_point_expr (tree type, tree expr)
13846 /* If the expression does not have side effects then we don't have to wrap
13847 it with a cleanup point expression. */
13848 if (!TREE_SIDE_EFFECTS (expr))
13849 return expr;
13851 /* If the expression is a return, check the expression inside the
13852 return and, failing that, the right hand side of the modify expression
13853 inside the return. If either has no side effects, we don't need to
13854 wrap the expression in a cleanup point expression. Note we don't check
13855 the left hand side of the modify because it should always be a return decl. */
13856 if (TREE_CODE (expr) == RETURN_EXPR)
13858 tree op = TREE_OPERAND (expr, 0);
13859 if (!op || !TREE_SIDE_EFFECTS (op))
13860 return expr;
13861 op = TREE_OPERAND (op, 1);
13862 if (!TREE_SIDE_EFFECTS (op))
13863 return expr;
13866 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
13869 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13870 of an indirection through OP0, or NULL_TREE if no simplification is
13871 possible. */
13873 tree
13874 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
13876 tree sub = op0;
13877 tree subtype;
13879 STRIP_NOPS (sub);
13880 subtype = TREE_TYPE (sub);
13881 if (!POINTER_TYPE_P (subtype)
13882 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
13883 return NULL_TREE;
13885 if (TREE_CODE (sub) == ADDR_EXPR)
13887 tree op = TREE_OPERAND (sub, 0);
13888 tree optype = TREE_TYPE (op);
13889 /* *&CONST_DECL -> to the value of the const decl. */
13890 if (TREE_CODE (op) == CONST_DECL)
13891 return DECL_INITIAL (op);
13892 /* *&p => p; make sure to handle *&"str"[cst] here. */
13893 if (type == optype)
13895 tree fop = fold_read_from_constant_string (op);
13896 if (fop)
13897 return fop;
13898 else
13899 return op;
13901 /* *(foo *)&fooarray => fooarray[0] */
13902 else if (TREE_CODE (optype) == ARRAY_TYPE
13903 && type == TREE_TYPE (optype)
13904 && (!in_gimple_form
13905 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
13907 tree type_domain = TYPE_DOMAIN (optype);
13908 tree min_val = size_zero_node;
13909 if (type_domain && TYPE_MIN_VALUE (type_domain))
13910 min_val = TYPE_MIN_VALUE (type_domain);
13911 if (in_gimple_form
13912 && TREE_CODE (min_val) != INTEGER_CST)
13913 return NULL_TREE;
13914 return build4_loc (loc, ARRAY_REF, type, op, min_val,
13915 NULL_TREE, NULL_TREE);
13917 /* *(foo *)&complexfoo => __real__ complexfoo */
13918 else if (TREE_CODE (optype) == COMPLEX_TYPE
13919 && type == TREE_TYPE (optype))
13920 return fold_build1_loc (loc, REALPART_EXPR, type, op);
13921 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13922 else if (TREE_CODE (optype) == VECTOR_TYPE
13923 && type == TREE_TYPE (optype))
13925 tree part_width = TYPE_SIZE (type);
13926 tree index = bitsize_int (0);
13927 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
13931 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
13932 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13934 tree op00 = TREE_OPERAND (sub, 0);
13935 tree op01 = TREE_OPERAND (sub, 1);
13937 STRIP_NOPS (op00);
13938 if (TREE_CODE (op00) == ADDR_EXPR)
13940 tree op00type;
13941 op00 = TREE_OPERAND (op00, 0);
13942 op00type = TREE_TYPE (op00);
13944 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
13945 if (TREE_CODE (op00type) == VECTOR_TYPE
13946 && type == TREE_TYPE (op00type))
13948 tree part_width = TYPE_SIZE (type);
13949 unsigned HOST_WIDE_INT max_offset
13950 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
13951 * TYPE_VECTOR_SUBPARTS (op00type));
13952 if (tree_int_cst_sign_bit (op01) == 0
13953 && compare_tree_int (op01, max_offset) == -1)
13955 unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
13956 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
13957 tree index = bitsize_int (indexi);
13958 return fold_build3_loc (loc,
13959 BIT_FIELD_REF, type, op00,
13960 part_width, index);
13963 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13964 else if (TREE_CODE (op00type) == COMPLEX_TYPE
13965 && type == TREE_TYPE (op00type))
13967 tree size = TYPE_SIZE_UNIT (type);
13968 if (tree_int_cst_equal (size, op01))
13969 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
13971 /* ((foo *)&fooarray)[1] => fooarray[1] */
13972 else if (TREE_CODE (op00type) == ARRAY_TYPE
13973 && type == TREE_TYPE (op00type))
13975 tree type_domain = TYPE_DOMAIN (op00type);
13976 tree min = size_zero_node;
13977 if (type_domain && TYPE_MIN_VALUE (type_domain))
13978 min = TYPE_MIN_VALUE (type_domain);
13979 offset_int off = wi::to_offset (op01);
13980 offset_int el_sz = wi::to_offset (TYPE_SIZE_UNIT (type));
13981 offset_int remainder;
13982 off = wi::divmod_trunc (off, el_sz, SIGNED, &remainder);
13983 if (remainder == 0 && TREE_CODE (min) == INTEGER_CST)
13985 off = off + wi::to_offset (min);
13986 op01 = wide_int_to_tree (sizetype, off);
13987 return build4_loc (loc, ARRAY_REF, type, op00, op01,
13988 NULL_TREE, NULL_TREE);
13994 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13995 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13996 && type == TREE_TYPE (TREE_TYPE (subtype))
13997 && (!in_gimple_form
13998 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14000 tree type_domain;
14001 tree min_val = size_zero_node;
14002 sub = build_fold_indirect_ref_loc (loc, sub);
14003 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14004 if (type_domain && TYPE_MIN_VALUE (type_domain))
14005 min_val = TYPE_MIN_VALUE (type_domain);
14006 if (in_gimple_form
14007 && TREE_CODE (min_val) != INTEGER_CST)
14008 return NULL_TREE;
14009 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14010 NULL_TREE);
14013 return NULL_TREE;
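
/* An illustrative sketch (not part of GCC) of the ARRAY_REF
   re-indexing above: a POINTER_PLUS byte offset becomes an array
   index only when it is an exact multiple of the element size.  The
   element size of 4 here stands in for TYPE_SIZE_UNIT of a 32-bit
   element type; -1 marks the case the fold rejects.  */

static long
array_reindex_sketch (long byte_offset)
{
  long elem_size = 4;
  if (byte_offset % elem_size != 0)
    return -1;			/* Nonzero remainder: no fold.  */
  return byte_offset / elem_size;
}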
14016 /* Builds an expression for an indirection through T, simplifying some
14017 cases. */
14019 tree
14020 build_fold_indirect_ref_loc (location_t loc, tree t)
14022 tree type = TREE_TYPE (TREE_TYPE (t));
14023 tree sub = fold_indirect_ref_1 (loc, type, t);
14025 if (sub)
14026 return sub;
14028 return build1_loc (loc, INDIRECT_REF, type, t);
14031 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14033 tree
14034 fold_indirect_ref_loc (location_t loc, tree t)
14036 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14038 if (sub)
14039 return sub;
14040 else
14041 return t;
14044 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14045 whose result is ignored. The type of the returned tree need not be
14046 the same as the original expression. */
14048 tree
14049 fold_ignored_result (tree t)
14051 if (!TREE_SIDE_EFFECTS (t))
14052 return integer_zero_node;
14054 for (;;)
14055 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14057 case tcc_unary:
14058 t = TREE_OPERAND (t, 0);
14059 break;
14061 case tcc_binary:
14062 case tcc_comparison:
14063 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14064 t = TREE_OPERAND (t, 0);
14065 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14066 t = TREE_OPERAND (t, 1);
14067 else
14068 return t;
14069 break;
14071 case tcc_expression:
14072 switch (TREE_CODE (t))
14074 case COMPOUND_EXPR:
14075 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14076 return t;
14077 t = TREE_OPERAND (t, 0);
14078 break;
14080 case COND_EXPR:
14081 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14082 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14083 return t;
14084 t = TREE_OPERAND (t, 0);
14085 break;
14087 default:
14088 return t;
14090 break;
14092 default:
14093 return t;
14097 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14099 tree
14100 round_up_loc (location_t loc, tree value, unsigned int divisor)
14102 tree div = NULL_TREE;
14104 if (divisor == 1)
14105 return value;
14107 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14108 have to do anything. Only do this when VALUE is not a constant,
14109 because in that case this check is more expensive than just
14110 doing the rounding. */
14111 if (TREE_CODE (value) != INTEGER_CST)
14113 div = build_int_cst (TREE_TYPE (value), divisor);
14115 if (multiple_of_p (TREE_TYPE (value), value, div))
14116 return value;
14119 /* If divisor is a power of two, simplify this to bit manipulation. */
14120 if (pow2_or_zerop (divisor))
14122 if (TREE_CODE (value) == INTEGER_CST)
14124 wide_int val = wi::to_wide (value);
14125 bool overflow_p;
14127 if ((val & (divisor - 1)) == 0)
14128 return value;
14130 overflow_p = TREE_OVERFLOW (value);
14131 val += divisor - 1;
14132 val &= (int) -divisor;
14133 if (val == 0)
14134 overflow_p = true;
14136 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14138 else
14140 tree t;
14142 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14143 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14144 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14145 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14148 else
14150 if (!div)
14151 div = build_int_cst (TREE_TYPE (value), divisor);
14152 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14153 value = size_binop_loc (loc, MULT_EXPR, value, div);
14156 return value;
14159 /* Likewise, but round down. */
14161 tree
14162 round_down_loc (location_t loc, tree value, int divisor)
14164 tree div = NULL_TREE;
14166 gcc_assert (divisor > 0);
14167 if (divisor == 1)
14168 return value;
14170 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14171 have to do anything. Only do this when VALUE is not a constant,
14172 because in that case this check is more expensive than just
14173 doing the rounding. */
14174 if (TREE_CODE (value) != INTEGER_CST)
14176 div = build_int_cst (TREE_TYPE (value), divisor);
14178 if (multiple_of_p (TREE_TYPE (value), value, div))
14179 return value;
14182 /* If divisor is a power of two, simplify this to bit manipulation. */
14183 if (pow2_or_zerop (divisor))
14185 tree t;
14187 t = build_int_cst (TREE_TYPE (value), -divisor);
14188 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14190 else
14192 if (!div)
14193 div = build_int_cst (TREE_TYPE (value), divisor);
14194 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14195 value = size_binop_loc (loc, MULT_EXPR, value, div);
14198 return value;
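
/* An illustrative sketch (not part of GCC) of the bit manipulation
   used by round_up_loc and round_down_loc when DIVISOR is a power of
   two: adding DIVISOR - 1 and masking with -DIVISOR rounds up, while
   masking alone rounds down.  */

static unsigned long
round_up_pow2_sketch (unsigned long value, unsigned long divisor)
{
  return (value + divisor - 1) & -divisor;
}

static unsigned long
round_down_pow2_sketch (unsigned long value, unsigned long divisor)
{
  return value & -divisor;
}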
14201 /* Return a pointer to the base of the object addressed by EXP and
14202 extract information about the offset of the access, storing it
14203 in *PBITPOS and *POFFSET. */
14205 static tree
14206 split_address_to_core_and_offset (tree exp,
14207 HOST_WIDE_INT *pbitpos, tree *poffset)
14209 tree core;
14210 machine_mode mode;
14211 int unsignedp, reversep, volatilep;
14212 HOST_WIDE_INT bitsize;
14213 location_t loc = EXPR_LOCATION (exp);
14215 if (TREE_CODE (exp) == ADDR_EXPR)
14217 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14218 poffset, &mode, &unsignedp, &reversep,
14219 &volatilep);
14220 core = build_fold_addr_expr_loc (loc, core);
14222 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
14224 core = TREE_OPERAND (exp, 0);
14225 STRIP_NOPS (core);
14226 *pbitpos = 0;
14227 *poffset = TREE_OPERAND (exp, 1);
14228 if (TREE_CODE (*poffset) == INTEGER_CST)
14230 offset_int tem = wi::sext (wi::to_offset (*poffset),
14231 TYPE_PRECISION (TREE_TYPE (*poffset)));
14232 tem <<= LOG2_BITS_PER_UNIT;
14233 if (wi::fits_shwi_p (tem))
14235 *pbitpos = tem.to_shwi ();
14236 *poffset = NULL_TREE;
14240 else
14242 core = exp;
14243 *pbitpos = 0;
14244 *poffset = NULL_TREE;
14247 return core;
14250 /* Returns true if the addresses of E1 and E2 differ by a constant,
14251 false otherwise. If they do, E1 - E2 is stored in *DIFF. */
14253 bool
14254 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14256 tree core1, core2;
14257 HOST_WIDE_INT bitpos1, bitpos2;
14258 tree toffset1, toffset2, tdiff, type;
14260 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14261 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14263 if (bitpos1 % BITS_PER_UNIT != 0
14264 || bitpos2 % BITS_PER_UNIT != 0
14265 || !operand_equal_p (core1, core2, 0))
14266 return false;
14268 if (toffset1 && toffset2)
14270 type = TREE_TYPE (toffset1);
14271 if (type != TREE_TYPE (toffset2))
14272 toffset2 = fold_convert (type, toffset2);
14274 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14275 if (!cst_and_fits_in_hwi (tdiff))
14276 return false;
14278 *diff = int_cst_value (tdiff);
14280 else if (toffset1 || toffset2)
14282 /* If only one of the offsets is non-constant, the difference cannot
14283 be a constant. */
14284 return false;
14286 else
14287 *diff = 0;
14289 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14290 return true;
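
/* An illustrative sketch (not part of GCC) of what
   ptr_difference_const computes: both addresses below share the core
   &buf, so their difference folds to the constant 3.  */

static int
ptr_difference_sketch (void)
{
  static char buf[16];
  return (int) (&buf[5] - &buf[2]);	/* 3.  */
}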
14293 /* Return OFF converted to a pointer offset type suitable as offset for
14294 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14295 tree
14296 convert_to_ptrofftype_loc (location_t loc, tree off)
14298 return fold_convert_loc (loc, sizetype, off);
14301 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14302 tree
14303 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14305 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14306 ptr, convert_to_ptrofftype_loc (loc, off));
14309 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14310 tree
14311 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14313 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14314 ptr, size_int (off));
14317 /* Return a char pointer for a C string if it is a string constant
14318 or a sum of a string constant and an integer constant. We only support
14319 string constants properly terminated with a '\0' character.
14320 If STRLEN is a valid pointer, the length of the returned string
14321 (including the terminating character) is stored in *STRLEN. */
14323 const char *
14324 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
14326 tree offset_node;
14328 if (strlen)
14329 *strlen = 0;
14331 src = string_constant (src, &offset_node);
14332 if (src == 0)
14333 return NULL;
14335 unsigned HOST_WIDE_INT offset = 0;
14336 if (offset_node != NULL_TREE)
14338 if (!tree_fits_uhwi_p (offset_node))
14339 return NULL;
14340 else
14341 offset = tree_to_uhwi (offset_node);
14344 unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
14345 const char *string = TREE_STRING_POINTER (src);
14347 /* Support only properly null-terminated strings. */
14348 if (string_length == 0
14349 || string[string_length - 1] != '\0'
14350 || offset >= string_length)
14351 return NULL;
14353 if (strlen)
14354 *strlen = string_length - offset;
14355 return string + offset;
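
/* An illustrative sketch (not part of GCC): the contract of c_getstr,
   hand-evaluated for the equivalent of c_getstr ("hello" + 2, &len).
   TREE_STRING_LENGTH counts the terminating NUL, so string_length is
   6 and the reported length is 4 ("llo" plus the NUL).  */

static const char *
c_getstr_sketch (unsigned long *len)
{
  const char *string = "hello";
  unsigned long string_length = 6, offset = 2;
  if (len)
    *len = string_length - offset;	/* 4.  */
  return string + offset;
}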
14358 #if CHECKING_P
14360 namespace selftest {
14362 /* Helper functions for writing tests of folding trees. */
14364 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
14366 static void
14367 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
14368 tree constant)
14370 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
14373 /* Verify that the binary op (LHS CODE RHS) folds to an NON_LVALUE_EXPR
14374 wrapping WRAPPED_EXPR. */
14376 static void
14377 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
14378 tree wrapped_expr)
14380 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
14381 ASSERT_NE (wrapped_expr, result);
14382 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
14383 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
14386 /* Verify that various arithmetic binary operations are folded
14387 correctly. */
14389 static void
14390 test_arithmetic_folding ()
14392 tree type = integer_type_node;
14393 tree x = create_tmp_var_raw (type, "x");
14394 tree zero = build_zero_cst (type);
14395 tree one = build_int_cst (type, 1);
14397 /* Addition. */
14398 /* 1 <-- (0 + 1) */
14399 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
14400 one);
14401 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
14402 one);
14404 /* (nonlvalue)x <-- (x + 0) */
14405 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero, x);
14408 /* Subtraction. */
14409 /* 0 <-- (x - x) */
14410 assert_binop_folds_to_const (x, MINUS_EXPR, x,
14411 zero);
14412 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero, x);
14415 /* Multiplication. */
14416 /* 0 <-- (x * 0) */
14417 assert_binop_folds_to_const (x, MULT_EXPR, zero,
14418 zero);
14420 /* (nonlvalue)x <-- (x * 1) */
14421 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one, x);
14425 /* Verify that various binary operations on vectors are folded
14426 correctly. */
14428 static void
14429 test_vector_folding ()
14431 tree inner_type = integer_type_node;
14432 tree type = build_vector_type (inner_type, 4);
14433 tree zero = build_zero_cst (type);
14434 tree one = build_one_cst (type);
14436 /* Verify equality tests that return a scalar boolean result. */
14437 tree res_type = boolean_type_node;
14438 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
14439 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
14440 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
14441 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
14444 /* Run all of the selftests within this file. */
14446 void
14447 fold_const_c_tests ()
14449 test_arithmetic_folding ();
14450 test_vector_folding ();
14453 } // namespace selftest
14455 #endif /* CHECKING_P */