/* gcc/fold-const.c (from official-gcc.git).  */
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "params.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "attribs.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
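
/* Editor's note, an illustrative sketch (not part of the original source):
   the four bits encode LT (1), EQ (2), GT (4) and UNORD (8), so logical
   combinations of comparisons reduce to bitwise operations on the codes:

     (COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE    a < b || a == b
     (COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ    a <= b && a >= b
     (COMPCODE_TRUE ^ COMPCODE_NE) == COMPCODE_EQ  negation flips all bits

   A hypothetical helper combining two codes by AND would simply be:

     static enum comparison_code
     compcode_and (enum comparison_code c1, enum comparison_code c2)
     {
       return (enum comparison_code) (c1 & c2);
     }
*/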
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
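
/* A minimal usage sketch (editor's addition; assumes INTEGER_CST operands
   built with build_int_cst):

     tree twelve = build_int_cst (integer_type_node, 12);
     tree four   = build_int_cst (integer_type_node, 4);
     div_if_zero_remainder (twelve, four);    returns INTEGER_CST 3
     div_if_zero_remainder (four, twelve);    returns NULL_TREE, 4 % 12 != 0
*/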
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
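
/* Typical usage of the deferral API above (editor's sketch; USED_P and
   STMT stand in for the caller's own state):

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     ... decide whether FOLDED is actually used ...
     fold_undefer_overflow_warnings (used_p, stmt, 0);
*/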
/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = VECTOR_CST_NELTS (t), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, but negating one operand makes
	 it overflow if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = VECTOR_CST_NELTS (t), i;

	auto_vec<tree, 32> elts (count);
	for (i = 0; i < count; i++)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
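
/* Editor's sketch of how the two entry points relate, for some tree T:

     fold_negate_expr (loc, t);   NULL_TREE unless -T simplifies, e.g.
                                  -(a - b) becomes b - a
     negate_expr (t);             never NULL for non-NULL T; falls back
                                  to wrapping T in a NEGATE_EXPR
*/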
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
	 when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
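
/* Worked example (editor's addition): with CODE == PLUS_EXPR,

     IN = a + 5   splits into  *litp = 5,       return value a
     IN = a - 5   splits into  *minus_litp = 5, return value a

   and a non-literal TREE_CONSTANT operand would land in *conp instead
   of *litp.  */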
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants PARG1 and PARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree parg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (parg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wi::tree_to_wide_ref arg1 = wi::to_wide (parg1);
  wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (parg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
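
/* A small sketch (editor's addition) of folding through int_const_binop:

     tree two   = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     int_const_binop (PLUS_EXPR, two, three);        INTEGER_CST 5
     int_const_binop (TRUNC_DIV_EXPR, two,
		      integer_zero_node);            NULL_TREE (div by 0)
*/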
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make resulting NaN value to be qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = bi/br;
		     div = (bi * ratio) + br;
		     tr = (ai * ratio) + ar;
		     ti = ai - (ar * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = VECTOR_CST_NELTS (arg1), i;

      auto_vec<tree, 32> elts (count);
      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = VECTOR_CST_NELTS (arg1), i;

      auto_vec<tree, 32> elts (count);
      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
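
/* Editor's sketch (illustrative, not GCC code): the widened division
   algorithm above restated with plain C doubles, assuming <math.h> for
   fabs; it mirrors tree-complex.c:expand_complex_div_wide().

     static void
     complex_div_wide (double ar, double ai, double br, double bi,
		       double *tr, double *ti)
     {
       if (fabs (br) < fabs (bi))
	 {
	   double ratio = br / bi;
	   double div = (br * ratio) + bi;
	   *tr = ((ar * ratio) + ai) / div;
	   *ti = ((ai * ratio) - ar) / div;
	 }
       else
	 {
	   double ratio = bi / br;
	   double div = (bi * ratio) + br;
	   *tr = ((ai * ratio) + ar) / div;
	   *ti = (ai - (ar * ratio)) / div;
	 }
     }

   Dividing by the larger component first keeps the intermediate
   products from overflowing for wide input ranges.  */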
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int out_nelts, in_nelts, i;

	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	in_nelts = VECTOR_CST_NELTS (arg1);
	out_nelts = in_nelts * 2;
	gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
		    && out_nelts == TYPE_VECTOR_SUBPARTS (type));

	auto_vec<tree, 32> elts (out_nelts);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = (i < in_nelts
			? VECTOR_CST_ELT (arg1, i)
			: VECTOR_CST_ELT (arg2, i - in_nelts));
	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
				      ? NOP_EXPR : FIX_TRUNC_EXPR,
				      TREE_TYPE (type), elt);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int out_nelts, in_nelts, out, ofs, scale;

	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	in_nelts = VECTOR_CST_NELTS (arg1);
	out_nelts = in_nelts / 2;
	gcc_assert (in_nelts == VECTOR_CST_NELTS (arg2)
		    && out_nelts == TYPE_VECTOR_SUBPARTS (type));

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	auto_vec<tree, 32> elts (out_nelts);
	for (out = 0; out < out_nelts; out++)
	  {
	    unsigned int in = (out << scale) + ofs;
	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg1, in));
	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					  VECTOR_CST_ELT (arg2, in));

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    tree elt = const_binop (MULT_EXPR, t1, t2);
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  auto_vec<tree, 32> elements (count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements.quick_push (elem);
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int out_nelts, in_nelts, i;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	in_nelts = VECTOR_CST_NELTS (arg0);
	out_nelts = in_nelts / 2;
	gcc_assert (out_nelts == TYPE_VECTOR_SUBPARTS (type));

	unsigned int offset = 0;
	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  offset = out_nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	auto_vec<tree, 32> elts (out_nelts);
	for (i = 0; i < out_nelts; i++)
	  {
	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
					   VECTOR_CST_ELT (arg0, i + offset));
	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts, i;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;
	nelts = VECTOR_CST_NELTS (arg0);

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	tree res = VECTOR_CST_ELT (arg0, 0);
	for (i = 1; i < nelts; i++)
	  {
	    res = const_binop (subcode, res, VECTOR_CST_ELT (arg0, i));
	    if (res == NULL_TREE || !CONSTANT_CLASS_P (res))
	      return NULL_TREE;
	  }

	return res;
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
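
/* Example of the signedness handling above (editor's addition): with
   unsigned sizetype constants 2 and 5,

     size_diffop_loc (loc, size_int (2), size_int (5))

   subtracts the other way (5 - 2, which cannot overflow), converts the
   result to the signed ssizetype and negates it, yielding -3.  */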
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
	{
	  overflow = true;
	  val = wi::to_wide (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (real_less (&u, &r))
	    {
	      overflow = true;
	      val = wi::to_wide (ut);
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
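
/* Editor's sketch of the saturating semantics above (assuming a 32-bit
   signed target type); the folded constant is:

     NaN    ->  0          overflow flag set
     1e99   ->  INT_MAX    overflow flag set
     -1e99  ->  INT_MIN    overflow flag set
     3.7    ->  3          FIX_TRUNC_EXPR truncates toward zero
*/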
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  scalar_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if any fractional bits are nonzero, add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}
1988 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1989 to another floating point type. */
1991 static tree
1992 fold_convert_const_real_from_real (tree type, const_tree arg1)
1994 REAL_VALUE_TYPE value;
1995 tree t;
1997 /* Don't perform the operation if flag_signaling_nans is on
1998 and the operand is a signaling NaN. */
1999 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2000 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2001 return NULL_TREE;
2003 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2004 t = build_real (type, value);
2006 /* If converting an infinity or NAN to a representation that doesn't
2007 have one, set the overflow bit so that we can produce some kind of
2008 error message at the appropriate point if necessary. It's not the
2009 most user-friendly message, but it's better than nothing. */
2010 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2011 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2012 TREE_OVERFLOW (t) = 1;
2013 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2014 && !MODE_HAS_NANS (TYPE_MODE (type)))
2015 TREE_OVERFLOW (t) = 1;
2016 /* Regular overflow: the conversion produced an infinity in a mode
2017 that can't represent one. */
2018 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2019 && REAL_VALUE_ISINF (value)
2020 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2021 TREE_OVERFLOW (t) = 1;
2022 else
2023 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2024 return t;
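/* For example, narrowing a REAL_CST holding DBL_MAX to float
   overflows to +Inf; because float has infinities, no overflow is
   flagged, whereas the same conversion to a mode without infinities
   would set TREE_OVERFLOW via the third test above.  */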
2027 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2028 to a floating point type. */
2030 static tree
2031 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2033 REAL_VALUE_TYPE value;
2034 tree t;
2036 real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2037 &TREE_FIXED_CST (arg1));
2038 t = build_real (type, value);
2040 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2041 return t;
2044 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2045 to another fixed-point type. */
2047 static tree
2048 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2050 FIXED_VALUE_TYPE value;
2051 tree t;
2052 bool overflow_p;
2054 overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2055 &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2056 t = build_fixed (type, value);
2058 /* Propagate overflow flags. */
2059 if (overflow_p | TREE_OVERFLOW (arg1))
2060 TREE_OVERFLOW (t) = 1;
2061 return t;
2064 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2065 to a fixed-point type. */
2067 static tree
2068 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2070 FIXED_VALUE_TYPE value;
2071 tree t;
2072 bool overflow_p;
2073 double_int di;
2075 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2077 di.low = TREE_INT_CST_ELT (arg1, 0);
2078 if (TREE_INT_CST_NUNITS (arg1) == 1)
2079 di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2080 else
2081 di.high = TREE_INT_CST_ELT (arg1, 1);
2083 overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2084 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2085 TYPE_SATURATING (type));
2086 t = build_fixed (type, value);
2088 /* Propagate overflow flags. */
2089 if (overflow_p | TREE_OVERFLOW (arg1))
2090 TREE_OVERFLOW (t) = 1;
2091 return t;
2094 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2095 to a fixed-point type. */
2097 static tree
2098 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2100 FIXED_VALUE_TYPE value;
2101 tree t;
2102 bool overflow_p;
2104 overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2105 &TREE_REAL_CST (arg1),
2106 TYPE_SATURATING (type));
2107 t = build_fixed (type, value);
2109 /* Propagate overflow flags. */
2110 if (overflow_p | TREE_OVERFLOW (arg1))
2111 TREE_OVERFLOW (t) = 1;
2112 return t;
2115 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2116 type TYPE. If no simplification can be done return NULL_TREE. */
2118 static tree
2119 fold_convert_const (enum tree_code code, tree type, tree arg1)
2121 if (TREE_TYPE (arg1) == type)
2122 return arg1;
2124 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2125 || TREE_CODE (type) == OFFSET_TYPE)
2127 if (TREE_CODE (arg1) == INTEGER_CST)
2128 return fold_convert_const_int_from_int (type, arg1);
2129 else if (TREE_CODE (arg1) == REAL_CST)
2130 return fold_convert_const_int_from_real (code, type, arg1);
2131 else if (TREE_CODE (arg1) == FIXED_CST)
2132 return fold_convert_const_int_from_fixed (type, arg1);
2134 else if (TREE_CODE (type) == REAL_TYPE)
2136 if (TREE_CODE (arg1) == INTEGER_CST)
2137 return build_real_from_int_cst (type, arg1);
2138 else if (TREE_CODE (arg1) == REAL_CST)
2139 return fold_convert_const_real_from_real (type, arg1);
2140 else if (TREE_CODE (arg1) == FIXED_CST)
2141 return fold_convert_const_real_from_fixed (type, arg1);
2143 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2145 if (TREE_CODE (arg1) == FIXED_CST)
2146 return fold_convert_const_fixed_from_fixed (type, arg1);
2147 else if (TREE_CODE (arg1) == INTEGER_CST)
2148 return fold_convert_const_fixed_from_int (type, arg1);
2149 else if (TREE_CODE (arg1) == REAL_CST)
2150 return fold_convert_const_fixed_from_real (type, arg1);
2152 else if (TREE_CODE (type) == VECTOR_TYPE)
2154 if (TREE_CODE (arg1) == VECTOR_CST
2155 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2157 int len = VECTOR_CST_NELTS (arg1);
2158 tree elttype = TREE_TYPE (type);
2159 auto_vec<tree, 32> v (len);
2160 for (int i = 0; i < len; ++i)
2162 tree elt = VECTOR_CST_ELT (arg1, i);
2163 tree cvt = fold_convert_const (code, elttype, elt);
2164 if (cvt == NULL_TREE)
2165 return NULL_TREE;
2166 v.quick_push (cvt);
2168 return build_vector (type, v);
2171 return NULL_TREE;
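/* As an illustration, calling this with code FIX_TRUNC_EXPR, an
   integer vector type and a VECTOR_CST of REAL_CSTs folds each
   element through fold_convert_const_int_from_real and rebuilds the
   vector, giving up with NULL_TREE if any element fails to fold.  */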
2174 /* Construct a vector of zero elements of vector type TYPE. */
2176 static tree
2177 build_zero_vector (tree type)
2179 tree t;
2181 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2182 return build_vector_from_val (type, t);
2185 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2187 bool
2188 fold_convertible_p (const_tree type, const_tree arg)
2190 tree orig = TREE_TYPE (arg);
2192 if (type == orig)
2193 return true;
2195 if (TREE_CODE (arg) == ERROR_MARK
2196 || TREE_CODE (type) == ERROR_MARK
2197 || TREE_CODE (orig) == ERROR_MARK)
2198 return false;
2200 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2201 return true;
2203 switch (TREE_CODE (type))
2205 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2206 case POINTER_TYPE: case REFERENCE_TYPE:
2207 case OFFSET_TYPE:
2208 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2209 || TREE_CODE (orig) == OFFSET_TYPE);
2211 case REAL_TYPE:
2212 case FIXED_POINT_TYPE:
2213 case VECTOR_TYPE:
2214 case VOID_TYPE:
2215 return TREE_CODE (type) == TREE_CODE (orig);
2217 default:
2218 return false;
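/* For example, an int value is NOP-convertible to long or to a
   pointer type here, but not to double: for REAL_TYPE the switch
   above requires TREE_CODE (type) == TREE_CODE (orig), so an
   int -> double conversion needs a real FLOAT_EXPR instead.  */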
2222 /* Convert expression ARG to type TYPE. Used by the middle-end for
2223 simple conversions in preference to calling the front-end's convert. */
2225 tree
2226 fold_convert_loc (location_t loc, tree type, tree arg)
2228 tree orig = TREE_TYPE (arg);
2229 tree tem;
2231 if (type == orig)
2232 return arg;
2234 if (TREE_CODE (arg) == ERROR_MARK
2235 || TREE_CODE (type) == ERROR_MARK
2236 || TREE_CODE (orig) == ERROR_MARK)
2237 return error_mark_node;
2239 switch (TREE_CODE (type))
2241 case POINTER_TYPE:
2242 case REFERENCE_TYPE:
2243 /* Handle conversions between pointers to different address spaces. */
2244 if (POINTER_TYPE_P (orig)
2245 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2246 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2247 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2248 /* fall through */
2250 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2251 case OFFSET_TYPE:
2252 if (TREE_CODE (arg) == INTEGER_CST)
2254 tem = fold_convert_const (NOP_EXPR, type, arg);
2255 if (tem != NULL_TREE)
2256 return tem;
2258 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2259 || TREE_CODE (orig) == OFFSET_TYPE)
2260 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2261 if (TREE_CODE (orig) == COMPLEX_TYPE)
2262 return fold_convert_loc (loc, type,
2263 fold_build1_loc (loc, REALPART_EXPR,
2264 TREE_TYPE (orig), arg));
2265 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2266 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2267 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2269 case REAL_TYPE:
2270 if (TREE_CODE (arg) == INTEGER_CST)
2272 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2273 if (tem != NULL_TREE)
2274 return tem;
2276 else if (TREE_CODE (arg) == REAL_CST)
2278 tem = fold_convert_const (NOP_EXPR, type, arg);
2279 if (tem != NULL_TREE)
2280 return tem;
2282 else if (TREE_CODE (arg) == FIXED_CST)
2284 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2285 if (tem != NULL_TREE)
2286 return tem;
2289 switch (TREE_CODE (orig))
2291 case INTEGER_TYPE:
2292 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2293 case POINTER_TYPE: case REFERENCE_TYPE:
2294 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2296 case REAL_TYPE:
2297 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2299 case FIXED_POINT_TYPE:
2300 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2302 case COMPLEX_TYPE:
2303 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2304 return fold_convert_loc (loc, type, tem);
2306 default:
2307 gcc_unreachable ();
2310 case FIXED_POINT_TYPE:
2311 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2312 || TREE_CODE (arg) == REAL_CST)
2314 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2315 if (tem != NULL_TREE)
2316 goto fold_convert_exit;
2319 switch (TREE_CODE (orig))
2321 case FIXED_POINT_TYPE:
2322 case INTEGER_TYPE:
2323 case ENUMERAL_TYPE:
2324 case BOOLEAN_TYPE:
2325 case REAL_TYPE:
2326 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2328 case COMPLEX_TYPE:
2329 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2330 return fold_convert_loc (loc, type, tem);
2332 default:
2333 gcc_unreachable ();
2336 case COMPLEX_TYPE:
2337 switch (TREE_CODE (orig))
2339 case INTEGER_TYPE:
2340 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2341 case POINTER_TYPE: case REFERENCE_TYPE:
2342 case REAL_TYPE:
2343 case FIXED_POINT_TYPE:
2344 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2345 fold_convert_loc (loc, TREE_TYPE (type), arg),
2346 fold_convert_loc (loc, TREE_TYPE (type),
2347 integer_zero_node));
2348 case COMPLEX_TYPE:
2350 tree rpart, ipart;
2352 if (TREE_CODE (arg) == COMPLEX_EXPR)
2354 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2355 TREE_OPERAND (arg, 0));
2356 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2357 TREE_OPERAND (arg, 1));
2358 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2361 arg = save_expr (arg);
2362 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2363 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2364 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2365 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2366 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2369 default:
2370 gcc_unreachable ();
2373 case VECTOR_TYPE:
2374 if (integer_zerop (arg))
2375 return build_zero_vector (type);
2376 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2377 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2378 || TREE_CODE (orig) == VECTOR_TYPE);
2379 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2381 case VOID_TYPE:
2382 tem = fold_ignored_result (arg);
2383 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2385 default:
2386 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2387 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2388 gcc_unreachable ();
2390 fold_convert_exit:
2391 protected_set_expr_location_unshare (tem, loc);
2392 return tem;
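/* For example, converting a _Complex double value to double folds to
   a REALPART_EXPR of the operand, while converting a double to
   _Complex double builds COMPLEX_EXPR <arg, 0.0> through the
   COMPLEX_TYPE case above.  */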
2395 /* Return false if X can be assumed not to be an lvalue, true
2396 otherwise. */
2398 static bool
2399 maybe_lvalue_p (const_tree x)
2401 /* We only need to wrap lvalue tree codes. */
2402 switch (TREE_CODE (x))
2404 case VAR_DECL:
2405 case PARM_DECL:
2406 case RESULT_DECL:
2407 case LABEL_DECL:
2408 case FUNCTION_DECL:
2409 case SSA_NAME:
2411 case COMPONENT_REF:
2412 case MEM_REF:
2413 case INDIRECT_REF:
2414 case ARRAY_REF:
2415 case ARRAY_RANGE_REF:
2416 case BIT_FIELD_REF:
2417 case OBJ_TYPE_REF:
2419 case REALPART_EXPR:
2420 case IMAGPART_EXPR:
2421 case PREINCREMENT_EXPR:
2422 case PREDECREMENT_EXPR:
2423 case SAVE_EXPR:
2424 case TRY_CATCH_EXPR:
2425 case WITH_CLEANUP_EXPR:
2426 case COMPOUND_EXPR:
2427 case MODIFY_EXPR:
2428 case TARGET_EXPR:
2429 case COND_EXPR:
2430 case BIND_EXPR:
2431 break;
2433 default:
2434 /* Assume the worst for front-end tree codes. */
2435 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2436 break;
2437 return false;
2440 return true;
2443 /* Return an expr equal to X but certainly not valid as an lvalue. */
2445 tree
2446 non_lvalue_loc (location_t loc, tree x)
2448 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2449 us. */
2450 if (in_gimple_form)
2451 return x;
2453 if (! maybe_lvalue_p (x))
2454 return x;
2455 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2458 /* When pedantic, return an expr equal to X but certainly not valid as a
2459 pedantic lvalue. Otherwise, return X. */
2461 static tree
2462 pedantic_non_lvalue_loc (location_t loc, tree x)
2464 return protected_set_expr_location_unshare (x, loc);
2467 /* Given a tree comparison code, return the code that is the logical inverse.
2468 It is generally not safe to do this for floating-point comparisons, except
2469 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2470 ERROR_MARK in this case. */
2472 enum tree_code
2473 invert_tree_comparison (enum tree_code code, bool honor_nans)
2475 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2476 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2477 return ERROR_MARK;
2479 switch (code)
2481 case EQ_EXPR:
2482 return NE_EXPR;
2483 case NE_EXPR:
2484 return EQ_EXPR;
2485 case GT_EXPR:
2486 return honor_nans ? UNLE_EXPR : LE_EXPR;
2487 case GE_EXPR:
2488 return honor_nans ? UNLT_EXPR : LT_EXPR;
2489 case LT_EXPR:
2490 return honor_nans ? UNGE_EXPR : GE_EXPR;
2491 case LE_EXPR:
2492 return honor_nans ? UNGT_EXPR : GT_EXPR;
2493 case LTGT_EXPR:
2494 return UNEQ_EXPR;
2495 case UNEQ_EXPR:
2496 return LTGT_EXPR;
2497 case UNGT_EXPR:
2498 return LE_EXPR;
2499 case UNGE_EXPR:
2500 return LT_EXPR;
2501 case UNLT_EXPR:
2502 return GE_EXPR;
2503 case UNLE_EXPR:
2504 return GT_EXPR;
2505 case ORDERED_EXPR:
2506 return UNORDERED_EXPR;
2507 case UNORDERED_EXPR:
2508 return ORDERED_EXPR;
2509 default:
2510 gcc_unreachable ();
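/* For example, invert_tree_comparison (LT_EXPR, false) is GE_EXPR,
   while with HONOR_NANS it is UNGE_EXPR, since !(x < y) must stay
   true when either operand is a NaN.  Under -ftrapping-math the
   function instead returns ERROR_MARK for LT_EXPR, because the quiet
   UNGE_EXPR would lose the trap on unordered operands.  */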
2514 /* Similar, but return the comparison that results if the operands are
2515 swapped. This is safe for floating-point. */
2517 enum tree_code
2518 swap_tree_comparison (enum tree_code code)
2520 switch (code)
2522 case EQ_EXPR:
2523 case NE_EXPR:
2524 case ORDERED_EXPR:
2525 case UNORDERED_EXPR:
2526 case LTGT_EXPR:
2527 case UNEQ_EXPR:
2528 return code;
2529 case GT_EXPR:
2530 return LT_EXPR;
2531 case GE_EXPR:
2532 return LE_EXPR;
2533 case LT_EXPR:
2534 return GT_EXPR;
2535 case LE_EXPR:
2536 return GE_EXPR;
2537 case UNGT_EXPR:
2538 return UNLT_EXPR;
2539 case UNGE_EXPR:
2540 return UNLE_EXPR;
2541 case UNLT_EXPR:
2542 return UNGT_EXPR;
2543 case UNLE_EXPR:
2544 return UNGE_EXPR;
2545 default:
2546 gcc_unreachable ();
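/* For example, since x < y is the same test as y > x, GT_EXPR maps
   to LT_EXPR here; EQ_EXPR, NE_EXPR, LTGT_EXPR and the (un)ordered
   tests are symmetric in their operands and map to themselves.  */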
2551 /* Convert a comparison tree code from an enum tree_code representation
2552 into a compcode bit-based encoding. This function is the inverse of
2553 compcode_to_comparison. */
2555 static enum comparison_code
2556 comparison_to_compcode (enum tree_code code)
2558 switch (code)
2560 case LT_EXPR:
2561 return COMPCODE_LT;
2562 case EQ_EXPR:
2563 return COMPCODE_EQ;
2564 case LE_EXPR:
2565 return COMPCODE_LE;
2566 case GT_EXPR:
2567 return COMPCODE_GT;
2568 case NE_EXPR:
2569 return COMPCODE_NE;
2570 case GE_EXPR:
2571 return COMPCODE_GE;
2572 case ORDERED_EXPR:
2573 return COMPCODE_ORD;
2574 case UNORDERED_EXPR:
2575 return COMPCODE_UNORD;
2576 case UNLT_EXPR:
2577 return COMPCODE_UNLT;
2578 case UNEQ_EXPR:
2579 return COMPCODE_UNEQ;
2580 case UNLE_EXPR:
2581 return COMPCODE_UNLE;
2582 case UNGT_EXPR:
2583 return COMPCODE_UNGT;
2584 case LTGT_EXPR:
2585 return COMPCODE_LTGT;
2586 case UNGE_EXPR:
2587 return COMPCODE_UNGE;
2588 default:
2589 gcc_unreachable ();
2593 /* Convert a compcode bit-based encoding of a comparison operator back
2594 to GCC's enum tree_code representation. This function is the
2595 inverse of comparison_to_compcode. */
2597 static enum tree_code
2598 compcode_to_comparison (enum comparison_code code)
2600 switch (code)
2602 case COMPCODE_LT:
2603 return LT_EXPR;
2604 case COMPCODE_EQ:
2605 return EQ_EXPR;
2606 case COMPCODE_LE:
2607 return LE_EXPR;
2608 case COMPCODE_GT:
2609 return GT_EXPR;
2610 case COMPCODE_NE:
2611 return NE_EXPR;
2612 case COMPCODE_GE:
2613 return GE_EXPR;
2614 case COMPCODE_ORD:
2615 return ORDERED_EXPR;
2616 case COMPCODE_UNORD:
2617 return UNORDERED_EXPR;
2618 case COMPCODE_UNLT:
2619 return UNLT_EXPR;
2620 case COMPCODE_UNEQ:
2621 return UNEQ_EXPR;
2622 case COMPCODE_UNLE:
2623 return UNLE_EXPR;
2624 case COMPCODE_UNGT:
2625 return UNGT_EXPR;
2626 case COMPCODE_LTGT:
2627 return LTGT_EXPR;
2628 case COMPCODE_UNGE:
2629 return UNGE_EXPR;
2630 default:
2631 gcc_unreachable ();
2635 /* Return a tree for the comparison which is the combination of
2636 doing the AND or OR (depending on CODE) of the two operations LCODE
2637 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2638 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2639 if this makes the transformation invalid. */
2641 tree
2642 combine_comparisons (location_t loc,
2643 enum tree_code code, enum tree_code lcode,
2644 enum tree_code rcode, tree truth_type,
2645 tree ll_arg, tree lr_arg)
2647 bool honor_nans = HONOR_NANS (ll_arg);
2648 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2649 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2650 int compcode;
2652 switch (code)
2654 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2655 compcode = lcompcode & rcompcode;
2656 break;
2658 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2659 compcode = lcompcode | rcompcode;
2660 break;
2662 default:
2663 return NULL_TREE;
2666 if (!honor_nans)
2668 /* Eliminate unordered comparisons, as well as LTGT and ORD
2669 which are not used unless the mode has NaNs. */
2670 compcode &= ~COMPCODE_UNORD;
2671 if (compcode == COMPCODE_LTGT)
2672 compcode = COMPCODE_NE;
2673 else if (compcode == COMPCODE_ORD)
2674 compcode = COMPCODE_TRUE;
2676 else if (flag_trapping_math)
2678 /* Check that the original operation and the optimized ones will trap
2679 under the same condition. */
2680 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2681 && (lcompcode != COMPCODE_EQ)
2682 && (lcompcode != COMPCODE_ORD);
2683 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2684 && (rcompcode != COMPCODE_EQ)
2685 && (rcompcode != COMPCODE_ORD);
2686 bool trap = (compcode & COMPCODE_UNORD) == 0
2687 && (compcode != COMPCODE_EQ)
2688 && (compcode != COMPCODE_ORD);
2690 /* In a short-circuited boolean expression the LHS might be
2691 such that the RHS, if evaluated, will never trap. For
2692 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2693 if neither x nor y is NaN. (This is a mixed blessing: for
2694 example, the expression above will never trap, hence
2695 optimizing it to x < y would be invalid). */
2696 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2697 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2698 rtrap = false;
2700 /* If the comparison was short-circuited, and only the RHS
2701 trapped, we may now generate a spurious trap. */
2702 if (rtrap && !ltrap
2703 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2704 return NULL_TREE;
2706 /* If we changed the conditions that cause a trap, we lose. */
2707 if ((ltrap || rtrap) != trap)
2708 return NULL_TREE;
2711 if (compcode == COMPCODE_TRUE)
2712 return constant_boolean_node (true, truth_type);
2713 else if (compcode == COMPCODE_FALSE)
2714 return constant_boolean_node (false, truth_type);
2715 else
2717 enum tree_code tcode;
2719 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2720 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
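/* For example, with the bit encoding of comparison_code (LT = 1,
   EQ = 2, GT = 4), (a < b) || (a == b) combines as 1 | 2 = 3, i.e.
   COMPCODE_LE, and folds to a <= b; (a < b) && (a > b) combines as
   1 & 4 = 0, i.e. COMPCODE_FALSE, and folds to constant false
   (assuming none of the NaN/trapping constraints above apply).  */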
2724 /* Return nonzero if two operands (typically of the same tree node)
2725 are necessarily equal. FLAGS modifies behavior as follows:
2727 If OEP_ONLY_CONST is set, only return nonzero for constants.
2728 This function tests whether the operands are indistinguishable;
2729 it does not test whether they are equal using C's == operation.
2730 The distinction is important for IEEE floating point, because
2731 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2732 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2734 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2735 even though it may hold multiple values during a function.
2736 This is because a GCC tree node guarantees that nothing else is
2737 executed between the evaluation of its "operands" (which may often
2738 be evaluated in arbitrary order). Hence if the operands themselves
2739 have no side effects, the VAR_DECLs, PARM_DECLs etc. must hold the
2740 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2741 unset means assuming isochronic (or instantaneous) tree equivalence.
2742 Unless comparing arbitrary expression trees, such as from different
2743 statements, this flag can usually be left unset.
2745 If OEP_PURE_SAME is set, then pure functions with identical arguments
2746 are considered the same. It is used when the caller has other ways
2747 to ensure that global memory is unchanged in between.
2749 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2750 not values of expressions.
2752 If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2753 such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2755 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false for
2756 any operand with side effects. This is unnecessarily conservative in the
2757 case we know that arg0 and arg1 are in disjoint code paths (such as the
2758 arms of a ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2759 addresses with TREE_CONSTANT flag set so we know that &var == &var
2760 even if var is volatile. */
2762 int
2763 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2765 /* When checking, verify at the outermost operand_equal_p call that
2766 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
2767 hash value. */
2768 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2770 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2772 if (arg0 != arg1)
2774 inchash::hash hstate0 (0), hstate1 (0);
2775 inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2776 inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2777 hashval_t h0 = hstate0.end ();
2778 hashval_t h1 = hstate1.end ();
2779 gcc_assert (h0 == h1);
2781 return 1;
2783 else
2784 return 0;
2787 /* If either is ERROR_MARK, they aren't equal. */
2788 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2789 || TREE_TYPE (arg0) == error_mark_node
2790 || TREE_TYPE (arg1) == error_mark_node)
2791 return 0;
2793 /* Similar, if either does not have a type (like a released SSA name),
2794 they aren't equal. */
2795 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2796 return 0;
2798 /* We cannot consider pointers to different address space equal. */
2799 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2800 && POINTER_TYPE_P (TREE_TYPE (arg1))
2801 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2802 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2803 return 0;
2805 /* Check equality of integer constants before bailing out due to
2806 precision differences. */
2807 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2809 /* Address of INTEGER_CST is not defined; check that we did not forget
2810 to drop the OEP_ADDRESS_OF flags. */
2811 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2812 return tree_int_cst_equal (arg0, arg1);
2815 if (!(flags & OEP_ADDRESS_OF))
2817 /* If the two types don't have the same signedness, then we can't consider
2818 them equal. We must check this before the STRIP_NOPS calls
2819 because they may change the signedness of the arguments. As pointers
2820 strictly don't have a signedness, require either two pointers or
2821 two non-pointers as well. */
2822 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2823 || POINTER_TYPE_P (TREE_TYPE (arg0))
2824 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2825 return 0;
2827 /* If the two types don't have the same precision, then it is not safe
2828 to strip NOPs. */
2829 if (element_precision (TREE_TYPE (arg0))
2830 != element_precision (TREE_TYPE (arg1)))
2831 return 0;
2833 STRIP_NOPS (arg0);
2834 STRIP_NOPS (arg1);
2836 #if 0
2837 /* FIXME: the Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
2838 sanity check once the issue is solved. */
2839 else
2840 /* Addresses of conversions and SSA_NAMEs (and many other things)
2841 are not defined. Check that we did not forget to drop the
2842 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2843 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2844 && TREE_CODE (arg0) != SSA_NAME);
2845 #endif
2847 /* In case both args are comparisons but with different comparison
2848 code, try to swap the comparison operands of one arg to produce
2849 a match and compare that variant. */
2850 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2851 && COMPARISON_CLASS_P (arg0)
2852 && COMPARISON_CLASS_P (arg1))
2854 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2856 if (TREE_CODE (arg0) == swap_code)
2857 return operand_equal_p (TREE_OPERAND (arg0, 0),
2858 TREE_OPERAND (arg1, 1), flags)
2859 && operand_equal_p (TREE_OPERAND (arg0, 1),
2860 TREE_OPERAND (arg1, 0), flags);
2863 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2865 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2866 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2868 else if (flags & OEP_ADDRESS_OF)
2870 /* If we are interested in comparing addresses ignore
2871 MEM_REF wrappings of the base that can appear just for
2872 TBAA reasons. */
2873 if (TREE_CODE (arg0) == MEM_REF
2874 && DECL_P (arg1)
2875 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2876 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2877 && integer_zerop (TREE_OPERAND (arg0, 1)))
2878 return 1;
2879 else if (TREE_CODE (arg1) == MEM_REF
2880 && DECL_P (arg0)
2881 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2882 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2883 && integer_zerop (TREE_OPERAND (arg1, 1)))
2884 return 1;
2885 return 0;
2887 else
2888 return 0;
2891 /* When not checking addresses, this is needed for conversions and for
2892 COMPONENT_REF. Might as well play it safe and always test this. */
2893 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2894 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2895 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2896 && !(flags & OEP_ADDRESS_OF)))
2897 return 0;
2899 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2900 We don't care about side effects in that case because the SAVE_EXPR
2901 takes care of that for us. In all other cases, two expressions are
2902 equal if they have no side effects. If we have two identical
2903 expressions with side effects that should be treated the same due
2904 to the only side effects being identical SAVE_EXPR's, that will
2905 be detected in the recursive calls below.
2906 If we are taking an invariant address of two identical objects
2907 they are necessarily equal as well. */
2908 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2909 && (TREE_CODE (arg0) == SAVE_EXPR
2910 || (flags & OEP_MATCH_SIDE_EFFECTS)
2911 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2912 return 1;
2914 /* Next handle constant cases, those for which we can return 1 even
2915 if ONLY_CONST is set. */
2916 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2917 switch (TREE_CODE (arg0))
2919 case INTEGER_CST:
2920 return tree_int_cst_equal (arg0, arg1);
2922 case FIXED_CST:
2923 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2924 TREE_FIXED_CST (arg1));
2926 case REAL_CST:
2927 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2928 return 1;
2931 if (!HONOR_SIGNED_ZEROS (arg0))
2933 /* If we do not distinguish between signed and unsigned zero,
2934 consider them equal. */
2935 if (real_zerop (arg0) && real_zerop (arg1))
2936 return 1;
2938 return 0;
2940 case VECTOR_CST:
2942 unsigned i;
2944 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2945 return 0;
2947 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2949 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2950 VECTOR_CST_ELT (arg1, i), flags))
2951 return 0;
2953 return 1;
2956 case COMPLEX_CST:
2957 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2958 flags)
2959 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2960 flags));
2962 case STRING_CST:
2963 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2964 && ! memcmp (TREE_STRING_POINTER (arg0),
2965 TREE_STRING_POINTER (arg1),
2966 TREE_STRING_LENGTH (arg0)));
2968 case ADDR_EXPR:
2969 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2970 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2971 flags | OEP_ADDRESS_OF
2972 | OEP_MATCH_SIDE_EFFECTS);
2973 case CONSTRUCTOR:
2974 /* In GIMPLE empty constructors are allowed in initializers of
2975 aggregates. */
2976 return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
2977 default:
2978 break;
2981 if (flags & OEP_ONLY_CONST)
2982 return 0;
2984 /* Define macros to test an operand from arg0 and arg1 for equality and a
2985 variant that allows null and views null as being different from any
2986 non-null value. In the latter case, if either is null, then both
2987 must be; otherwise, do the normal comparison. */
2988 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2989 TREE_OPERAND (arg1, N), flags)
2991 #define OP_SAME_WITH_NULL(N) \
2992 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2993 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2995 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2997 case tcc_unary:
2998 /* Two conversions are equal only if signedness and modes match. */
2999 switch (TREE_CODE (arg0))
3001 CASE_CONVERT:
3002 case FIX_TRUNC_EXPR:
3003 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3004 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3005 return 0;
3006 break;
3007 default:
3008 break;
3011 return OP_SAME (0);
3014 case tcc_comparison:
3015 case tcc_binary:
3016 if (OP_SAME (0) && OP_SAME (1))
3017 return 1;
3019 /* For commutative ops, allow the other order. */
3020 return (commutative_tree_code (TREE_CODE (arg0))
3021 && operand_equal_p (TREE_OPERAND (arg0, 0),
3022 TREE_OPERAND (arg1, 1), flags)
3023 && operand_equal_p (TREE_OPERAND (arg0, 1),
3024 TREE_OPERAND (arg1, 0), flags));
3026 case tcc_reference:
3027 /* If either of the pointer (or reference) expressions we are
3028 dereferencing contain a side effect, these cannot be equal,
3029 but their addresses can be. */
3030 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3031 && (TREE_SIDE_EFFECTS (arg0)
3032 || TREE_SIDE_EFFECTS (arg1)))
3033 return 0;
3035 switch (TREE_CODE (arg0))
3037 case INDIRECT_REF:
3038 if (!(flags & OEP_ADDRESS_OF)
3039 && (TYPE_ALIGN (TREE_TYPE (arg0))
3040 != TYPE_ALIGN (TREE_TYPE (arg1))))
3041 return 0;
3042 flags &= ~OEP_ADDRESS_OF;
3043 return OP_SAME (0);
3045 case IMAGPART_EXPR:
3046 /* Require the same offset. */
3047 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3048 TYPE_SIZE (TREE_TYPE (arg1)),
3049 flags & ~OEP_ADDRESS_OF))
3050 return 0;
3052 /* Fallthru. */
3053 case REALPART_EXPR:
3054 case VIEW_CONVERT_EXPR:
3055 return OP_SAME (0);
3057 case TARGET_MEM_REF:
3058 case MEM_REF:
3059 if (!(flags & OEP_ADDRESS_OF))
3061 /* Require equal access sizes */
3062 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3063 && (!TYPE_SIZE (TREE_TYPE (arg0))
3064 || !TYPE_SIZE (TREE_TYPE (arg1))
3065 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3066 TYPE_SIZE (TREE_TYPE (arg1)),
3067 flags)))
3068 return 0;
3069 /* Verify that access happens in similar types. */
3070 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3071 return 0;
3072 /* Verify that accesses are TBAA compatible. */
3073 if (!alias_ptr_types_compatible_p
3074 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3075 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3076 || (MR_DEPENDENCE_CLIQUE (arg0)
3077 != MR_DEPENDENCE_CLIQUE (arg1))
3078 || (MR_DEPENDENCE_BASE (arg0)
3079 != MR_DEPENDENCE_BASE (arg1)))
3080 return 0;
3081 /* Verify that alignment is compatible. */
3082 if (TYPE_ALIGN (TREE_TYPE (arg0))
3083 != TYPE_ALIGN (TREE_TYPE (arg1)))
3084 return 0;
3086 flags &= ~OEP_ADDRESS_OF;
3087 return (OP_SAME (0) && OP_SAME (1)
3088 /* TARGET_MEM_REFs require equal extra operands. */
3089 && (TREE_CODE (arg0) != TARGET_MEM_REF
3090 || (OP_SAME_WITH_NULL (2)
3091 && OP_SAME_WITH_NULL (3)
3092 && OP_SAME_WITH_NULL (4))));
3094 case ARRAY_REF:
3095 case ARRAY_RANGE_REF:
3096 if (!OP_SAME (0))
3097 return 0;
3098 flags &= ~OEP_ADDRESS_OF;
3099 /* First compare the array index by value if it is constant, as we
3100 may have indexes of different types but the same value here. */
3101 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3102 TREE_OPERAND (arg1, 1))
3103 || OP_SAME (1))
3104 && OP_SAME_WITH_NULL (2)
3105 && OP_SAME_WITH_NULL (3)
3106 /* Compare low bound and element size as with OEP_ADDRESS_OF
3107 we have to account for the offset of the ref. */
3108 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3109 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3110 || (operand_equal_p (array_ref_low_bound
3111 (CONST_CAST_TREE (arg0)),
3112 array_ref_low_bound
3113 (CONST_CAST_TREE (arg1)), flags)
3114 && operand_equal_p (array_ref_element_size
3115 (CONST_CAST_TREE (arg0)),
3116 array_ref_element_size
3117 (CONST_CAST_TREE (arg1)),
3118 flags))));
3120 case COMPONENT_REF:
3121 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3122 may be NULL when we're called to compare MEM_EXPRs. */
3123 if (!OP_SAME_WITH_NULL (0)
3124 || !OP_SAME (1))
3125 return 0;
3126 flags &= ~OEP_ADDRESS_OF;
3127 return OP_SAME_WITH_NULL (2);
3129 case BIT_FIELD_REF:
3130 if (!OP_SAME (0))
3131 return 0;
3132 flags &= ~OEP_ADDRESS_OF;
3133 return OP_SAME (1) && OP_SAME (2);
3135 default:
3136 return 0;
3139 case tcc_expression:
3140 switch (TREE_CODE (arg0))
3142 case ADDR_EXPR:
3143 /* Be sure we pass the right ADDRESS_OF flag. */
3144 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3145 return operand_equal_p (TREE_OPERAND (arg0, 0),
3146 TREE_OPERAND (arg1, 0),
3147 flags | OEP_ADDRESS_OF);
3149 case TRUTH_NOT_EXPR:
3150 return OP_SAME (0);
3152 case TRUTH_ANDIF_EXPR:
3153 case TRUTH_ORIF_EXPR:
3154 return OP_SAME (0) && OP_SAME (1);
3156 case FMA_EXPR:
3157 case WIDEN_MULT_PLUS_EXPR:
3158 case WIDEN_MULT_MINUS_EXPR:
3159 if (!OP_SAME (2))
3160 return 0;
3161 /* The multiplication operands are commutative. */
3162 /* FALLTHRU */
3164 case TRUTH_AND_EXPR:
3165 case TRUTH_OR_EXPR:
3166 case TRUTH_XOR_EXPR:
3167 if (OP_SAME (0) && OP_SAME (1))
3168 return 1;
3170 /* Otherwise take into account that this is a commutative operation. */
3171 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3172 TREE_OPERAND (arg1, 1), flags)
3173 && operand_equal_p (TREE_OPERAND (arg0, 1),
3174 TREE_OPERAND (arg1, 0), flags));
3176 case COND_EXPR:
3177 if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3178 return 0;
3179 flags &= ~OEP_ADDRESS_OF;
3180 return OP_SAME (0);
3182 case BIT_INSERT_EXPR:
3183 /* BIT_INSERT_EXPR has an implicit operand as the type precision
3184 of op1. Need to check to make sure they are the same. */
3185 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3186 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3187 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3188 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3189 return false;
3190 /* FALLTHRU */
3192 case VEC_COND_EXPR:
3193 case DOT_PROD_EXPR:
3194 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3196 case MODIFY_EXPR:
3197 case INIT_EXPR:
3198 case COMPOUND_EXPR:
3199 case PREDECREMENT_EXPR:
3200 case PREINCREMENT_EXPR:
3201 case POSTDECREMENT_EXPR:
3202 case POSTINCREMENT_EXPR:
3203 if (flags & OEP_LEXICOGRAPHIC)
3204 return OP_SAME (0) && OP_SAME (1);
3205 return 0;
3207 case CLEANUP_POINT_EXPR:
3208 case EXPR_STMT:
3209 if (flags & OEP_LEXICOGRAPHIC)
3210 return OP_SAME (0);
3211 return 0;
3213 default:
3214 return 0;
3217 case tcc_vl_exp:
3218 switch (TREE_CODE (arg0))
3220 case CALL_EXPR:
3221 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3222 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3223 /* If one CALL_EXPR is an internal function call and the other is
3224 a normal call, then they are not equal. */
3225 return 0;
3226 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3228 /* If the CALL_EXPRs call different internal functions, then they
3229 are not equal. */
3230 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3231 return 0;
3233 else
3235 /* If the CALL_EXPRs call different functions, then they are not
3236 equal. */
3237 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3238 flags))
3239 return 0;
3242 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3244 unsigned int cef = call_expr_flags (arg0);
3245 if (flags & OEP_PURE_SAME)
3246 cef &= ECF_CONST | ECF_PURE;
3247 else
3248 cef &= ECF_CONST;
3249 if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3250 return 0;
3253 /* Now see if all the arguments are the same. */
3255 const_call_expr_arg_iterator iter0, iter1;
3256 const_tree a0, a1;
3257 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3258 a1 = first_const_call_expr_arg (arg1, &iter1);
3259 a0 && a1;
3260 a0 = next_const_call_expr_arg (&iter0),
3261 a1 = next_const_call_expr_arg (&iter1))
3262 if (! operand_equal_p (a0, a1, flags))
3263 return 0;
3265 /* If we get here and both argument lists are exhausted
3266 then the CALL_EXPRs are equal. */
3267 return ! (a0 || a1);
3269 default:
3270 return 0;
3273 case tcc_declaration:
3274 /* Consider __builtin_sqrt equal to sqrt. */
3275 return (TREE_CODE (arg0) == FUNCTION_DECL
3276 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3277 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3278 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3280 case tcc_exceptional:
3281 if (TREE_CODE (arg0) == CONSTRUCTOR)
3283 /* In GIMPLE constructors are used only to build vectors from
3284 elements. Individual elements in the constructor must be
3285 indexed in increasing order and form an initial sequence.
3287 We make no effort to compare constructors in GENERIC.
3288 (see sem_variable::equals in ipa-icf which can do so for
3289 constants). */
3290 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3291 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3292 return 0;
3294 /* Be sure that the constructed vectors have the same representation.
3295 So far we have only tested that element precisions and modes match.
3296 Vectors may be BLKmode, so also check that the numbers of
3297 parts match. */
3298 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3299 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3300 return 0;
3302 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3303 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3304 unsigned int len = vec_safe_length (v0);
3306 if (len != vec_safe_length (v1))
3307 return 0;
3309 for (unsigned int i = 0; i < len; i++)
3311 constructor_elt *c0 = &(*v0)[i];
3312 constructor_elt *c1 = &(*v1)[i];
3314 if (!operand_equal_p (c0->value, c1->value, flags)
3315 /* In GIMPLE the indexes can be either NULL or matching i.
3316 Double check this so we won't get false
3317 positives for GENERIC. */
3318 || (c0->index
3319 && (TREE_CODE (c0->index) != INTEGER_CST
3320 || !compare_tree_int (c0->index, i)))
3321 || (c1->index
3322 && (TREE_CODE (c1->index) != INTEGER_CST
3323 || !compare_tree_int (c1->index, i))))
3324 return 0;
3326 return 1;
3328 else if (TREE_CODE (arg0) == STATEMENT_LIST
3329 && (flags & OEP_LEXICOGRAPHIC))
3331 /* Compare the STATEMENT_LISTs. */
3332 tree_stmt_iterator tsi1, tsi2;
3333 tree body1 = CONST_CAST_TREE (arg0);
3334 tree body2 = CONST_CAST_TREE (arg1);
3335 for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3336 tsi_next (&tsi1), tsi_next (&tsi2))
3338 /* The lists don't have the same number of statements. */
3339 if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3340 return 0;
3341 if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3342 return 1;
3343 if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3344 OEP_LEXICOGRAPHIC))
3345 return 0;
3348 return 0;
3350 case tcc_statement:
3351 switch (TREE_CODE (arg0))
3353 case RETURN_EXPR:
3354 if (flags & OEP_LEXICOGRAPHIC)
3355 return OP_SAME_WITH_NULL (0);
3356 return 0;
3357 default:
3358 return 0;
3361 default:
3362 return 0;
3365 #undef OP_SAME
3366 #undef OP_SAME_WITH_NULL
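/* For example, b + a[i] and a[i] + b compare equal through the
   commutative tcc_binary case above, while two textually identical
   calls to a function that is neither const nor pure do not, unless
   OEP_LEXICOGRAPHIC is in effect (or OEP_PURE_SAME, for pure
   calls).  */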
3369 /* Similar to operand_equal_p, but strip nops first. */
3371 static bool
3372 operand_equal_for_comparison_p (tree arg0, tree arg1)
3374 if (operand_equal_p (arg0, arg1, 0))
3375 return true;
3377 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3378 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3379 return false;
3381 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3382 and see if the inner values are the same. This removes any
3383 signedness comparison, which doesn't matter here. */
3384 STRIP_NOPS (arg0);
3385 STRIP_NOPS (arg1);
3386 if (operand_equal_p (arg0, arg1, 0))
3387 return true;
3389 return false;
3392 /* See if ARG is an expression that is either a comparison or is performing
3393 arithmetic on comparisons. The comparisons must only be comparing
3394 two different values, which will be stored in *CVAL1 and *CVAL2; if
3395 they are nonzero it means that some operands have already been found.
3396 No variables may be used anywhere else in the expression except in the
3397 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3398 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3400 If this is true, return 1. Otherwise, return zero. */
3402 static int
3403 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3405 enum tree_code code = TREE_CODE (arg);
3406 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3408 /* We can handle some of the tcc_expression cases here. */
3409 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3410 tclass = tcc_unary;
3411 else if (tclass == tcc_expression
3412 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3413 || code == COMPOUND_EXPR))
3414 tclass = tcc_binary;
3416 else if (tclass == tcc_expression && code == SAVE_EXPR
3417 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3419 /* If we've already found a CVAL1 or CVAL2, this expression is
3420 too complex to handle. */
3421 if (*cval1 || *cval2)
3422 return 0;
3424 tclass = tcc_unary;
3425 *save_p = 1;
3428 switch (tclass)
3430 case tcc_unary:
3431 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3433 case tcc_binary:
3434 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3435 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3436 cval1, cval2, save_p));
3438 case tcc_constant:
3439 return 1;
3441 case tcc_expression:
3442 if (code == COND_EXPR)
3443 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3444 cval1, cval2, save_p)
3445 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3446 cval1, cval2, save_p)
3447 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3448 cval1, cval2, save_p));
3449 return 0;
3451 case tcc_comparison:
3452 /* First see if we can handle the first operand, then the second. For
3453 the second operand, we know *CVAL1 can't be zero. Each side of
3454 the comparison must be one of the two values; test for the
3455 case where this isn't true by failing if the two operands
3456 are the same. */
3458 if (operand_equal_p (TREE_OPERAND (arg, 0),
3459 TREE_OPERAND (arg, 1), 0))
3460 return 0;
3462 if (*cval1 == 0)
3463 *cval1 = TREE_OPERAND (arg, 0);
3464 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3466 else if (*cval2 == 0)
3467 *cval2 = TREE_OPERAND (arg, 0);
3468 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3470 else
3471 return 0;
3473 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3475 else if (*cval2 == 0)
3476 *cval2 = TREE_OPERAND (arg, 1);
3477 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3479 else
3480 return 0;
3482 return 1;
3484 default:
3485 return 0;
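/* For example, (x < y) || (x == y) satisfies this predicate with
   *CVAL1 = x and *CVAL2 = y, whereas (x < y) && (y < z) does not,
   since its comparisons involve three distinct values.  */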
3489 /* ARG is a tree that is known to contain just arithmetic operations and
3490 comparisons. Evaluate the operations in the tree substituting NEW0 for
3491 any occurrence of OLD0 as an operand of a comparison and likewise for
3492 NEW1 and OLD1. */
3494 static tree
3495 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3496 tree old1, tree new1)
3498 tree type = TREE_TYPE (arg);
3499 enum tree_code code = TREE_CODE (arg);
3500 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3502 /* We can handle some of the tcc_expression cases here. */
3503 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3504 tclass = tcc_unary;
3505 else if (tclass == tcc_expression
3506 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3507 tclass = tcc_binary;
3509 switch (tclass)
3511 case tcc_unary:
3512 return fold_build1_loc (loc, code, type,
3513 eval_subst (loc, TREE_OPERAND (arg, 0),
3514 old0, new0, old1, new1));
3516 case tcc_binary:
3517 return fold_build2_loc (loc, code, type,
3518 eval_subst (loc, TREE_OPERAND (arg, 0),
3519 old0, new0, old1, new1),
3520 eval_subst (loc, TREE_OPERAND (arg, 1),
3521 old0, new0, old1, new1));
3523 case tcc_expression:
3524 switch (code)
3526 case SAVE_EXPR:
3527 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3528 old1, new1);
3530 case COMPOUND_EXPR:
3531 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3532 old1, new1);
3534 case COND_EXPR:
3535 return fold_build3_loc (loc, code, type,
3536 eval_subst (loc, TREE_OPERAND (arg, 0),
3537 old0, new0, old1, new1),
3538 eval_subst (loc, TREE_OPERAND (arg, 1),
3539 old0, new0, old1, new1),
3540 eval_subst (loc, TREE_OPERAND (arg, 2),
3541 old0, new0, old1, new1));
3542 default:
3543 break;
3545 /* Fall through - ??? */
3547 case tcc_comparison:
3549 tree arg0 = TREE_OPERAND (arg, 0);
3550 tree arg1 = TREE_OPERAND (arg, 1);
3552 /* We need to check both for exact equality and tree equality. The
3553 former will be true if the operand has a side-effect. In that
3554 case, we know the operand occurred exactly once. */
3556 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3557 arg0 = new0;
3558 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3559 arg0 = new1;
3561 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3562 arg1 = new0;
3563 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3564 arg1 = new1;
3566 return fold_build2_loc (loc, code, type, arg0, arg1);
3569 default:
3570 return arg;
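/* For example, eval_subst on ARG = (a < b) && (a == b) with
   OLD0/NEW0 = a/x and OLD1/NEW1 = b/y rebuilds the tree as
   (x < y) && (x == y).  */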
3574 /* Return a tree for the case when the result of an expression is RESULT
3575 converted to TYPE and OMITTED was previously an operand of the expression
3576 but is now not needed (e.g., we folded OMITTED * 0).
3578 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3579 the conversion of RESULT to TYPE. */
3581 tree
3582 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3584 tree t = fold_convert_loc (loc, type, result);
3586 /* If the resulting operand is an empty statement, just return the omitted
3587 statement cast to void. */
3588 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3589 return build1_loc (loc, NOP_EXPR, void_type_node,
3590 fold_ignored_result (omitted));
3592 if (TREE_SIDE_EFFECTS (omitted))
3593 return build2_loc (loc, COMPOUND_EXPR, type,
3594 fold_ignored_result (omitted), t);
3596 return non_lvalue_loc (loc, t);
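/* For example, when f () * 0 is folded to 0, the call still has to
   be evaluated, so omit_one_operand_loc wraps the result as
   COMPOUND_EXPR <f (), 0>: the side effect is preserved and the
   value of the whole expression is the constant.  */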
3599 /* Return a tree for the case when the result of an expression is RESULT
3600 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3601 of the expression but are now not needed.
3603 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3604 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3605 evaluated before OMITTED2. Otherwise, if neither has side effects,
3606 just do the conversion of RESULT to TYPE. */
3608 tree
3609 omit_two_operands_loc (location_t loc, tree type, tree result,
3610 tree omitted1, tree omitted2)
3612 tree t = fold_convert_loc (loc, type, result);
3614 if (TREE_SIDE_EFFECTS (omitted2))
3615 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3616 if (TREE_SIDE_EFFECTS (omitted1))
3617 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3619 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3623 /* Return a simplified tree node for the truth-negation of ARG. This
3624 never alters ARG itself. We assume that ARG is an operation that
3625 returns a truth value (0 or 1).
3627 FIXME: one would think we would fold the result, but it causes
3628 problems with the dominator optimizer. */
3630 static tree
3631 fold_truth_not_expr (location_t loc, tree arg)
3633 tree type = TREE_TYPE (arg);
3634 enum tree_code code = TREE_CODE (arg);
3635 location_t loc1, loc2;
3637 /* If this is a comparison, we can simply invert it, except for
3638 floating-point non-equality comparisons, in which case we just
3639 enclose a TRUTH_NOT_EXPR around what we have. */
3641 if (TREE_CODE_CLASS (code) == tcc_comparison)
3643 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3644 if (FLOAT_TYPE_P (op_type)
3645 && flag_trapping_math
3646 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3647 && code != NE_EXPR && code != EQ_EXPR)
3648 return NULL_TREE;
3650 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3651 if (code == ERROR_MARK)
3652 return NULL_TREE;
3654 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3655 TREE_OPERAND (arg, 1));
3656 if (TREE_NO_WARNING (arg))
3657 TREE_NO_WARNING (ret) = 1;
3658 return ret;
3661 switch (code)
3663 case INTEGER_CST:
3664 return constant_boolean_node (integer_zerop (arg), type);
3666 case TRUTH_AND_EXPR:
3667 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3668 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3669 return build2_loc (loc, TRUTH_OR_EXPR, type,
3670 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3671 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3673 case TRUTH_OR_EXPR:
3674 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3675 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3676 return build2_loc (loc, TRUTH_AND_EXPR, type,
3677 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3678 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3680 case TRUTH_XOR_EXPR:
3681 /* Here we can invert either operand. We invert the first operand
3682 unless the second operand is a TRUTH_NOT_EXPR in which case our
3683 result is the XOR of the first operand with the inside of the
3684 negation of the second operand. */
3686 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3687 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3688 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3689 else
3690 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3691 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3692 TREE_OPERAND (arg, 1));
3694 case TRUTH_ANDIF_EXPR:
3695 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3696 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3697 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3698 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3699 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3701 case TRUTH_ORIF_EXPR:
3702 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3703 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3704 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3705 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3706 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3708 case TRUTH_NOT_EXPR:
3709 return TREE_OPERAND (arg, 0);
3711 case COND_EXPR:
3713 tree arg1 = TREE_OPERAND (arg, 1);
3714 tree arg2 = TREE_OPERAND (arg, 2);
3716 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3717 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3719 /* A COND_EXPR may have a throw as one operand, which
3720 then has void type. Just leave void operands
3721 as they are. */
3722 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3723 VOID_TYPE_P (TREE_TYPE (arg1))
3724 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3725 VOID_TYPE_P (TREE_TYPE (arg2))
3726 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3729 case COMPOUND_EXPR:
3730 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3731 return build2_loc (loc, COMPOUND_EXPR, type,
3732 TREE_OPERAND (arg, 0),
3733 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3735 case NON_LVALUE_EXPR:
3736 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3737 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3739 CASE_CONVERT:
3740 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3741 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3743 /* fall through */
3745 case FLOAT_EXPR:
3746 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3747 return build1_loc (loc, TREE_CODE (arg), type,
3748 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3750 case BIT_AND_EXPR:
3751 if (!integer_onep (TREE_OPERAND (arg, 1)))
3752 return NULL_TREE;
3753 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3755 case SAVE_EXPR:
3756 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3758 case CLEANUP_POINT_EXPR:
3759 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3760 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3761 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3763 default:
3764 return NULL_TREE;
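/* For example, the negation of a < b is rebuilt as a >= b for
   integer operands (or a UNGE b when NaNs are honored), and the
   negation of a && b becomes !a || !b via the TRUTH_ANDIF_EXPR case
   above.  */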
3768 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3769 assume that ARG is an operation that returns a truth value (0 or 1
3770 for scalars, 0 or -1 for vectors). Return the folded expression if
3771 folding is successful. Otherwise, return NULL_TREE. */
3773 static tree
3774 fold_invert_truthvalue (location_t loc, tree arg)
3776 tree type = TREE_TYPE (arg);
3777 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3778 ? BIT_NOT_EXPR
3779 : TRUTH_NOT_EXPR,
3780 type, arg);
3783 /* Return a simplified tree node for the truth-negation of ARG. This
3784 never alters ARG itself. We assume that ARG is an operation that
3785 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3787 tree
3788 invert_truthvalue_loc (location_t loc, tree arg)
3790 if (TREE_CODE (arg) == ERROR_MARK)
3791 return arg;
3793 tree type = TREE_TYPE (arg);
3794 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3795 ? BIT_NOT_EXPR
3796 : TRUTH_NOT_EXPR,
3797 type, arg);
3800 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3801 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3802 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
3803 is the original memory reference used to preserve the alias set of
3804 the access. */
3806 static tree
3807 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3808 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3809 int unsignedp, int reversep)
3811 tree result, bftype;
3813 /* Attempt not to lose the access path. */
3814 if (TREE_CODE (orig_inner) == COMPONENT_REF)
3816 tree ninner = TREE_OPERAND (orig_inner, 0);
3817 machine_mode nmode;
3818 HOST_WIDE_INT nbitsize, nbitpos;
3819 tree noffset;
3820 int nunsignedp, nreversep, nvolatilep = 0;
3821 tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
3822 &noffset, &nmode, &nunsignedp,
3823 &nreversep, &nvolatilep);
3824 if (base == inner
3825 && noffset == NULL_TREE
3826 && nbitsize >= bitsize
3827 && nbitpos <= bitpos
3828 && bitpos + bitsize <= nbitpos + nbitsize
3829 && !reversep
3830 && !nreversep
3831 && !nvolatilep)
3833 inner = ninner;
3834 bitpos -= nbitpos;
3838 alias_set_type iset = get_alias_set (orig_inner);
3839 if (iset == 0 && get_alias_set (inner) != iset)
3840 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
3841 build_fold_addr_expr (inner),
3842 build_int_cst (ptr_type_node, 0));
3844 if (bitpos == 0 && !reversep)
3846 tree size = TYPE_SIZE (TREE_TYPE (inner));
3847 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3848 || POINTER_TYPE_P (TREE_TYPE (inner)))
3849 && tree_fits_shwi_p (size)
3850 && tree_to_shwi (size) == bitsize)
3851 return fold_convert_loc (loc, type, inner);
3854 bftype = type;
3855 if (TYPE_PRECISION (bftype) != bitsize
3856 || TYPE_UNSIGNED (bftype) == !unsignedp)
3857 bftype = build_nonstandard_integer_type (bitsize, 0);
3859 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3860 bitsize_int (bitsize), bitsize_int (bitpos));
3861 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3863 if (bftype != type)
3864 result = fold_convert_loc (loc, type, result);
3866 return result;
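/* For example, a request for all 32 bits at position 0 of a 32-bit
   integer INNER folds to a plain conversion of INNER to TYPE, while
   a request for 8 bits at position 16 yields
   BIT_FIELD_REF <INNER, 8, 16> of a suitable 8-bit integer type,
   converted to TYPE afterwards if needed.  */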
3869 /* Optimize a bit-field compare.
3871 There are two cases: the first is a compare against a constant and the
3872 second is a comparison of two items where the fields are at the same
3873 bit position relative to the start of a chunk (byte, halfword, word)
3874 large enough to contain it. In these cases we can avoid the shift
3875 implicit in bitfield extractions.
3877 For constants, we emit a compare of the shifted constant with the
3878 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3879 compared. For two fields at the same position, we do the ANDs with the
3880 similar mask and compare the result of the ANDs.
3882 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3883 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3884 are the left and right operands of the comparison, respectively.
3886 If the optimization described above can be done, we return the resulting
3887 tree. Otherwise we return zero. */
3889 static tree
3890 optimize_bit_field_compare (location_t loc, enum tree_code code,
3891 tree compare_type, tree lhs, tree rhs)
3893 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3894 tree type = TREE_TYPE (lhs);
3895 tree unsigned_type;
3896 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3897 machine_mode lmode, rmode;
3898 scalar_int_mode nmode;
3899 int lunsignedp, runsignedp;
3900 int lreversep, rreversep;
3901 int lvolatilep = 0, rvolatilep = 0;
3902 tree linner, rinner = NULL_TREE;
3903 tree mask;
3904 tree offset;
3906 /* Get all the information about the extractions being done. If the bit size
3907 is the same as the size of the underlying object, we aren't doing an
3908 extraction at all and so can do nothing. We also don't want to
3909 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3910 then will no longer be able to replace it. */
3911 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3912 &lunsignedp, &lreversep, &lvolatilep);
3913 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3914 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3915 return 0;
3917 if (const_p)
3918 rreversep = lreversep;
3919 else
3921 /* If this is not a constant, we can only do something if bit positions,
3922 sizes, signedness and storage order are the same. */
3923 rinner
3924 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3925 &runsignedp, &rreversep, &rvolatilep);
3927 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3928 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3929 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3930 return 0;
3933 /* Honor the C++ memory model and mimic what RTL expansion does. */
3934 unsigned HOST_WIDE_INT bitstart = 0;
3935 unsigned HOST_WIDE_INT bitend = 0;
3936 if (TREE_CODE (lhs) == COMPONENT_REF)
3938 get_bit_range (&bitstart, &bitend, lhs, &lbitpos, &offset);
3939 if (offset != NULL_TREE)
3940 return 0;
3943 /* See if we can find a mode to refer to this field. We should be able to,
3944 but fail if we can't. */
3945 if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
3946 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3947 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3948 TYPE_ALIGN (TREE_TYPE (rinner))),
3949 BITS_PER_WORD, false, &nmode))
3950 return 0;
3952 /* Set signed and unsigned types of the precision of this mode for the
3953 shifts below. */
3954 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3956 /* Compute the bit position and size for the new reference and our offset
3957 within it. If the new reference is the same size as the original, we
3958 won't optimize anything, so return zero. */
3959 nbitsize = GET_MODE_BITSIZE (nmode);
3960 nbitpos = lbitpos & ~ (nbitsize - 1);
3961 lbitpos -= nbitpos;
3962 if (nbitsize == lbitsize)
3963 return 0;
3965 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3966 lbitpos = nbitsize - lbitsize - lbitpos;
3968 /* Make the mask to be used against the extracted field. */
3969 mask = build_int_cst_type (unsigned_type, -1);
3970 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3971 mask = const_binop (RSHIFT_EXPR, mask,
3972 size_int (nbitsize - lbitsize - lbitpos));
3974 if (! const_p)
3976 if (nbitpos < 0)
3977 return 0;
3979 /* If not comparing with constant, just rework the comparison
3980 and return. */
3981 tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
3982 nbitsize, nbitpos, 1, lreversep);
3983 t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
3984 tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
3985 nbitsize, nbitpos, 1, rreversep);
3986 t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
3987 return fold_build2_loc (loc, code, compare_type, t1, t2);
3990 /* Otherwise, we are handling the constant case. See if the constant is too
3991 big for the field. Warn and return the constant result if so. We do
3992 this not only for its own sake, but to avoid having to test for this
3993 error case below. If we didn't, we might generate wrong code.
3995 For unsigned fields, the constant shifted right by the field length should
3996 be all zero. For signed fields, the high-order bits should agree with
3997 the sign bit. */
3999 if (lunsignedp)
4001 if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4003 warning (0, "comparison is always %d due to width of bit-field",
4004 code == NE_EXPR);
4005 return constant_boolean_node (code == NE_EXPR, compare_type);
4008 else
4010 wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4011 if (tem != 0 && tem != -1)
4013 warning (0, "comparison is always %d due to width of bit-field",
4014 code == NE_EXPR);
4015 return constant_boolean_node (code == NE_EXPR, compare_type);
4019 if (nbitpos < 0)
4020 return 0;
4022 /* Single-bit compares should always be against zero. */
4023 if (lbitsize == 1 && ! integer_zerop (rhs))
4025 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4026 rhs = build_int_cst (type, 0);
4029 /* Make a new bitfield reference, shift the constant over the
4030 appropriate number of bits and mask it with the computed mask
4031 (in case this was a signed field). If we changed it, make a new one. */
4032 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4033 nbitsize, nbitpos, 1, lreversep);
4035 rhs = const_binop (BIT_AND_EXPR,
4036 const_binop (LSHIFT_EXPR,
4037 fold_convert_loc (loc, unsigned_type, rhs),
4038 size_int (lbitpos)),
4039 mask);
4041 lhs = build2_loc (loc, code, compare_type,
4042 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4043 return lhs;
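/* A minimal illustrative sketch of the constant case above, in plain C.
   The struct layout and names are hypothetical, and little-endian
   allocation of bit-fields within the byte is assumed.  */

struct obfc_example { unsigned char pad : 2, f : 3; };

static int
obfc_example_p (unsigned char containing_byte)
{
  /* s.f == 5 becomes a masked compare of the containing byte against
     the pre-shifted constant, avoiding the extract-and-shift.  */
  return (containing_byte & 0x1c) == (5 << 2);
}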
4046 /* Subroutine for fold_truth_andor_1: decode a field reference.
4048 If EXP is a comparison reference, we return the innermost reference.
4050 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4051 set to the starting bit number.
4053 If the innermost field can be completely contained in a mode-sized
4054 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4056 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4057 otherwise it is not changed.
4059 *PUNSIGNEDP is set to the signedness of the field.
4061 *PREVERSEP is set to the storage order of the field.
4063 *PMASK is set to the mask used. This is either contained in a
4064 BIT_AND_EXPR or derived from the width of the field.
4066 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4068 Return 0 if this is not a component reference or is one that we can't
4069 do anything with. */
4071 static tree
4072 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4073 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4074 int *punsignedp, int *preversep, int *pvolatilep,
4075 tree *pmask, tree *pand_mask)
4077 tree exp = *exp_;
4078 tree outer_type = 0;
4079 tree and_mask = 0;
4080 tree mask, inner, offset;
4081 tree unsigned_type;
4082 unsigned int precision;
4084 /* All the optimizations using this function assume integer fields.
4085 There are problems with FP fields since the type_for_size call
4086 below can fail for, e.g., XFmode. */
4087 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4088 return 0;
4090 /* We are interested in the bare arrangement of bits, so strip everything
4091 that doesn't affect the machine mode. However, record the type of the
4092 outermost expression if it may matter below. */
4093 if (CONVERT_EXPR_P (exp)
4094 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4095 outer_type = TREE_TYPE (exp);
4096 STRIP_NOPS (exp);
4098 if (TREE_CODE (exp) == BIT_AND_EXPR)
4100 and_mask = TREE_OPERAND (exp, 1);
4101 exp = TREE_OPERAND (exp, 0);
4102 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4103 if (TREE_CODE (and_mask) != INTEGER_CST)
4104 return 0;
4107 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4108 punsignedp, preversep, pvolatilep);
4109 if ((inner == exp && and_mask == 0)
4110 || *pbitsize < 0 || offset != 0
4111 || TREE_CODE (inner) == PLACEHOLDER_EXPR
4112 /* Reject out-of-bound accesses (PR79731). */
4113 || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4114 && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4115 *pbitpos + *pbitsize) < 0))
4116 return 0;
4118 *exp_ = exp;
4120 /* If the number of bits in the reference is the same as the bitsize of
4121 the outer type, then the outer type gives the signedness. Otherwise
4122 (in case of a small bitfield) the signedness is unchanged. */
4123 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4124 *punsignedp = TYPE_UNSIGNED (outer_type);
4126 /* Compute the mask to access the bitfield. */
4127 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4128 precision = TYPE_PRECISION (unsigned_type);
4130 mask = build_int_cst_type (unsigned_type, -1);
4132 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4133 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4135 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4136 if (and_mask != 0)
4137 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4138 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4140 *pmask = mask;
4141 *pand_mask = and_mask;
4142 return inner;
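/* A sketch of the mask merging done above, with a hypothetical 8-bit
   field and an and-mask of 3 (as from an operand like s.f & 3):  */

static unsigned char
decode_field_mask_example (void)
{
  unsigned char field_mask = 0xff;  /* *PBITSIZE ones */
  unsigned char and_mask = 0x3;     /* from the BIT_AND_EXPR */
  return field_mask & and_mask;     /* the value left in *PMASK */
}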
4145 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4146 bit positions and MASK is SIGNED. */
4148 static int
4149 all_ones_mask_p (const_tree mask, unsigned int size)
4151 tree type = TREE_TYPE (mask);
4152 unsigned int precision = TYPE_PRECISION (type);
4154 /* If this function returns true when the type of the mask is
4155 UNSIGNED, then there will be errors. In particular see
4156 gcc.c-torture/execute/990326-1.c. There does not appear to be
4157 any documentation paper trail as to why this is so. But the pre-
4158 wide-int code worked with that restriction and it has been preserved
4159 here. */
4160 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4161 return false;
4163 return wi::mask (size, false, precision) == wi::to_wide (mask);
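/* A sketch of the check above for a plain 32-bit signed mask,
   assuming SIZE <= 31 so the shift is well defined:  */

static int
all_ones_mask_example (int mask, unsigned int size)
{
  /* True iff MASK is exactly SIZE low-order one bits, e.g.
     size == 4 matches mask == 0xf.  */
  return size <= 31 && mask == (int) ((1u << size) - 1u);
}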
4166 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4167 represents the sign bit of EXP's type. If EXP represents a sign
4168 or zero extension, also test VAL against the unextended type.
4169 The return value is the (sub)expression whose sign bit is VAL,
4170 or NULL_TREE otherwise. */
4172 tree
4173 sign_bit_p (tree exp, const_tree val)
4175 int width;
4176 tree t;
4178 /* Tree EXP must have an integral type. */
4179 t = TREE_TYPE (exp);
4180 if (! INTEGRAL_TYPE_P (t))
4181 return NULL_TREE;
4183 /* Tree VAL must be an integer constant. */
4184 if (TREE_CODE (val) != INTEGER_CST
4185 || TREE_OVERFLOW (val))
4186 return NULL_TREE;
4188 width = TYPE_PRECISION (t);
4189 if (wi::only_sign_bit_p (wi::to_wide (val), width))
4190 return exp;
4192 /* Handle extension from a narrower type. */
4193 if (TREE_CODE (exp) == NOP_EXPR
4194 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4195 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4197 return NULL_TREE;
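/* Sketch: for a 16-bit type, the only value sign_bit_p accepts is
   1 << 15:  */

static int
sign_bit_example (unsigned short val)
{
  return val == 0x8000;  /* VAL is exactly the sign bit */
}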
4200 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4201 to be evaluated unconditionally. */
4203 static int
4204 simple_operand_p (const_tree exp)
4206 /* Strip any conversions that don't change the machine mode. */
4207 STRIP_NOPS (exp);
4209 return (CONSTANT_CLASS_P (exp)
4210 || TREE_CODE (exp) == SSA_NAME
4211 || (DECL_P (exp)
4212 && ! TREE_ADDRESSABLE (exp)
4213 && ! TREE_THIS_VOLATILE (exp)
4214 && ! DECL_NONLOCAL (exp)
4215 /* Don't regard global variables as simple. They may be
4216 allocated in ways unknown to the compiler (shared memory,
4217 #pragma weak, etc). */
4218 && ! TREE_PUBLIC (exp)
4219 && ! DECL_EXTERNAL (exp)
4220 /* Weakrefs are not safe to be read, since they can be NULL.
4221 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4222 have DECL_WEAK flag set. */
4223 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4224 /* Loading a static variable is unduly expensive, but global
4225 registers aren't expensive. */
4226 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4229 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4230 to be evaluated unconditionally.
4231 In addition to simple_operand_p, we assume that comparisons, conversions,
4232 and logic-not operations are simple, if their operands are simple, too. */
4234 static bool
4235 simple_operand_p_2 (tree exp)
4237 enum tree_code code;
4239 if (TREE_SIDE_EFFECTS (exp)
4240 || tree_could_trap_p (exp))
4241 return false;
4243 while (CONVERT_EXPR_P (exp))
4244 exp = TREE_OPERAND (exp, 0);
4246 code = TREE_CODE (exp);
4248 if (TREE_CODE_CLASS (code) == tcc_comparison)
4249 return (simple_operand_p (TREE_OPERAND (exp, 0))
4250 && simple_operand_p (TREE_OPERAND (exp, 1)));
4252 if (code == TRUTH_NOT_EXPR)
4253 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4255 return simple_operand_p (exp);
4259 /* The following functions are subroutines to fold_range_test and allow it to
4260 try to change a logical combination of comparisons into a range test.
4262 For example, both
4263 X == 2 || X == 3 || X == 4 || X == 5
4264 and
4265 X >= 2 && X <= 5
4266 are converted to
4267 (unsigned) (X - 2) <= 3
4269 We describe each set of comparisons as being either inside or outside
4270 a range, using a variable named like IN_P, and then describe the
4271 range with a lower and upper bound. If one of the bounds is omitted,
4272 it represents either the highest or lowest value of the type.
4274 In the comments below, we represent a range by two numbers in brackets
4275 preceded by a "+" to designate being inside that range, or a "-" to
4276 designate being outside that range, so the condition can be inverted by
4277 flipping the prefix. An omitted bound is represented by a "-". For
4278 example, "- [-, 10]" means being outside the range starting at the lowest
4279 possible value and ending at 10, in other words, being greater than 10.
4280 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4281 always false.
4283 We set up things so that the missing bounds are handled in a consistent
4284 manner so neither a missing bound nor "true" and "false" need to be
4285 handled using a special case. */
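/* A sketch of the canonical example above in plain C: the OR of the
   four equality tests collapses to one unsigned range check, because
   values below 2 wrap around to very large numbers.  */

static int
range_test_example (unsigned int x)
{
  /* X == 2 || X == 3 || X == 4 || X == 5  */
  return (x - 2u) <= 3u;
}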
4287 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4288 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4289 and UPPER1_P are nonzero if the respective argument is an upper bound
4290 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4291 must be specified for a comparison. ARG1 will be converted to ARG0's
4292 type if both are specified. */
4294 static tree
4295 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4296 tree arg1, int upper1_p)
4298 tree tem;
4299 int result;
4300 int sgn0, sgn1;
4302 /* If neither arg represents infinity, do the normal operation.
4303 Else, if not a comparison, return infinity. Else handle the special
4304 comparison rules. Note that most of the cases below won't occur, but
4305 are handled for consistency. */
4307 if (arg0 != 0 && arg1 != 0)
4309 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4310 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4311 STRIP_NOPS (tem);
4312 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4315 if (TREE_CODE_CLASS (code) != tcc_comparison)
4316 return 0;
4318 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4319 for neither. In real maths, we cannot assume open ended ranges are
4320 the same. But, this is computer arithmetic, where numbers are finite.
4321 We can therefore make the transformation of any unbounded range with
4322 the value Z, Z being greater than any representable number. This permits
4323 us to treat unbounded ranges as equal. */
4324 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4325 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4326 switch (code)
4328 case EQ_EXPR:
4329 result = sgn0 == sgn1;
4330 break;
4331 case NE_EXPR:
4332 result = sgn0 != sgn1;
4333 break;
4334 case LT_EXPR:
4335 result = sgn0 < sgn1;
4336 break;
4337 case LE_EXPR:
4338 result = sgn0 <= sgn1;
4339 break;
4340 case GT_EXPR:
4341 result = sgn0 > sgn1;
4342 break;
4343 case GE_EXPR:
4344 result = sgn0 >= sgn1;
4345 break;
4346 default:
4347 gcc_unreachable ();
4350 return constant_boolean_node (result, type);
4353 /* Helper routine for make_range. Perform one step for it, return
4354 new expression if the loop should continue or NULL_TREE if it should
4355 stop. */
4357 tree
4358 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4359 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4360 bool *strict_overflow_p)
4362 tree arg0_type = TREE_TYPE (arg0);
4363 tree n_low, n_high, low = *p_low, high = *p_high;
4364 int in_p = *p_in_p, n_in_p;
4366 switch (code)
4368 case TRUTH_NOT_EXPR:
4369 /* We can only do something if the range is testing for zero. */
4370 if (low == NULL_TREE || high == NULL_TREE
4371 || ! integer_zerop (low) || ! integer_zerop (high))
4372 return NULL_TREE;
4373 *p_in_p = ! in_p;
4374 return arg0;
4376 case EQ_EXPR: case NE_EXPR:
4377 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4378 /* We can only do something if the range is testing for zero
4379 and if the second operand is an integer constant. Note that
4380 saying something is "in" the range we make is done by
4381 complementing IN_P since it will be set in the initial case of
4382 being not equal to zero; "out" is leaving it alone. */
4383 if (low == NULL_TREE || high == NULL_TREE
4384 || ! integer_zerop (low) || ! integer_zerop (high)
4385 || TREE_CODE (arg1) != INTEGER_CST)
4386 return NULL_TREE;
4388 switch (code)
4390 case NE_EXPR: /* - [c, c] */
4391 low = high = arg1;
4392 break;
4393 case EQ_EXPR: /* + [c, c] */
4394 in_p = ! in_p, low = high = arg1;
4395 break;
4396 case GT_EXPR: /* - [-, c] */
4397 low = 0, high = arg1;
4398 break;
4399 case GE_EXPR: /* + [c, -] */
4400 in_p = ! in_p, low = arg1, high = 0;
4401 break;
4402 case LT_EXPR: /* - [c, -] */
4403 low = arg1, high = 0;
4404 break;
4405 case LE_EXPR: /* + [-, c] */
4406 in_p = ! in_p, low = 0, high = arg1;
4407 break;
4408 default:
4409 gcc_unreachable ();
4412 /* If this is an unsigned comparison, we also know that EXP is
4413 greater than or equal to zero. We base the range tests we make
4414 on that fact, so we record it here so we can parse existing
4415 range tests. We test arg0_type since often the return type
4416 of, e.g. EQ_EXPR, is boolean. */
4417 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4419 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4420 in_p, low, high, 1,
4421 build_int_cst (arg0_type, 0),
4422 NULL_TREE))
4423 return NULL_TREE;
4425 in_p = n_in_p, low = n_low, high = n_high;
4427 /* If the high bound is missing, but we have a nonzero low
4428 bound, reverse the range so it goes from zero to the low bound
4429 minus 1. */
4430 if (high == 0 && low && ! integer_zerop (low))
4432 in_p = ! in_p;
4433 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4434 build_int_cst (TREE_TYPE (low), 1), 0);
4435 low = build_int_cst (arg0_type, 0);
4439 *p_low = low;
4440 *p_high = high;
4441 *p_in_p = in_p;
4442 return arg0;
4444 case NEGATE_EXPR:
4445 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4446 low and high are non-NULL, then normalize will DTRT. */
4447 if (!TYPE_UNSIGNED (arg0_type)
4448 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4450 if (low == NULL_TREE)
4451 low = TYPE_MIN_VALUE (arg0_type);
4452 if (high == NULL_TREE)
4453 high = TYPE_MAX_VALUE (arg0_type);
4456 /* (-x) IN [a,b] -> x in [-b, -a] */
4457 n_low = range_binop (MINUS_EXPR, exp_type,
4458 build_int_cst (exp_type, 0),
4459 0, high, 1);
4460 n_high = range_binop (MINUS_EXPR, exp_type,
4461 build_int_cst (exp_type, 0),
4462 0, low, 0);
4463 if (n_high != 0 && TREE_OVERFLOW (n_high))
4464 return NULL_TREE;
4465 goto normalize;
4467 case BIT_NOT_EXPR:
4468 /* ~ X -> -X - 1 */
4469 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4470 build_int_cst (exp_type, 1));
4472 case PLUS_EXPR:
4473 case MINUS_EXPR:
4474 if (TREE_CODE (arg1) != INTEGER_CST)
4475 return NULL_TREE;
4477 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4478 move a constant to the other side. */
4479 if (!TYPE_UNSIGNED (arg0_type)
4480 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4481 return NULL_TREE;
4483 /* If EXP is signed, any overflow in the computation is undefined,
4484 so we don't worry about it so long as our computations on
4485 the bounds don't overflow. For unsigned, overflow is defined
4486 and this is exactly the right thing. */
4487 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4488 arg0_type, low, 0, arg1, 0);
4489 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4490 arg0_type, high, 1, arg1, 0);
4491 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4492 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4493 return NULL_TREE;
4495 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4496 *strict_overflow_p = true;
4498 normalize:
4499 /* Check for an unsigned range which has wrapped around the maximum
4500 value thus making n_high < n_low, and normalize it. */
4501 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4503 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4504 build_int_cst (TREE_TYPE (n_high), 1), 0);
4505 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4506 build_int_cst (TREE_TYPE (n_low), 1), 0);
4508 /* If the range is of the form +/- [ x+1, x ], we won't
4509 be able to normalize it. But then, it represents the
4510 whole range or the empty set, so make it
4511 +/- [ -, - ]. */
4512 if (tree_int_cst_equal (n_low, low)
4513 && tree_int_cst_equal (n_high, high))
4514 low = high = 0;
4515 else
4516 in_p = ! in_p;
4518 else
4519 low = n_low, high = n_high;
4521 *p_low = low;
4522 *p_high = high;
4523 *p_in_p = in_p;
4524 return arg0;
4526 CASE_CONVERT:
4527 case NON_LVALUE_EXPR:
4528 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4529 return NULL_TREE;
4531 if (! INTEGRAL_TYPE_P (arg0_type)
4532 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4533 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4534 return NULL_TREE;
4536 n_low = low, n_high = high;
4538 if (n_low != 0)
4539 n_low = fold_convert_loc (loc, arg0_type, n_low);
4541 if (n_high != 0)
4542 n_high = fold_convert_loc (loc, arg0_type, n_high);
4544 /* If we're converting arg0 from an unsigned type to exp's
4545 signed type, we will be doing the comparison as unsigned.
4546 The tests above have already verified that LOW and HIGH
4547 are both positive.
4549 So we have to ensure that we will handle large unsigned
4550 values the same way that the current signed bounds treat
4551 negative values. */
4553 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4555 tree high_positive;
4556 tree equiv_type;
4557 /* For fixed-point modes, we need to pass the saturating flag
4558 as the 2nd parameter. */
4559 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4560 equiv_type
4561 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4562 TYPE_SATURATING (arg0_type));
4563 else
4564 equiv_type
4565 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4567 /* A range without an upper bound is, naturally, unbounded.
4568 Since convert would have cropped a very large value, use
4569 the max value for the destination type. */
4570 high_positive
4571 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4572 : TYPE_MAX_VALUE (arg0_type);
4574 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4575 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4576 fold_convert_loc (loc, arg0_type,
4577 high_positive),
4578 build_int_cst (arg0_type, 1));
4580 /* If the low bound is specified, "and" the range with the
4581 range for which the original unsigned value will be
4582 positive. */
4583 if (low != 0)
4585 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4586 1, fold_convert_loc (loc, arg0_type,
4587 integer_zero_node),
4588 high_positive))
4589 return NULL_TREE;
4591 in_p = (n_in_p == in_p);
4593 else
4595 /* Otherwise, "or" the range with the range of the input
4596 that will be interpreted as negative. */
4597 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4598 1, fold_convert_loc (loc, arg0_type,
4599 integer_zero_node),
4600 high_positive))
4601 return NULL_TREE;
4603 in_p = (in_p != n_in_p);
4607 *p_low = n_low;
4608 *p_high = n_high;
4609 *p_in_p = in_p;
4610 return arg0;
4612 default:
4613 return NULL_TREE;
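/* A sketch of the "normalize" step above for an unsigned char: the
   wrapped range + [250, 5] cannot be represented directly, so it is
   turned into the inverted non-wrapping range - [6, 249].  */

static int
wrapped_range_example (unsigned char c)
{
  /* c in [250, 255] or in [0, 5], i.e. (unsigned char) (c - 250) <= 11 */
  return !(c >= 6 && c <= 249);
}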
4617 /* Given EXP, a logical expression, set the range it is testing into
4618 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4619 actually being tested. *PLOW and *PHIGH will be made of the same
4620 type as the returned expression. If EXP is not a comparison, we
4621 will most likely not be returning a useful value and range. Set
4622 *STRICT_OVERFLOW_P to true if the return value is only valid
4623 because signed overflow is undefined; otherwise, do not change
4624 *STRICT_OVERFLOW_P. */
4626 tree
4627 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4628 bool *strict_overflow_p)
4630 enum tree_code code;
4631 tree arg0, arg1 = NULL_TREE;
4632 tree exp_type, nexp;
4633 int in_p;
4634 tree low, high;
4635 location_t loc = EXPR_LOCATION (exp);
4637 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4638 and see if we can refine the range. Some of the cases below may not
4639 happen, but it doesn't seem worth worrying about this. We "continue"
4640 the outer loop when we've changed something; otherwise we "break"
4641 the switch, which will "break" the while. */
4643 in_p = 0;
4644 low = high = build_int_cst (TREE_TYPE (exp), 0);
4646 while (1)
4648 code = TREE_CODE (exp);
4649 exp_type = TREE_TYPE (exp);
4650 arg0 = NULL_TREE;
4652 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4654 if (TREE_OPERAND_LENGTH (exp) > 0)
4655 arg0 = TREE_OPERAND (exp, 0);
4656 if (TREE_CODE_CLASS (code) == tcc_binary
4657 || TREE_CODE_CLASS (code) == tcc_comparison
4658 || (TREE_CODE_CLASS (code) == tcc_expression
4659 && TREE_OPERAND_LENGTH (exp) > 1))
4660 arg1 = TREE_OPERAND (exp, 1);
4662 if (arg0 == NULL_TREE)
4663 break;
4665 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4666 &high, &in_p, strict_overflow_p);
4667 if (nexp == NULL_TREE)
4668 break;
4669 exp = nexp;
4672 /* If EXP is a constant, we can evaluate whether this is true or false. */
4673 if (TREE_CODE (exp) == INTEGER_CST)
4675 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4676 exp, 0, low, 0))
4677 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4678 exp, 1, high, 1)));
4679 low = high = 0;
4680 exp = 0;
4683 *pin_p = in_p, *plow = low, *phigh = high;
4684 return exp;
4687 /* Returns TRUE if the [LOW, HIGH] range check can be optimized to
4688 a bitwise check, i.e. when
4689 LOW == 0xXX...X00...0
4690 HIGH == 0xXX...X11...1
4691 Return the corresponding mask in MASK and the stem in VALUE. */
4693 static bool
4694 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
4695 tree *value)
4697 if (TREE_CODE (low) != INTEGER_CST
4698 || TREE_CODE (high) != INTEGER_CST)
4699 return false;
4701 unsigned prec = TYPE_PRECISION (type);
4702 wide_int lo = wi::to_wide (low, prec);
4703 wide_int hi = wi::to_wide (high, prec);
4705 wide_int end_mask = lo ^ hi;
4706 if ((end_mask & (end_mask + 1)) != 0
4707 || (lo & end_mask) != 0)
4708 return false;
4710 wide_int stem_mask = ~end_mask;
4711 wide_int stem = lo & stem_mask;
4712 if (stem != (hi & stem_mask))
4713 return false;
4715 *mask = wide_int_to_tree (type, stem_mask);
4716 *value = wide_int_to_tree (type, stem);
4718 return true;
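/* A sketch of a maskable range on a 32-bit unsigned: LOW = 0x30 and
   HIGH = 0x3f differ only in their trailing bits, so the range test
   is a single mask-and-compare against the stem.  */

static int
maskable_range_example (unsigned int x)
{
  /* 0x30 <= x && x <= 0x3f  */
  return (x & ~0x0fu) == 0x30u;
}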
4721 /* Helper routine for build_range_check and match.pd. Return the type to
4722 perform the check or NULL if it shouldn't be optimized. */
4724 tree
4725 range_check_type (tree etype)
4727 /* First make sure that arithmetic in this type is valid, then make sure
4728 that it wraps around. */
4729 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4730 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4731 TYPE_UNSIGNED (etype));
4733 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4735 tree utype, minv, maxv;
4737 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4738 for the type in question, as we rely on this here. */
4739 utype = unsigned_type_for (etype);
4740 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4741 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4742 build_int_cst (TREE_TYPE (maxv), 1), 1);
4743 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4745 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4746 minv, 1, maxv, 1)))
4747 etype = utype;
4748 else
4749 return NULL_TREE;
4751 return etype;
4754 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4755 type, TYPE, return an expression to test if EXP is in (or out of, depending
4756 on IN_P) the range. Return 0 if the test couldn't be created. */
4758 tree
4759 build_range_check (location_t loc, tree type, tree exp, int in_p,
4760 tree low, tree high)
4762 tree etype = TREE_TYPE (exp), mask, value;
4764 /* Disable this optimization for function pointer expressions
4765 on targets that require function pointer canonicalization. */
4766 if (targetm.have_canonicalize_funcptr_for_compare ()
4767 && TREE_CODE (etype) == POINTER_TYPE
4768 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4769 return NULL_TREE;
4771 if (! in_p)
4773 value = build_range_check (loc, type, exp, 1, low, high);
4774 if (value != 0)
4775 return invert_truthvalue_loc (loc, value);
4777 return 0;
4780 if (low == 0 && high == 0)
4781 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4783 if (low == 0)
4784 return fold_build2_loc (loc, LE_EXPR, type, exp,
4785 fold_convert_loc (loc, etype, high));
4787 if (high == 0)
4788 return fold_build2_loc (loc, GE_EXPR, type, exp,
4789 fold_convert_loc (loc, etype, low));
4791 if (operand_equal_p (low, high, 0))
4792 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4793 fold_convert_loc (loc, etype, low));
4795 if (TREE_CODE (exp) == BIT_AND_EXPR
4796 && maskable_range_p (low, high, etype, &mask, &value))
4797 return fold_build2_loc (loc, EQ_EXPR, type,
4798 fold_build2_loc (loc, BIT_AND_EXPR, etype,
4799 exp, mask),
4800 value);
4802 if (integer_zerop (low))
4804 if (! TYPE_UNSIGNED (etype))
4806 etype = unsigned_type_for (etype);
4807 high = fold_convert_loc (loc, etype, high);
4808 exp = fold_convert_loc (loc, etype, exp);
4810 return build_range_check (loc, type, exp, 1, 0, high);
4813 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4814 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4816 int prec = TYPE_PRECISION (etype);
4818 if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
4820 if (TYPE_UNSIGNED (etype))
4822 tree signed_etype = signed_type_for (etype);
4823 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4824 etype
4825 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4826 else
4827 etype = signed_etype;
4828 exp = fold_convert_loc (loc, etype, exp);
4830 return fold_build2_loc (loc, GT_EXPR, type, exp,
4831 build_int_cst (etype, 0));
4835 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4836 This requires wrap-around arithmetic for the type of the expression. */
4837 etype = range_check_type (etype);
4838 if (etype == NULL_TREE)
4839 return NULL_TREE;
4841 if (POINTER_TYPE_P (etype))
4842 etype = unsigned_type_for (etype);
4844 high = fold_convert_loc (loc, etype, high);
4845 low = fold_convert_loc (loc, etype, low);
4846 exp = fold_convert_loc (loc, etype, exp);
4848 value = const_binop (MINUS_EXPR, high, low);
4850 if (value != 0 && !TREE_OVERFLOW (value))
4851 return build_range_check (loc, type,
4852 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4853 1, build_int_cst (etype, 0), value);
4855 return 0;
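/* A sketch of the general lowering above on a 32-bit int, with the
   hypothetical bounds [10, 20]: shift the range to start at zero
   (in the unsigned type, so wrap-around is defined) and do a single
   unsigned compare.  */

static int
build_range_check_example (int c)
{
  /* 10 <= c && c <= 20  */
  return ((unsigned int) c - 10u) <= (20u - 10u);
}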
4858 /* Return the predecessor of VAL in its type, handling the infinite case. */
4860 static tree
4861 range_predecessor (tree val)
4863 tree type = TREE_TYPE (val);
4865 if (INTEGRAL_TYPE_P (type)
4866 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4867 return 0;
4868 else
4869 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4870 build_int_cst (TREE_TYPE (val), 1), 0);
4873 /* Return the successor of VAL in its type, handling the infinite case. */
4875 static tree
4876 range_successor (tree val)
4878 tree type = TREE_TYPE (val);
4880 if (INTEGRAL_TYPE_P (type)
4881 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4882 return 0;
4883 else
4884 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4885 build_int_cst (TREE_TYPE (val), 1), 0);
4888 /* Given two ranges, see if we can merge them into one. Return 1 if we
4889 can, 0 if we can't. Set the output range into the specified parameters. */
4891 bool
4892 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4893 tree high0, int in1_p, tree low1, tree high1)
4895 int no_overlap;
4896 int subset;
4897 int temp;
4898 tree tem;
4899 int in_p;
4900 tree low, high;
4901 int lowequal = ((low0 == 0 && low1 == 0)
4902 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4903 low0, 0, low1, 0)));
4904 int highequal = ((high0 == 0 && high1 == 0)
4905 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4906 high0, 1, high1, 1)));
4908 /* Make range 0 be the range that starts first, or ends last if they
4909 start at the same value. Swap them if necessary.
4910 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4911 low0, 0, low1, 0))
4912 || (lowequal
4913 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4914 high1, 1, high0, 1))))
4916 temp = in0_p, in0_p = in1_p, in1_p = temp;
4917 tem = low0, low0 = low1, low1 = tem;
4918 tem = high0, high0 = high1, high1 = tem;
4921 /* Now flag two cases, whether the ranges are disjoint or whether the
4922 second range is totally subsumed in the first. Note that the tests
4923 below are simplified by the ones above. */
4924 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4925 high0, 1, low1, 0));
4926 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4927 high1, 1, high0, 1));
4929 /* We now have four cases, depending on whether we are including or
4930 excluding the two ranges. */
4931 if (in0_p && in1_p)
4933 /* If they don't overlap, the result is false. If the second range
4934 is a subset it is the result. Otherwise, the range is from the start
4935 of the second to the end of the first. */
4936 if (no_overlap)
4937 in_p = 0, low = high = 0;
4938 else if (subset)
4939 in_p = 1, low = low1, high = high1;
4940 else
4941 in_p = 1, low = low1, high = high0;
4944 else if (in0_p && ! in1_p)
4946 /* If they don't overlap, the result is the first range. If they are
4947 equal, the result is false. If the second range is a subset of the
4948 first, and the ranges begin at the same place, we go from just after
4949 the end of the second range to the end of the first. If the second
4950 range is not a subset of the first, or if it is a subset and both
4951 ranges end at the same place, the range starts at the start of the
4952 first range and ends just before the second range.
4953 Otherwise, we can't describe this as a single range. */
4954 if (no_overlap)
4955 in_p = 1, low = low0, high = high0;
4956 else if (lowequal && highequal)
4957 in_p = 0, low = high = 0;
4958 else if (subset && lowequal)
4960 low = range_successor (high1);
4961 high = high0;
4962 in_p = 1;
4963 if (low == 0)
4965 /* We are in the weird situation where high0 > high1 but
4966 high1 has no successor. Punt. */
4967 return 0;
4970 else if (! subset || highequal)
4972 low = low0;
4973 high = range_predecessor (low1);
4974 in_p = 1;
4975 if (high == 0)
4977 /* low0 < low1 but low1 has no predecessor. Punt. */
4978 return 0;
4981 else
4982 return 0;
4985 else if (! in0_p && in1_p)
4987 /* If they don't overlap, the result is the second range. If the second
4988 is a subset of the first, the result is false. Otherwise,
4989 the range starts just after the first range and ends at the
4990 end of the second. */
4991 if (no_overlap)
4992 in_p = 1, low = low1, high = high1;
4993 else if (subset || highequal)
4994 in_p = 0, low = high = 0;
4995 else
4997 low = range_successor (high0);
4998 high = high1;
4999 in_p = 1;
5000 if (low == 0)
5002 /* high1 > high0 but high0 has no successor. Punt. */
5003 return 0;
5008 else
5010 /* The case where we are excluding both ranges. Here the complex case
5011 is if they don't overlap. In that case, the only time we have a
5012 range is if they are adjacent. If the second is a subset of the
5013 first, the result is the first. Otherwise, the range to exclude
5014 starts at the beginning of the first range and ends at the end of the
5015 second. */
5016 if (no_overlap)
5018 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5019 range_successor (high0),
5020 1, low1, 0)))
5021 in_p = 0, low = low0, high = high1;
5022 else
5024 /* Canonicalize - [min, x] into - [-, x]. */
5025 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5026 switch (TREE_CODE (TREE_TYPE (low0)))
5028 case ENUMERAL_TYPE:
5029 if (TYPE_PRECISION (TREE_TYPE (low0))
5030 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5031 break;
5032 /* FALLTHROUGH */
5033 case INTEGER_TYPE:
5034 if (tree_int_cst_equal (low0,
5035 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5036 low0 = 0;
5037 break;
5038 case POINTER_TYPE:
5039 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5040 && integer_zerop (low0))
5041 low0 = 0;
5042 break;
5043 default:
5044 break;
5047 /* Canonicalize - [x, max] into - [x, -]. */
5048 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5049 switch (TREE_CODE (TREE_TYPE (high1)))
5051 case ENUMERAL_TYPE:
5052 if (TYPE_PRECISION (TREE_TYPE (high1))
5053 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5054 break;
5055 /* FALLTHROUGH */
5056 case INTEGER_TYPE:
5057 if (tree_int_cst_equal (high1,
5058 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5059 high1 = 0;
5060 break;
5061 case POINTER_TYPE:
5062 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5063 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5064 high1, 1,
5065 build_int_cst (TREE_TYPE (high1), 1),
5066 1)))
5067 high1 = 0;
5068 break;
5069 default:
5070 break;
5073 /* The ranges might also be adjacent between the maximum and
5074 minimum values of the given type. For
5075 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5076 return + [x + 1, y - 1]. */
5077 if (low0 == 0 && high1 == 0)
5079 low = range_successor (high0);
5080 high = range_predecessor (low1);
5081 if (low == 0 || high == 0)
5082 return 0;
5084 in_p = 1;
5086 else
5087 return 0;
5090 else if (subset)
5091 in_p = 0, low = low0, high = high0;
5092 else
5093 in_p = 0, low = low0, high = high1;
5096 *pin_p = in_p, *plow = low, *phigh = high;
5097 return 1;
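/* Sketch: merging the included ranges + [2, 10] and + [5, 20] (the
   AND of the two range tests) yields the single overlap + [5, 10]:  */

static int
merge_ranges_example (int x)
{
  /* (x >= 2 && x <= 10) && (x >= 5 && x <= 20)  */
  return x >= 5 && x <= 10;
}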
5101 /* Subroutine of fold, looking inside expressions of the form
5102 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5103 of the COND_EXPR. This function is being used also to optimize
5104 A op B ? C : A, by reversing the comparison first.
5106 Return a folded expression whose code is not a COND_EXPR
5107 anymore, or NULL_TREE if no folding opportunity is found. */
5109 static tree
5110 fold_cond_expr_with_comparison (location_t loc, tree type,
5111 tree arg0, tree arg1, tree arg2)
5113 enum tree_code comp_code = TREE_CODE (arg0);
5114 tree arg00 = TREE_OPERAND (arg0, 0);
5115 tree arg01 = TREE_OPERAND (arg0, 1);
5116 tree arg1_type = TREE_TYPE (arg1);
5117 tree tem;
5119 STRIP_NOPS (arg1);
5120 STRIP_NOPS (arg2);
5122 /* If we have A op 0 ? A : -A, consider applying the following
5123 transformations:
5125 A == 0? A : -A same as -A
5126 A != 0? A : -A same as A
5127 A >= 0? A : -A same as abs (A)
5128 A > 0? A : -A same as abs (A)
5129 A <= 0? A : -A same as -abs (A)
5130 A < 0? A : -A same as -abs (A)
5132 None of these transformations work for modes with signed
5133 zeros. If A is +/-0, the first two transformations will
5134 change the sign of the result (from +0 to -0, or vice
5135 versa). The last four will fix the sign of the result,
5136 even though the original expressions could be positive or
5137 negative, depending on the sign of A.
5139 Note that all these transformations are correct if A is
5140 NaN, since the two alternatives (A and -A) are also NaNs. */
5141 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5142 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5143 ? real_zerop (arg01)
5144 : integer_zerop (arg01))
5145 && ((TREE_CODE (arg2) == NEGATE_EXPR
5146 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5147 /* In the case that A is of the form X-Y, '-A' (arg2) may
5148 have already been folded to Y-X, check for that. */
5149 || (TREE_CODE (arg1) == MINUS_EXPR
5150 && TREE_CODE (arg2) == MINUS_EXPR
5151 && operand_equal_p (TREE_OPERAND (arg1, 0),
5152 TREE_OPERAND (arg2, 1), 0)
5153 && operand_equal_p (TREE_OPERAND (arg1, 1),
5154 TREE_OPERAND (arg2, 0), 0))))
5155 switch (comp_code)
5157 case EQ_EXPR:
5158 case UNEQ_EXPR:
5159 tem = fold_convert_loc (loc, arg1_type, arg1);
5160 return fold_convert_loc (loc, type, negate_expr (tem));
5161 case NE_EXPR:
5162 case LTGT_EXPR:
5163 return fold_convert_loc (loc, type, arg1);
5164 case UNGE_EXPR:
5165 case UNGT_EXPR:
5166 if (flag_trapping_math)
5167 break;
5168 /* Fall through. */
5169 case GE_EXPR:
5170 case GT_EXPR:
5171 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5172 break;
5173 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5174 return fold_convert_loc (loc, type, tem);
5175 case UNLE_EXPR:
5176 case UNLT_EXPR:
5177 if (flag_trapping_math)
5178 break;
5179 /* FALLTHRU */
5180 case LE_EXPR:
5181 case LT_EXPR:
5182 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5183 break;
5184 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5185 return negate_expr (fold_convert_loc (loc, type, tem));
5186 default:
5187 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5188 break;
5191 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5192 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5193 both transformations are correct when A is NaN: A != 0
5194 is then true, and A == 0 is false. */
5196 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5197 && integer_zerop (arg01) && integer_zerop (arg2))
5199 if (comp_code == NE_EXPR)
5200 return fold_convert_loc (loc, type, arg1);
5201 else if (comp_code == EQ_EXPR)
5202 return build_zero_cst (type);
5205 /* Try some transformations of A op B ? A : B.
5207 A == B? A : B same as B
5208 A != B? A : B same as A
5209 A >= B? A : B same as max (A, B)
5210 A > B? A : B same as max (B, A)
5211 A <= B? A : B same as min (A, B)
5212 A < B? A : B same as min (B, A)
5214 As above, these transformations don't work in the presence
5215 of signed zeros. For example, if A and B are zeros of
5216 opposite sign, the first two transformations will change
5217 the sign of the result. In the last four, the original
5218 expressions give different results for (A=+0, B=-0) and
5219 (A=-0, B=+0), but the transformed expressions do not.
5221 The first two transformations are correct if either A or B
5222 is a NaN. In the first transformation, the condition will
5223 be false, and B will indeed be chosen. In the case of the
5224 second transformation, the condition A != B will be true,
5225 and A will be chosen.
5227 The conversions to max() and min() are not correct if B is
5228 a number and A is not. The conditions in the original
5229 expressions will be false, so all four give B. The min()
5230 and max() versions would give a NaN instead. */
5231 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5232 && operand_equal_for_comparison_p (arg01, arg2)
5233 /* Avoid these transformations if the COND_EXPR may be used
5234 as an lvalue in the C++ front-end. PR c++/19199. */
5235 && (in_gimple_form
5236 || VECTOR_TYPE_P (type)
5237 || (! lang_GNU_CXX ()
5238 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5239 || ! maybe_lvalue_p (arg1)
5240 || ! maybe_lvalue_p (arg2)))
5242 tree comp_op0 = arg00;
5243 tree comp_op1 = arg01;
5244 tree comp_type = TREE_TYPE (comp_op0);
5246 switch (comp_code)
5248 case EQ_EXPR:
5249 return fold_convert_loc (loc, type, arg2);
5250 case NE_EXPR:
5251 return fold_convert_loc (loc, type, arg1);
5252 case LE_EXPR:
5253 case LT_EXPR:
5254 case UNLE_EXPR:
5255 case UNLT_EXPR:
5256 /* In C++ a ?: expression can be an lvalue, so put the
5257 operand which will be used if they are equal first
5258 so that we can convert this back to the
5259 corresponding COND_EXPR. */
5260 if (!HONOR_NANS (arg1))
5262 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5263 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5264 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5265 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5266 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5267 comp_op1, comp_op0);
5268 return fold_convert_loc (loc, type, tem);
5270 break;
5271 case GE_EXPR:
5272 case GT_EXPR:
5273 case UNGE_EXPR:
5274 case UNGT_EXPR:
5275 if (!HONOR_NANS (arg1))
5277 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5278 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5279 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5280 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5281 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5282 comp_op1, comp_op0);
5283 return fold_convert_loc (loc, type, tem);
5285 break;
5286 case UNEQ_EXPR:
5287 if (!HONOR_NANS (arg1))
5288 return fold_convert_loc (loc, type, arg2);
5289 break;
5290 case LTGT_EXPR:
5291 if (!HONOR_NANS (arg1))
5292 return fold_convert_loc (loc, type, arg1);
5293 break;
5294 default:
5295 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5296 break;
5300 return NULL_TREE;
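/* A sketch of two of the folds above on plain ints, where signed
   zeros are not a concern:  */

static int
cond_abs_example (int a)
{
  return a >= 0 ? a : -a;  /* A >= 0 ? A : -A  ==>  abs (A) */
}

static int
cond_min_example (int a, int b)
{
  return a <= b ? a : b;   /* A <= B ? A : B  ==>  min (A, B) */
}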
5305 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5306 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5307 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5308 false) >= 2)
5309 #endif
5311 /* EXP is some logical combination of boolean tests. See if we can
5312 merge it into some range test. Return the new tree if so. */
5314 static tree
5315 fold_range_test (location_t loc, enum tree_code code, tree type,
5316 tree op0, tree op1)
5318 int or_op = (code == TRUTH_ORIF_EXPR
5319 || code == TRUTH_OR_EXPR);
5320 int in0_p, in1_p, in_p;
5321 tree low0, low1, low, high0, high1, high;
5322 bool strict_overflow_p = false;
5323 tree tem, lhs, rhs;
5324 const char * const warnmsg = G_("assuming signed overflow does not occur "
5325 "when simplifying range test");
5327 if (!INTEGRAL_TYPE_P (type))
5328 return 0;
5330 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5331 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5333 /* If this is an OR operation, invert both sides; we will invert
5334 again at the end. */
5335 if (or_op)
5336 in0_p = ! in0_p, in1_p = ! in1_p;
5338 /* If both expressions are the same, if we can merge the ranges, and we
5339 can build the range test, return it or it inverted. If one of the
5340 ranges is always true or always false, consider it to be the same
5341 expression as the other. */
5342 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5343 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5344 in1_p, low1, high1)
5345 && 0 != (tem = (build_range_check (loc, type,
5346 lhs != 0 ? lhs
5347 : rhs != 0 ? rhs : integer_zero_node,
5348 in_p, low, high))))
5350 if (strict_overflow_p)
5351 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5352 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5355 /* On machines where branches are expensive, if this is a
5356 short-circuited branch and the underlying object on both sides
5357 is the same, make a non-short-circuit operation. */
5358 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5359 && !flag_sanitize_coverage
5360 && lhs != 0 && rhs != 0
5361 && (code == TRUTH_ANDIF_EXPR
5362 || code == TRUTH_ORIF_EXPR)
5363 && operand_equal_p (lhs, rhs, 0))
5365 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5366 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5367 which cases we can't do this. */
5368 if (simple_operand_p (lhs))
5369 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5370 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5371 type, op0, op1);
5373 else if (!lang_hooks.decls.global_bindings_p ()
5374 && !CONTAINS_PLACEHOLDER_P (lhs))
5376 tree common = save_expr (lhs);
5378 if (0 != (lhs = build_range_check (loc, type, common,
5379 or_op ? ! in0_p : in0_p,
5380 low0, high0))
5381 && (0 != (rhs = build_range_check (loc, type, common,
5382 or_op ? ! in1_p : in1_p,
5383 low1, high1))))
5385 if (strict_overflow_p)
5386 fold_overflow_warning (warnmsg,
5387 WARN_STRICT_OVERFLOW_COMPARISON);
5388 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5389 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5390 type, lhs, rhs);
5395 return 0;
5398 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5399 bit value. Arrange things so the extra bits will be set to zero if and
5400 only if C is sign-extended to its full width. If MASK is nonzero,
5401 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5403 static tree
5404 unextend (tree c, int p, int unsignedp, tree mask)
5406 tree type = TREE_TYPE (c);
5407 int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
5408 tree temp;
5410 if (p == modesize || unsignedp)
5411 return c;
5413 /* We work by getting just the sign bit into the low-order bit, then
5414 into the high-order bit, then sign-extend. We then XOR that value
5415 with C. */
5416 temp = build_int_cst (TREE_TYPE (c),
5417 wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
5419 /* We must use a signed type in order to get an arithmetic right shift.
5420 However, we must also avoid introducing accidental overflows, so that
5421 a subsequent call to integer_zerop will work. Hence we must
5422 do the type conversion here. At this point, the constant is either
5423 zero or one, and the conversion to a signed type can never overflow.
5424 We could get an overflow if this conversion is done anywhere else. */
5425 if (TYPE_UNSIGNED (type))
5426 temp = fold_convert (signed_type_for (type), temp);
5428 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5429 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5430 if (mask != 0)
5431 temp = const_binop (BIT_AND_EXPR, temp,
5432 fold_convert (TREE_TYPE (c), mask));
5433 /* If necessary, convert the type back to match the type of C. */
5434 if (TYPE_UNSIGNED (type))
5435 temp = fold_convert (type, temp);
5437 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
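/* A sketch of the sign-bit trick above for a 32-bit int.  It assumes
   two's complement and an arithmetic right shift of negative values
   (implementation-defined in ISO C, but what GCC itself provides),
   with 0 < P < 32.  */

static int
unextend_example (int c, int p)
{
  int sign = (c >> (p - 1)) & 1;  /* sign bit of the P-bit value */
  int extra = (int) ((unsigned int) sign << 31) >> (31 - p);
  return c ^ extra;  /* extra bits end up zero iff C was sign-extended */
}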
5440 /* For an expression that has the form
5441 (A && B) || ~B
5443 (A || B) && ~B,
5444 we can drop one of the inner expressions and simplify to
5445 A || ~B
5447 A && ~B
5448 LOC is the location of the resulting expression. OP is the inner
5449 logical operation; the left-hand side in the examples above, while CMPOP
5450 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5451 removing a condition that guards another, as in
5452 (A != NULL && A->...) || A == NULL
5453 which we must not transform. If RHS_ONLY is true, only eliminate the
5454 right-most operand of the inner logical operation. */
5456 static tree
5457 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5458 bool rhs_only)
5460 tree type = TREE_TYPE (cmpop);
5461 enum tree_code code = TREE_CODE (cmpop);
5462 enum tree_code truthop_code = TREE_CODE (op);
5463 tree lhs = TREE_OPERAND (op, 0);
5464 tree rhs = TREE_OPERAND (op, 1);
5465 tree orig_lhs = lhs, orig_rhs = rhs;
5466 enum tree_code rhs_code = TREE_CODE (rhs);
5467 enum tree_code lhs_code = TREE_CODE (lhs);
5468 enum tree_code inv_code;
5470 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5471 return NULL_TREE;
5473 if (TREE_CODE_CLASS (code) != tcc_comparison)
5474 return NULL_TREE;
5476 if (rhs_code == truthop_code)
5478 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5479 if (newrhs != NULL_TREE)
5481 rhs = newrhs;
5482 rhs_code = TREE_CODE (rhs);
5485 if (lhs_code == truthop_code && !rhs_only)
5487 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5488 if (newlhs != NULL_TREE)
5490 lhs = newlhs;
5491 lhs_code = TREE_CODE (lhs);
5495 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5496 if (inv_code == rhs_code
5497 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5498 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5499 return lhs;
5500 if (!rhs_only && inv_code == lhs_code
5501 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5502 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5503 return rhs;
5504 if (rhs != orig_rhs || lhs != orig_lhs)
5505 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5506 lhs, rhs);
5507 return NULL_TREE;
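/* Sketch on plain int conditions: the inner expression opposite the
   comparison is dropped.  The guarded-pointer case quoted above is
   the one that must be left alone.  */

static int
merge_opposite_arm_example (int a, int b)
{
  /* (a && b) || !b  simplifies to:  */
  return a || !b;
}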
5510 /* Find ways of folding logical expressions of LHS and RHS:
5511 Try to merge two comparisons to the same innermost item.
5512 Look for range tests like "ch >= '0' && ch <= '9'".
5513 Look for combinations of simple terms on machines with expensive branches
5514 and evaluate the RHS unconditionally.
5516 For example, if we have p->a == 2 && p->b == 4 and we can make an
5517 object large enough to span both A and B, we can do this with a comparison
5518 against the object ANDed with the a mask.
5520 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5521 operations to do this with one comparison.
5523 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5524 function and the one above.
5526 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5527 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5529 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5530 two operands.
5532 We return the simplified tree or 0 if no optimization is possible. */
5534 static tree
5535 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5536 tree lhs, tree rhs)
5538 /* If this is the "or" of two comparisons, we can do something if
5539 the comparisons are NE_EXPR. If this is the "and", we can do something
5540 if the comparisons are EQ_EXPR. I.e.,
5541 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5543 WANTED_CODE is this operation code. For single bit fields, we can
5544 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5545 comparison for one-bit fields. */
5547 enum tree_code wanted_code;
5548 enum tree_code lcode, rcode;
5549 tree ll_arg, lr_arg, rl_arg, rr_arg;
5550 tree ll_inner, lr_inner, rl_inner, rr_inner;
5551 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5552 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5553 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5554 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5555 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5556 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5557 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5558 scalar_int_mode lnmode, rnmode;
5559 tree ll_mask, lr_mask, rl_mask, rr_mask;
5560 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5561 tree l_const, r_const;
5562 tree lntype, rntype, result;
5563 HOST_WIDE_INT first_bit, end_bit;
5564 int volatilep;
5566 /* Start by getting the comparison codes. Fail if anything is volatile.
5567 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5568 it were surrounded with a NE_EXPR. */
5570 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5571 return 0;
5573 lcode = TREE_CODE (lhs);
5574 rcode = TREE_CODE (rhs);
5576 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5578 lhs = build2 (NE_EXPR, truth_type, lhs,
5579 build_int_cst (TREE_TYPE (lhs), 0));
5580 lcode = NE_EXPR;
5583 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5585 rhs = build2 (NE_EXPR, truth_type, rhs,
5586 build_int_cst (TREE_TYPE (rhs), 0));
5587 rcode = NE_EXPR;
5590 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5591 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5592 return 0;
5594 ll_arg = TREE_OPERAND (lhs, 0);
5595 lr_arg = TREE_OPERAND (lhs, 1);
5596 rl_arg = TREE_OPERAND (rhs, 0);
5597 rr_arg = TREE_OPERAND (rhs, 1);
5599 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5600 if (simple_operand_p (ll_arg)
5601 && simple_operand_p (lr_arg))
5603 if (operand_equal_p (ll_arg, rl_arg, 0)
5604 && operand_equal_p (lr_arg, rr_arg, 0))
5606 result = combine_comparisons (loc, code, lcode, rcode,
5607 truth_type, ll_arg, lr_arg);
5608 if (result)
5609 return result;
5611 else if (operand_equal_p (ll_arg, rr_arg, 0)
5612 && operand_equal_p (lr_arg, rl_arg, 0))
5614 result = combine_comparisons (loc, code, lcode,
5615 swap_tree_comparison (rcode),
5616 truth_type, ll_arg, lr_arg);
5617 if (result)
5618 return result;
5622 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5623 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5625 /* If the RHS can be evaluated unconditionally and its operands are
5626 simple, it wins to evaluate the RHS unconditionally on machines
5627 with expensive branches. In this case, this isn't a comparison
5628 that can be merged. */
5630 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5631 false) >= 2
5632 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5633 && simple_operand_p (rl_arg)
5634 && simple_operand_p (rr_arg))
5636 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5637 if (code == TRUTH_OR_EXPR
5638 && lcode == NE_EXPR && integer_zerop (lr_arg)
5639 && rcode == NE_EXPR && integer_zerop (rr_arg)
5640 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5641 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5642 return build2_loc (loc, NE_EXPR, truth_type,
5643 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5644 ll_arg, rl_arg),
5645 build_int_cst (TREE_TYPE (ll_arg), 0));
5647 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5648 if (code == TRUTH_AND_EXPR
5649 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5650 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5651 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5652 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5653 return build2_loc (loc, EQ_EXPR, truth_type,
5654 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5655 ll_arg, rl_arg),
5656 build_int_cst (TREE_TYPE (ll_arg), 0));
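/* Illustrative aside (not from the original source): with a = 0x10 and
   b = 0x01, (a != 0) || (b != 0) is true and so is (a | b) != 0, since
   a | b == 0x11; when both are zero, a | b == 0 and both forms are
   false.  The single BIT_IOR_EXPR test thus replaces two comparisons
   and a conditional branch.  */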
5659 /* See if the comparisons can be merged. Then get all the parameters for
5660 each side. */
5662 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5663 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5664 return 0;
5666 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5667 volatilep = 0;
5668 ll_inner = decode_field_reference (loc, &ll_arg,
5669 &ll_bitsize, &ll_bitpos, &ll_mode,
5670 &ll_unsignedp, &ll_reversep, &volatilep,
5671 &ll_mask, &ll_and_mask);
5672 lr_inner = decode_field_reference (loc, &lr_arg,
5673 &lr_bitsize, &lr_bitpos, &lr_mode,
5674 &lr_unsignedp, &lr_reversep, &volatilep,
5675 &lr_mask, &lr_and_mask);
5676 rl_inner = decode_field_reference (loc, &rl_arg,
5677 &rl_bitsize, &rl_bitpos, &rl_mode,
5678 &rl_unsignedp, &rl_reversep, &volatilep,
5679 &rl_mask, &rl_and_mask);
5680 rr_inner = decode_field_reference (loc, &rr_arg,
5681 &rr_bitsize, &rr_bitpos, &rr_mode,
5682 &rr_unsignedp, &rr_reversep, &volatilep,
5683 &rr_mask, &rr_and_mask);
5685 /* The inner operation on the lhs of each comparison must be the
5686 same if we are to be able to do anything.
5687 Then see if we have constants. If not, the same must be true for
5688 the rhs's. */
5689 if (volatilep
5690 || ll_reversep != rl_reversep
5691 || ll_inner == 0 || rl_inner == 0
5692 || ! operand_equal_p (ll_inner, rl_inner, 0))
5693 return 0;
5695 if (TREE_CODE (lr_arg) == INTEGER_CST
5696 && TREE_CODE (rr_arg) == INTEGER_CST)
5698 l_const = lr_arg, r_const = rr_arg;
5699 lr_reversep = ll_reversep;
5701 else if (lr_reversep != rr_reversep
5702 || lr_inner == 0 || rr_inner == 0
5703 || ! operand_equal_p (lr_inner, rr_inner, 0))
5704 return 0;
5705 else
5706 l_const = r_const = 0;
5708 /* If either comparison code is not correct for our logical operation,
5709 fail. However, we can convert a one-bit comparison against zero into
5710 the opposite comparison against that bit being set in the field. */
5712 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5713 if (lcode != wanted_code)
5715 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5717 /* Make the left operand unsigned, since we are only interested
5718 in the value of one bit. Otherwise we are doing the wrong
5719 thing below. */
5720 ll_unsignedp = 1;
5721 l_const = ll_mask;
5723 else
5724 return 0;
5727 /* This is analogous to the code for l_const above. */
5728 if (rcode != wanted_code)
5730 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5732 rl_unsignedp = 1;
5733 r_const = rl_mask;
5735 else
5736 return 0;
5739 /* See if we can find a mode that contains both fields being compared on
5740 the left. If we can't, fail. Otherwise, update all constants and masks
5741 to be relative to a field of that size. */
5742 first_bit = MIN (ll_bitpos, rl_bitpos);
5743 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5744 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5745 TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
5746 volatilep, &lnmode))
5747 return 0;
5749 lnbitsize = GET_MODE_BITSIZE (lnmode);
5750 lnbitpos = first_bit & ~ (lnbitsize - 1);
5751 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5752 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5754 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5756 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5757 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5760 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5761 size_int (xll_bitpos));
5762 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5763 size_int (xrl_bitpos));
5765 if (l_const)
5767 l_const = fold_convert_loc (loc, lntype, l_const);
5768 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5769 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5770 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5771 fold_build1_loc (loc, BIT_NOT_EXPR,
5772 lntype, ll_mask))))
5774 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5776 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5779 if (r_const)
5781 r_const = fold_convert_loc (loc, lntype, r_const);
5782 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5783 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5784 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5785 fold_build1_loc (loc, BIT_NOT_EXPR,
5786 lntype, rl_mask))))
5788 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5790 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5794 /* If the right sides are not constant, do the same for them. Also,
5795 disallow this optimization if a size or signedness mismatch occurs
5796 between the left and right sides. */
5797 if (l_const == 0)
5799 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5800 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5801 /* Make sure the two fields on the right
5802 correspond to the left without being swapped. */
5803 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5804 return 0;
5806 first_bit = MIN (lr_bitpos, rr_bitpos);
5807 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5808 if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5809 TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
5810 volatilep, &rnmode))
5811 return 0;
5813 rnbitsize = GET_MODE_BITSIZE (rnmode);
5814 rnbitpos = first_bit & ~ (rnbitsize - 1);
5815 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5816 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5818 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5820 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5821 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5824 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5825 rntype, lr_mask),
5826 size_int (xlr_bitpos));
5827 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5828 rntype, rr_mask),
5829 size_int (xrr_bitpos));
5831 /* Make a mask that corresponds to both fields being compared.
5832 Do this for both items being compared. If the operands are the
5833 same size and the bits being compared are in the same position
5834 then we can do this by masking both and comparing the masked
5835 results. */
5836 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5837 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5838 if (lnbitsize == rnbitsize
5839 && xll_bitpos == xlr_bitpos
5840 && lnbitpos >= 0
5841 && rnbitpos >= 0)
5843 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
5844 lntype, lnbitsize, lnbitpos,
5845 ll_unsignedp || rl_unsignedp, ll_reversep);
5846 if (! all_ones_mask_p (ll_mask, lnbitsize))
5847 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5849 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
5850 rntype, rnbitsize, rnbitpos,
5851 lr_unsignedp || rr_unsignedp, lr_reversep);
5852 if (! all_ones_mask_p (lr_mask, rnbitsize))
5853 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5855 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5858 /* There is still another way we can do something: If both pairs of
5859 fields being compared are adjacent, we may be able to make a wider
5860 field containing them both.
5862 Note that we still must mask the lhs/rhs expressions. Furthermore,
5863 the mask must be shifted to account for the shift done by
5864 make_bit_field_ref. */
5865 if (((ll_bitsize + ll_bitpos == rl_bitpos
5866 && lr_bitsize + lr_bitpos == rr_bitpos)
5867 || (ll_bitpos == rl_bitpos + rl_bitsize
5868 && lr_bitpos == rr_bitpos + rr_bitsize))
5869 && ll_bitpos >= 0
5870 && rl_bitpos >= 0
5871 && lr_bitpos >= 0
5872 && rr_bitpos >= 0)
5874 tree type;
5876 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
5877 ll_bitsize + rl_bitsize,
5878 MIN (ll_bitpos, rl_bitpos),
5879 ll_unsignedp, ll_reversep);
5880 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
5881 lr_bitsize + rr_bitsize,
5882 MIN (lr_bitpos, rr_bitpos),
5883 lr_unsignedp, lr_reversep);
5885 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5886 size_int (MIN (xll_bitpos, xrl_bitpos)));
5887 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5888 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5890 /* Convert to the smaller type before masking out unwanted bits. */
5891 type = lntype;
5892 if (lntype != rntype)
5894 if (lnbitsize > rnbitsize)
5896 lhs = fold_convert_loc (loc, rntype, lhs);
5897 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5898 type = rntype;
5900 else if (lnbitsize < rnbitsize)
5902 rhs = fold_convert_loc (loc, lntype, rhs);
5903 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5904 type = lntype;
5908 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5909 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5911 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5912 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5914 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5917 return 0;
5920 /* Handle the case of comparisons with constants. If there is something in
5921 common between the masks, those bits of the constants must be the same.
5922 If not, the condition is always false. Test for this to avoid generating
5923 incorrect code below. */
5924 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5925 if (! integer_zerop (result)
5926 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5927 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5929 if (wanted_code == NE_EXPR)
5931 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5932 return constant_boolean_node (true, truth_type);
5934 else
5936 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5937 return constant_boolean_node (false, truth_type);
5941 if (lnbitpos < 0)
5942 return 0;
5944 /* Construct the expression we will return. First get the component
5945 reference we will make. Unless the mask is all ones the width of
5946 that field, perform the mask operation. Then compare with the
5947 merged constant. */
5948 result = make_bit_field_ref (loc, ll_inner, ll_arg,
5949 lntype, lnbitsize, lnbitpos,
5950 ll_unsignedp || rl_unsignedp, ll_reversep);
5952 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5953 if (! all_ones_mask_p (ll_mask, lnbitsize))
5954 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5956 return build2_loc (loc, wanted_code, truth_type, result,
5957 const_binop (BIT_IOR_EXPR, l_const, r_const));
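/* A minimal standalone sketch of the merge fold_truth_andor_1 performs,
   assuming two adjacent 8-bit fields packed into a 32-bit word; the
   helper names and constants below are hypothetical, for illustration
   only.  */

#include <assert.h>
#include <stdint.h>

/* Testing the two byte fields separately...  */
static int split_compare (uint32_t x)
{
  return (x & 0xff) == 2 && ((x >> 8) & 0xff) == 4;
}

/* ...is equivalent to one masked compare against the merged constant.  */
static int merged_compare (uint32_t x)
{
  return (x & 0xffff) == 0x0402;
}

int main (void)
{
  for (uint32_t x = 0; x < 0x20000; x++)
    assert (split_compare (x) == merged_compare (x));
  return 0;
}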
5960 /* T is an integer expression that is being multiplied, divided, or taken a
5961 modulus (CODE says which and what kind of divide or modulus) by a
5962 constant C. See if we can eliminate that operation by folding it with
5963 other operations already in T. WIDE_TYPE, if non-null, is a type that
5964 should be used for the computation if wider than our type.
5966 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5967 (X * 2) + (Y * 4). We must, however, be assured that either the original
5968 expression would not overflow or that overflow is undefined for the type
5969 in the language in question.
5971 If we return a non-null expression, it is an equivalent form of the
5972 original computation, but need not be in the original type.
5974 We set *STRICT_OVERFLOW_P to true if the return value depends on
5975 signed overflow being undefined. Otherwise we do not change
5976 *STRICT_OVERFLOW_P. */
5978 static tree
5979 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5980 bool *strict_overflow_p)
5982 /* To avoid exponential search depth, refuse to allow recursion past
5983 three levels. Beyond that (1) it's highly unlikely that we'll find
5984 something interesting and (2) we've probably processed it before
5985 when we built the inner expression. */
5987 static int depth;
5988 tree ret;
5990 if (depth > 3)
5991 return NULL;
5993 depth++;
5994 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5995 depth--;
5997 return ret;
6000 static tree
6001 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6002 bool *strict_overflow_p)
6004 tree type = TREE_TYPE (t);
6005 enum tree_code tcode = TREE_CODE (t);
6006 tree ctype = (wide_type != 0
6007 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6008 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6009 ? wide_type : type);
6010 tree t1, t2;
6011 int same_p = tcode == code;
6012 tree op0 = NULL_TREE, op1 = NULL_TREE;
6013 bool sub_strict_overflow_p;
6015 /* Don't deal with constants of zero here; they confuse the code below. */
6016 if (integer_zerop (c))
6017 return NULL_TREE;
6019 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6020 op0 = TREE_OPERAND (t, 0);
6022 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6023 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6025 /* Note that we need not handle conditional operations here since fold
6026 already handles those cases. So just do arithmetic here. */
6027 switch (tcode)
6029 case INTEGER_CST:
6030 /* For a constant, we can always simplify if we are a multiply
6031 or (for divide and modulus) if it is a multiple of our constant. */
6032 if (code == MULT_EXPR
6033 || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6034 TYPE_SIGN (type)))
6036 tree tem = const_binop (code, fold_convert (ctype, t),
6037 fold_convert (ctype, c));
6038 /* If the multiplication overflowed, we lost information on it.
6039 See PR68142 and PR69845. */
6040 if (TREE_OVERFLOW (tem))
6041 return NULL_TREE;
6042 return tem;
6044 break;
6046 CASE_CONVERT: case NON_LVALUE_EXPR:
6047 /* If op0 is an expression ... */
6048 if ((COMPARISON_CLASS_P (op0)
6049 || UNARY_CLASS_P (op0)
6050 || BINARY_CLASS_P (op0)
6051 || VL_EXP_CLASS_P (op0)
6052 || EXPRESSION_CLASS_P (op0))
6053 /* ... and has wrapping overflow, and its type is smaller
6054 than ctype, then we cannot pass through as widening. */
6055 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6056 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6057 && (TYPE_PRECISION (ctype)
6058 > TYPE_PRECISION (TREE_TYPE (op0))))
6059 /* ... or this is a truncation (t is narrower than op0),
6060 then we cannot pass through this narrowing. */
6061 || (TYPE_PRECISION (type)
6062 < TYPE_PRECISION (TREE_TYPE (op0)))
6063 /* ... or signedness changes for division or modulus,
6064 then we cannot pass through this conversion. */
6065 || (code != MULT_EXPR
6066 && (TYPE_UNSIGNED (ctype)
6067 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6068 /* ... or has undefined overflow while the converted to
6069 type has not, we cannot do the operation in the inner type
6070 as that would introduce undefined overflow. */
6071 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6072 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6073 && !TYPE_OVERFLOW_UNDEFINED (type))))
6074 break;
6076 /* Pass the constant down and see if we can make a simplification. If
6077 we can, replace this expression with the inner simplification for
6078 possible later conversion to our or some other type. */
6079 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6080 && TREE_CODE (t2) == INTEGER_CST
6081 && !TREE_OVERFLOW (t2)
6082 && (0 != (t1 = extract_muldiv (op0, t2, code,
6083 code == MULT_EXPR
6084 ? ctype : NULL_TREE,
6085 strict_overflow_p))))
6086 return t1;
6087 break;
6089 case ABS_EXPR:
6090 /* If widening the type changes it from signed to unsigned, then we
6091 must avoid building ABS_EXPR itself as unsigned. */
6092 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6094 tree cstype = (*signed_type_for) (ctype);
6095 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6096 != 0)
6098 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6099 return fold_convert (ctype, t1);
6101 break;
6103 /* If the constant is negative, we cannot simplify this. */
6104 if (tree_int_cst_sgn (c) == -1)
6105 break;
6106 /* FALLTHROUGH */
6107 case NEGATE_EXPR:
6108 /* For division and modulus, type can't be unsigned, as e.g.
6109 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6110 For signed types, even with wrapping overflow, this is fine. */
6111 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6112 break;
6113 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6114 != 0)
6115 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6116 break;
6118 case MIN_EXPR: case MAX_EXPR:
6119 /* If widening the type changes the signedness, then we can't perform
6120 this optimization as that changes the result. */
6121 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6122 break;
6124 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6125 sub_strict_overflow_p = false;
6126 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6127 &sub_strict_overflow_p)) != 0
6128 && (t2 = extract_muldiv (op1, c, code, wide_type,
6129 &sub_strict_overflow_p)) != 0)
6131 if (tree_int_cst_sgn (c) < 0)
6132 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6133 if (sub_strict_overflow_p)
6134 *strict_overflow_p = true;
6135 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6136 fold_convert (ctype, t2));
6138 break;
6140 case LSHIFT_EXPR: case RSHIFT_EXPR:
6141 /* If the second operand is constant, this is a multiplication
6142 or floor division by a power of two, so we can treat it that
6143 way unless the multiplier or divisor overflows. Signed
6144 left-shift overflow is implementation-defined rather than
6145 undefined in C90, so do not convert signed left shift into
6146 multiplication. */
6147 if (TREE_CODE (op1) == INTEGER_CST
6148 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6149 /* const_binop may not detect overflow correctly,
6150 so check for it explicitly here. */
6151 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6152 wi::to_wide (op1))
6153 && 0 != (t1 = fold_convert (ctype,
6154 const_binop (LSHIFT_EXPR,
6155 size_one_node,
6156 op1)))
6157 && !TREE_OVERFLOW (t1))
6158 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6159 ? MULT_EXPR : FLOOR_DIV_EXPR,
6160 ctype,
6161 fold_convert (ctype, op0),
6162 t1),
6163 c, code, wide_type, strict_overflow_p);
6164 break;
6166 case PLUS_EXPR: case MINUS_EXPR:
6167 /* See if we can eliminate the operation on both sides. If we can, we
6168 can return a new PLUS or MINUS. If we can't, the only remaining
6169 cases where we can do anything are if the second operand is a
6170 constant. */
6171 sub_strict_overflow_p = false;
6172 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6173 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6174 if (t1 != 0 && t2 != 0
6175 && TYPE_OVERFLOW_WRAPS (ctype)
6176 && (code == MULT_EXPR
6177 /* If not multiplication, we can only do this if both operands
6178 are divisible by c. */
6179 || (multiple_of_p (ctype, op0, c)
6180 && multiple_of_p (ctype, op1, c))))
6182 if (sub_strict_overflow_p)
6183 *strict_overflow_p = true;
6184 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6185 fold_convert (ctype, t2));
6188 /* If this was a subtraction, negate OP1 and set it to be an addition.
6189 This simplifies the logic below. */
6190 if (tcode == MINUS_EXPR)
6192 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6193 /* If OP1 was not easily negatable, the constant may be OP0. */
6194 if (TREE_CODE (op0) == INTEGER_CST)
6196 std::swap (op0, op1);
6197 std::swap (t1, t2);
6201 if (TREE_CODE (op1) != INTEGER_CST)
6202 break;
6204 /* If either OP1 or C are negative, this optimization is not safe for
6205 some of the division and remainder types while for others we need
6206 to change the code. */
6207 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6209 if (code == CEIL_DIV_EXPR)
6210 code = FLOOR_DIV_EXPR;
6211 else if (code == FLOOR_DIV_EXPR)
6212 code = CEIL_DIV_EXPR;
6213 else if (code != MULT_EXPR
6214 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6215 break;
6218 /* If it's a multiply or a division/modulus operation of a multiple
6219 of our constant, do the operation and verify it doesn't overflow. */
6220 if (code == MULT_EXPR
6221 || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6222 TYPE_SIGN (type)))
6224 op1 = const_binop (code, fold_convert (ctype, op1),
6225 fold_convert (ctype, c));
6226 /* We allow the constant to overflow with wrapping semantics. */
6227 if (op1 == 0
6228 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6229 break;
6231 else
6232 break;
6234 /* If we have an unsigned type, we cannot widen the operation since it
6235 will change the result if the original computation overflowed. */
6236 if (TYPE_UNSIGNED (ctype) && ctype != type)
6237 break;
6239 /* The last case is if we are a multiply. In that case, we can
6240 apply the distributive law to commute the multiply and addition
6241 if the multiplication of the constants doesn't overflow
6242 and overflow is defined. With undefined overflow
6243 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6244 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6245 return fold_build2 (tcode, ctype,
6246 fold_build2 (code, ctype,
6247 fold_convert (ctype, op0),
6248 fold_convert (ctype, c)),
6249 op1);
6251 break;
6253 case MULT_EXPR:
6254 /* We have a special case here if we are doing something like
6255 (C * 8) % 4 since we know that's zero. */
6256 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6257 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6258 /* If the multiplication can overflow we cannot optimize this. */
6259 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6260 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6261 && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6262 TYPE_SIGN (type)))
6264 *strict_overflow_p = true;
6265 return omit_one_operand (type, integer_zero_node, op0);
6268 /* ... fall through ... */
6270 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6271 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6272 /* If we can extract our operation from the LHS, do so and return a
6273 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6274 do something only if the second operand is a constant. */
6275 if (same_p
6276 && TYPE_OVERFLOW_WRAPS (ctype)
6277 && (t1 = extract_muldiv (op0, c, code, wide_type,
6278 strict_overflow_p)) != 0)
6279 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6280 fold_convert (ctype, op1));
6281 else if (tcode == MULT_EXPR && code == MULT_EXPR
6282 && TYPE_OVERFLOW_WRAPS (ctype)
6283 && (t1 = extract_muldiv (op1, c, code, wide_type,
6284 strict_overflow_p)) != 0)
6285 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6286 fold_convert (ctype, t1));
6287 else if (TREE_CODE (op1) != INTEGER_CST)
6288 return 0;
6290 /* If these are the same operation types, we can associate them
6291 assuming no overflow. */
6292 if (tcode == code)
6294 bool overflow_p = false;
6295 bool overflow_mul_p;
6296 signop sign = TYPE_SIGN (ctype);
6297 unsigned prec = TYPE_PRECISION (ctype);
6298 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6299 wi::to_wide (c, prec),
6300 sign, &overflow_mul_p);
6301 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6302 if (overflow_mul_p
6303 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6304 overflow_p = true;
6305 if (!overflow_p)
6306 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6307 wide_int_to_tree (ctype, mul));
6310 /* If these operations "cancel" each other, we have the main
6311 optimizations of this pass, which occur when either constant is a
6312 multiple of the other, in which case we replace this with an
6313 operation of either CODE or TCODE.
6315 If we have an unsigned type, we cannot do this since it will change
6316 the result if the original computation overflowed. */
6317 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6318 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6319 || (tcode == MULT_EXPR
6320 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6321 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6322 && code != MULT_EXPR)))
6324 if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6325 TYPE_SIGN (type)))
6327 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6328 *strict_overflow_p = true;
6329 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6330 fold_convert (ctype,
6331 const_binop (TRUNC_DIV_EXPR,
6332 op1, c)));
6334 else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6335 TYPE_SIGN (type)))
6337 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6338 *strict_overflow_p = true;
6339 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6340 fold_convert (ctype,
6341 const_binop (TRUNC_DIV_EXPR,
6342 c, op1)));
6345 break;
6347 default:
6348 break;
6351 return 0;
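/* A runnable check of the extract_muldiv example from its comment
   above ((X * 8 + Y * 16) / 4 becoming X * 2 + Y * 4), kept to small
   unsigned operands so no term can overflow; standalone sketch, not
   GCC code.  */

#include <assert.h>

int main (void)
{
  for (unsigned x = 0; x < 1000; x++)
    for (unsigned y = 0; y < 1000; y++)
      assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
  return 0;
}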
6354 /* Return a node which has the indicated constant VALUE (either 0 or
6355 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6356 and is of the indicated TYPE. */
6358 tree
6359 constant_boolean_node (bool value, tree type)
6361 if (type == integer_type_node)
6362 return value ? integer_one_node : integer_zero_node;
6363 else if (type == boolean_type_node)
6364 return value ? boolean_true_node : boolean_false_node;
6365 else if (TREE_CODE (type) == VECTOR_TYPE)
6366 return build_vector_from_val (type,
6367 build_int_cst (TREE_TYPE (type),
6368 value ? -1 : 0));
6369 else
6370 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6374 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6375 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6376 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6377 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6378 COND is the first argument to CODE; otherwise (as in the example
6379 given here), it is the second argument. TYPE is the type of the
6380 original expression. Return NULL_TREE if no simplification is
6381 possible. */
6383 static tree
6384 fold_binary_op_with_conditional_arg (location_t loc,
6385 enum tree_code code,
6386 tree type, tree op0, tree op1,
6387 tree cond, tree arg, int cond_first_p)
6389 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6390 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6391 tree test, true_value, false_value;
6392 tree lhs = NULL_TREE;
6393 tree rhs = NULL_TREE;
6394 enum tree_code cond_code = COND_EXPR;
6396 if (TREE_CODE (cond) == COND_EXPR
6397 || TREE_CODE (cond) == VEC_COND_EXPR)
6399 test = TREE_OPERAND (cond, 0);
6400 true_value = TREE_OPERAND (cond, 1);
6401 false_value = TREE_OPERAND (cond, 2);
6402 /* If this operand throws an exception, then it does not make
6403 sense to try to perform a logical or arithmetic operation
6404 involving it. */
6405 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6406 lhs = true_value;
6407 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6408 rhs = false_value;
6410 else if (!(TREE_CODE (type) != VECTOR_TYPE
6411 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6413 tree testtype = TREE_TYPE (cond);
6414 test = cond;
6415 true_value = constant_boolean_node (true, testtype);
6416 false_value = constant_boolean_node (false, testtype);
6418 else
6419 /* Detect the case of mixing vector and scalar types - bail out. */
6420 return NULL_TREE;
6422 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6423 cond_code = VEC_COND_EXPR;
6425 /* This transformation is only worthwhile if we don't have to wrap ARG
6426 in a SAVE_EXPR and the operation can be simplified without recursing
6427 on at least one of the branches once it's pushed inside the COND_EXPR. */
6428 if (!TREE_CONSTANT (arg)
6429 && (TREE_SIDE_EFFECTS (arg)
6430 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6431 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6432 return NULL_TREE;
6434 arg = fold_convert_loc (loc, arg_type, arg);
6435 if (lhs == 0)
6437 true_value = fold_convert_loc (loc, cond_type, true_value);
6438 if (cond_first_p)
6439 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6440 else
6441 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6443 if (rhs == 0)
6445 false_value = fold_convert_loc (loc, cond_type, false_value);
6446 if (cond_first_p)
6447 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6448 else
6449 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6452 /* Check that we have simplified at least one of the branches. */
6453 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6454 return NULL_TREE;
6456 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
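/* A standalone sketch of distributing a binary operation over a
   conditional, as fold_binary_op_with_conditional_arg does at tree
   level; plain C here, with small ranges chosen arbitrarily.  */

#include <assert.h>

int main (void)
{
  const int x = 7, y = 42;
  for (int b = 0; b <= 1; b++)
    for (int a = -5; a <= 5; a++)
      /* a + (b ? x : y) == b ? (a + x) : (a + y).  */
      assert (a + (b ? x : y) == (b ? a + x : a + y));
  return 0;
}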
6460 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6462 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6463 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6464 ADDEND is the same as X.
6466 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6467 and finite. The problematic cases are when X is zero, and its mode
6468 has signed zeros. In the case of rounding towards -infinity,
6469 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6470 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6472 bool
6473 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6475 if (!real_zerop (addend))
6476 return false;
6478 /* Don't allow the fold with -fsignaling-nans. */
6479 if (HONOR_SNANS (element_mode (type)))
6480 return false;
6482 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6483 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6484 return true;
6486 /* In a vector or complex, we would need to check the sign of all zeros. */
6487 if (TREE_CODE (addend) != REAL_CST)
6488 return false;
6490 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6491 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6492 negate = !negate;
6494 /* The mode has signed zeros, and we have to honor their sign.
6495 In this situation, there is only one case we can return true for.
6496 X - 0 is the same as X unless rounding towards -infinity is
6497 supported. */
6498 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
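/* Why the sign of zero blocks the fold above: a standalone C check,
   assuming IEEE arithmetic in the default round-to-nearest mode.  */

#include <assert.h>
#include <math.h>

int main (void)
{
  double x = -0.0;
  assert (signbit (x));
  assert (!signbit (x + 0.0));  /* -0.0 + 0.0 is +0.0: X + 0 != X here.  */
  assert (signbit (x - 0.0));   /* -0.0 - 0.0 stays -0.0: X - 0 == X.  */
  return 0;
}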
6501 /* Subroutine of match.pd that optimizes comparisons of a division by
6502 a nonzero integer constant against an integer constant, i.e.
6503 X/C1 op C2.
6505 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6506 GE_EXPR or LE_EXPR. C1 and C2 must be INTEGER_CSTs. */
6508 enum tree_code
6509 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
6510 tree *hi, bool *neg_overflow)
6512 tree prod, tmp, type = TREE_TYPE (c1);
6513 signop sign = TYPE_SIGN (type);
6514 bool overflow;
6516 /* We have to do this the hard way to detect unsigned overflow.
6517 prod = int_const_binop (MULT_EXPR, c1, c2); */
6518 wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
6519 prod = force_fit_type (type, val, -1, overflow);
6520 *neg_overflow = false;
6522 if (sign == UNSIGNED)
6524 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6525 *lo = prod;
6527 /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6528 val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
6529 *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
6531 else if (tree_int_cst_sgn (c1) >= 0)
6533 tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6534 switch (tree_int_cst_sgn (c2))
6536 case -1:
6537 *neg_overflow = true;
6538 *lo = int_const_binop (MINUS_EXPR, prod, tmp);
6539 *hi = prod;
6540 break;
6542 case 0:
6543 *lo = fold_negate_const (tmp, type);
6544 *hi = tmp;
6545 break;
6547 case 1:
6548 *hi = int_const_binop (PLUS_EXPR, prod, tmp);
6549 *lo = prod;
6550 break;
6552 default:
6553 gcc_unreachable ();
6556 else
6558 /* A negative divisor reverses the relational operators. */
6559 code = swap_tree_comparison (code);
6561 tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
6562 switch (tree_int_cst_sgn (c2))
6564 case -1:
6565 *hi = int_const_binop (MINUS_EXPR, prod, tmp);
6566 *lo = prod;
6567 break;
6569 case 0:
6570 *hi = fold_negate_const (tmp, type);
6571 *lo = tmp;
6572 break;
6574 case 1:
6575 *neg_overflow = true;
6576 *lo = int_const_binop (PLUS_EXPR, prod, tmp);
6577 *hi = prod;
6578 break;
6580 default:
6581 gcc_unreachable ();
6585 if (code != EQ_EXPR && code != NE_EXPR)
6586 return code;
6588 if (TREE_OVERFLOW (*lo)
6589 || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
6590 *lo = NULL_TREE;
6591 if (TREE_OVERFLOW (*hi)
6592 || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
6593 *hi = NULL_TREE;
6595 return code;
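/* A standalone sketch of the range fold_div_compare computes for
   X/C1 op C2: with unsigned X, C1 = 3 and C2 = 2, *lo is 6 and *hi
   is 8, so the division disappears.  The constants are arbitrary.  */

#include <assert.h>

int main (void)
{
  for (unsigned x = 0; x < 100; x++)
    assert ((x / 3 == 2) == (x >= 6 && x <= 8));
  return 0;
}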
6599 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6600 equality/inequality test, then return a simplified form of the test
6601 using a sign test. Otherwise return NULL_TREE. TYPE is the desired
6602 result type. */
6604 static tree
6605 fold_single_bit_test_into_sign_test (location_t loc,
6606 enum tree_code code, tree arg0, tree arg1,
6607 tree result_type)
6609 /* If this is testing a single bit, we can optimize the test. */
6610 if ((code == NE_EXPR || code == EQ_EXPR)
6611 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6612 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6614 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6615 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6616 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6618 if (arg00 != NULL_TREE
6619 /* This is only a win if casting to a signed type is cheap,
6620 i.e. when arg00's type is not a partial mode. */
6621 && type_has_mode_precision_p (TREE_TYPE (arg00)))
6623 tree stype = signed_type_for (TREE_TYPE (arg00));
6624 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6625 result_type,
6626 fold_convert_loc (loc, stype, arg00),
6627 build_int_cst (stype, 0));
6631 return NULL_TREE;
6634 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6635 equality/inequality test, then return a simplified form of
6636 the test using shifts and logical operations. Otherwise return
6637 NULL. TYPE is the desired result type. */
6639 tree
6640 fold_single_bit_test (location_t loc, enum tree_code code,
6641 tree arg0, tree arg1, tree result_type)
6643 /* If this is testing a single bit, we can optimize the test. */
6644 if ((code == NE_EXPR || code == EQ_EXPR)
6645 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6646 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6648 tree inner = TREE_OPERAND (arg0, 0);
6649 tree type = TREE_TYPE (arg0);
6650 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6651 scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
6652 int ops_unsigned;
6653 tree signed_type, unsigned_type, intermediate_type;
6654 tree tem, one;
6656 /* First, see if we can fold the single bit test into a sign-bit
6657 test. */
6658 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6659 result_type);
6660 if (tem)
6661 return tem;
6663 /* Otherwise we have (A & C) != 0 where C is a single bit,
6664 convert that into ((A >> C2) & 1), where C2 = log2(C).
6665 Similarly for (A & C) == 0. */
6667 /* If INNER is a right shift of a constant and it plus BITNUM does
6668 not overflow, adjust BITNUM and INNER. */
6669 if (TREE_CODE (inner) == RSHIFT_EXPR
6670 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6671 && bitnum < TYPE_PRECISION (type)
6672 && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
6673 TYPE_PRECISION (type) - bitnum))
6675 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6676 inner = TREE_OPERAND (inner, 0);
6679 /* If we are going to be able to omit the AND below, we must do our
6680 operations as unsigned. If we must use the AND, we have a choice.
6681 Normally unsigned is faster, but for some machines signed is. */
6682 ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
6683 && !flag_syntax_only) ? 0 : 1;
6685 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6686 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6687 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6688 inner = fold_convert_loc (loc, intermediate_type, inner);
6690 if (bitnum != 0)
6691 inner = build2 (RSHIFT_EXPR, intermediate_type,
6692 inner, size_int (bitnum));
6694 one = build_int_cst (intermediate_type, 1);
6696 if (code == EQ_EXPR)
6697 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6699 /* Put the AND last so it can combine with more things. */
6700 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6702 /* Make sure to return the proper type. */
6703 inner = fold_convert_loc (loc, result_type, inner);
6705 return inner;
6707 return NULL_TREE;
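/* Standalone sketches of both single-bit rewrites above, assuming
   32-bit ints with the usual two's-complement conversion.  */

#include <assert.h>
#include <stdint.h>

int main (void)
{
  uint32_t a[] = { 0, 1, 8, 9, 0x80000000u, 0xffffffffu };
  for (int i = 0; i < 6; i++)
    {
      /* (A & C) != 0 with C = 8 = 1 << 3 becomes (A >> 3) & 1.  */
      assert (((a[i] & 8) != 0) == ((a[i] >> 3) & 1));
      /* (A & C) != 0 with C the sign bit becomes a sign test.  */
      assert (((a[i] & 0x80000000u) != 0) == ((int32_t) a[i] < 0));
    }
  return 0;
}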
6710 /* Test whether it is preferable to swap two operands, ARG0 and
6711 ARG1, for example because ARG0 is an integer constant and ARG1
6712 isn't. */
6714 bool
6715 tree_swap_operands_p (const_tree arg0, const_tree arg1)
6717 if (CONSTANT_CLASS_P (arg1))
6718 return 0;
6719 if (CONSTANT_CLASS_P (arg0))
6720 return 1;
6722 STRIP_NOPS (arg0);
6723 STRIP_NOPS (arg1);
6725 if (TREE_CONSTANT (arg1))
6726 return 0;
6727 if (TREE_CONSTANT (arg0))
6728 return 1;
6730 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6731 for commutative and comparison operators. Ensuring a canonical
6732 form allows the optimizers to find additional redundancies without
6733 having to explicitly check for both orderings. */
6734 if (TREE_CODE (arg0) == SSA_NAME
6735 && TREE_CODE (arg1) == SSA_NAME
6736 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6737 return 1;
6739 /* Put SSA_NAMEs last. */
6740 if (TREE_CODE (arg1) == SSA_NAME)
6741 return 0;
6742 if (TREE_CODE (arg0) == SSA_NAME)
6743 return 1;
6745 /* Put variables last. */
6746 if (DECL_P (arg1))
6747 return 0;
6748 if (DECL_P (arg0))
6749 return 1;
6751 return 0;
6755 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6756 means A >= Y && A != MAX, but in this case we know that
6757 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6759 static tree
6760 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6762 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6764 if (TREE_CODE (bound) == LT_EXPR)
6765 a = TREE_OPERAND (bound, 0);
6766 else if (TREE_CODE (bound) == GT_EXPR)
6767 a = TREE_OPERAND (bound, 1);
6768 else
6769 return NULL_TREE;
6771 typea = TREE_TYPE (a);
6772 if (!INTEGRAL_TYPE_P (typea)
6773 && !POINTER_TYPE_P (typea))
6774 return NULL_TREE;
6776 if (TREE_CODE (ineq) == LT_EXPR)
6778 a1 = TREE_OPERAND (ineq, 1);
6779 y = TREE_OPERAND (ineq, 0);
6781 else if (TREE_CODE (ineq) == GT_EXPR)
6783 a1 = TREE_OPERAND (ineq, 0);
6784 y = TREE_OPERAND (ineq, 1);
6786 else
6787 return NULL_TREE;
6789 if (TREE_TYPE (a1) != typea)
6790 return NULL_TREE;
6792 if (POINTER_TYPE_P (typea))
6794 /* Convert the pointer types into integers before taking the difference. */
6795 tree ta = fold_convert_loc (loc, ssizetype, a);
6796 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6797 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6799 else
6800 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6802 if (!diff || !integer_onep (diff))
6803 return NULL_TREE;
6805 return fold_build2_loc (loc, GE_EXPR, type, a, y);
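/* A standalone check of the non-sharp inequality rewrite: under the
   bound A < X, the test A + 1 > Y is the same as A >= Y (the bound is
   what guarantees A + 1 cannot overflow in the folded tree).  */

#include <assert.h>

int main (void)
{
  for (int a = -10; a <= 10; a++)
    for (int x = -10; x <= 10; x++)
      for (int y = -10; y <= 10; y++)
        assert ((a < x && a + 1 > y) == (a < x && a >= y));
  return 0;
}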
6808 /* Fold a sum or difference of at least one multiplication.
6809 Returns the folded tree or NULL if no simplification could be made. */
6811 static tree
6812 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6813 tree arg0, tree arg1)
6815 tree arg00, arg01, arg10, arg11;
6816 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6818 /* (A * C) +- (B * C) -> (A+-B) * C.
6819 (A * C) +- A -> A * (C+-1).
6820 We are most concerned about the case where C is a constant,
6821 but other combinations show up during loop reduction. Since
6822 it is not difficult, try all four possibilities. */
6824 if (TREE_CODE (arg0) == MULT_EXPR)
6826 arg00 = TREE_OPERAND (arg0, 0);
6827 arg01 = TREE_OPERAND (arg0, 1);
6829 else if (TREE_CODE (arg0) == INTEGER_CST)
6831 arg00 = build_one_cst (type);
6832 arg01 = arg0;
6834 else
6836 /* We cannot generate constant 1 for fract. */
6837 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6838 return NULL_TREE;
6839 arg00 = arg0;
6840 arg01 = build_one_cst (type);
6842 if (TREE_CODE (arg1) == MULT_EXPR)
6844 arg10 = TREE_OPERAND (arg1, 0);
6845 arg11 = TREE_OPERAND (arg1, 1);
6847 else if (TREE_CODE (arg1) == INTEGER_CST)
6849 arg10 = build_one_cst (type);
6850 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
6851 the purpose of this canonicalization. */
6852 if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
6853 && negate_expr_p (arg1)
6854 && code == PLUS_EXPR)
6856 arg11 = negate_expr (arg1);
6857 code = MINUS_EXPR;
6859 else
6860 arg11 = arg1;
6862 else
6864 /* We cannot generate constant 1 for fract. */
6865 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6866 return NULL_TREE;
6867 arg10 = arg1;
6868 arg11 = build_one_cst (type);
6870 same = NULL_TREE;
6872 /* Prefer factoring a common non-constant. */
6873 if (operand_equal_p (arg00, arg10, 0))
6874 same = arg00, alt0 = arg01, alt1 = arg11;
6875 else if (operand_equal_p (arg01, arg11, 0))
6876 same = arg01, alt0 = arg00, alt1 = arg10;
6877 else if (operand_equal_p (arg00, arg11, 0))
6878 same = arg00, alt0 = arg01, alt1 = arg10;
6879 else if (operand_equal_p (arg01, arg10, 0))
6880 same = arg01, alt0 = arg00, alt1 = arg11;
6882 /* No identical multiplicands; see if we can find a common
6883 power-of-two factor in non-power-of-two multiplies. This
6884 can help in multi-dimensional array access. */
6885 else if (tree_fits_shwi_p (arg01)
6886 && tree_fits_shwi_p (arg11))
6888 HOST_WIDE_INT int01, int11, tmp;
6889 bool swap = false;
6890 tree maybe_same;
6891 int01 = tree_to_shwi (arg01);
6892 int11 = tree_to_shwi (arg11);
6894 /* Move min of absolute values to int11. */
6895 if (absu_hwi (int01) < absu_hwi (int11))
6897 tmp = int01, int01 = int11, int11 = tmp;
6898 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6899 maybe_same = arg01;
6900 swap = true;
6902 else
6903 maybe_same = arg11;
6905 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
6906 /* The remainder should not be a constant, otherwise we
6907 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
6908 increased the number of multiplications necessary. */
6909 && TREE_CODE (arg10) != INTEGER_CST)
6911 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
6912 build_int_cst (TREE_TYPE (arg00),
6913 int01 / int11));
6914 alt1 = arg10;
6915 same = maybe_same;
6916 if (swap)
6917 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6921 if (!same)
6922 return NULL_TREE;
6924 if (! INTEGRAL_TYPE_P (type)
6925 || TYPE_OVERFLOW_WRAPS (type)
6926 /* We are neither factoring zero nor minus one. */
6927 || TREE_CODE (same) == INTEGER_CST)
6928 return fold_build2_loc (loc, MULT_EXPR, type,
6929 fold_build2_loc (loc, code, type,
6930 fold_convert_loc (loc, type, alt0),
6931 fold_convert_loc (loc, type, alt1)),
6932 fold_convert_loc (loc, type, same));
6934 /* Same may be zero and thus the operation 'code' may overflow. Likewise
6935 same may be minus one and thus the multiplication may overflow. Perform
6936 the operations in an unsigned type. */
6937 tree utype = unsigned_type_for (type);
6938 tree tem = fold_build2_loc (loc, code, utype,
6939 fold_convert_loc (loc, utype, alt0),
6940 fold_convert_loc (loc, utype, alt1));
6941 /* If the sum evaluated to a constant that is not -INF the multiplication
6942 cannot overflow. */
6943 if (TREE_CODE (tem) == INTEGER_CST
6944 && (wi::to_wide (tem)
6945 != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
6946 return fold_build2_loc (loc, MULT_EXPR, type,
6947 fold_convert (type, tem), same);
6949 return fold_convert_loc (loc, type,
6950 fold_build2_loc (loc, MULT_EXPR, utype, tem,
6951 fold_convert_loc (loc, utype, same)));
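/* Standalone checks of the two factorings above, done on unsigned
   values where wrapping keeps both forms equivalent; the constants
   12, 28 and 4 are arbitrary.  */

#include <assert.h>

int main (void)
{
  for (unsigned a = 0; a < 200; a++)
    for (unsigned b = 0; b < 200; b++)
      {
        /* (A * C) + (B * C) -> (A + B) * C.  */
        assert (a * 12 + b * 12 == (a + b) * 12);
        /* Common power-of-two factor: A*28 + B*4 -> (A*7 + B) * 4.  */
        assert (a * 28 + b * 4 == (a * 7 + b) * 4);
      }
  return 0;
}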
6954 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6955 specified by EXPR into the buffer PTR of length LEN bytes.
6956 Return the number of bytes placed in the buffer, or zero
6957 upon failure. */
6959 static int
6960 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
6962 tree type = TREE_TYPE (expr);
6963 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
6964 int byte, offset, word, words;
6965 unsigned char value;
6967 if ((off == -1 && total_bytes > len) || off >= total_bytes)
6968 return 0;
6969 if (off == -1)
6970 off = 0;
6972 if (ptr == NULL)
6973 /* Dry run. */
6974 return MIN (len, total_bytes - off);
6976 words = total_bytes / UNITS_PER_WORD;
6978 for (byte = 0; byte < total_bytes; byte++)
6980 int bitpos = byte * BITS_PER_UNIT;
6981 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
6982 number of bytes. */
6983 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
6985 if (total_bytes > UNITS_PER_WORD)
6987 word = byte / UNITS_PER_WORD;
6988 if (WORDS_BIG_ENDIAN)
6989 word = (words - 1) - word;
6990 offset = word * UNITS_PER_WORD;
6991 if (BYTES_BIG_ENDIAN)
6992 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6993 else
6994 offset += byte % UNITS_PER_WORD;
6996 else
6997 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6998 if (offset >= off && offset - off < len)
6999 ptr[offset - off] = value;
7001 return MIN (len, total_bytes - off);
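/* A hedged standalone analogue of the byte loop above, assuming 8-bit
   bytes and a 32-bit value: extracting byte I as (V >> (I * 8)) & 0xff
   is what the function stores at offset I when !BYTES_BIG_ENDIAN, and
   reading the bytes back high-to-low reverses it, as
   native_interpret_int's wi::from_buffer does.  */

#include <assert.h>
#include <stdint.h>

int main (void)
{
  uint32_t v = 0x11223344u, w = 0;
  unsigned char buf[4];
  for (int byte = 0; byte < 4; byte++)
    buf[byte] = (v >> (byte * 8)) & 0xff;
  assert (buf[0] == 0x44 && buf[3] == 0x11);
  for (int byte = 3; byte >= 0; byte--)
    w = (w << 8) | buf[byte];
  assert (w == v);
  return 0;
}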
7005 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7006 specified by EXPR into the buffer PTR of length LEN bytes.
7007 Return the number of bytes placed in the buffer, or zero
7008 upon failure. */
7010 static int
7011 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7013 tree type = TREE_TYPE (expr);
7014 scalar_mode mode = SCALAR_TYPE_MODE (type);
7015 int total_bytes = GET_MODE_SIZE (mode);
7016 FIXED_VALUE_TYPE value;
7017 tree i_value, i_type;
7019 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7020 return 0;
7022 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7024 if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7025 return 0;
7027 value = TREE_FIXED_CST (expr);
7028 i_value = double_int_to_tree (i_type, value.data);
7030 return native_encode_int (i_value, ptr, len, off);
7034 /* Subroutine of native_encode_expr. Encode the REAL_CST
7035 specified by EXPR into the buffer PTR of length LEN bytes.
7036 Return the number of bytes placed in the buffer, or zero
7037 upon failure. */
7039 static int
7040 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7042 tree type = TREE_TYPE (expr);
7043 int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7044 int byte, offset, word, words, bitpos;
7045 unsigned char value;
7047 /* There are always 32 bits in each long, no matter the size of
7048 the host's long. We handle floating point representations with
7049 up to 192 bits. */
7050 long tmp[6];
7052 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7053 return 0;
7054 if (off == -1)
7055 off = 0;
7057 if (ptr == NULL)
7058 /* Dry run. */
7059 return MIN (len, total_bytes - off);
7061 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7063 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7065 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7066 bitpos += BITS_PER_UNIT)
7068 byte = (bitpos / BITS_PER_UNIT) & 3;
7069 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7071 if (UNITS_PER_WORD < 4)
7073 word = byte / UNITS_PER_WORD;
7074 if (WORDS_BIG_ENDIAN)
7075 word = (words - 1) - word;
7076 offset = word * UNITS_PER_WORD;
7077 if (BYTES_BIG_ENDIAN)
7078 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7079 else
7080 offset += byte % UNITS_PER_WORD;
7082 else
7084 offset = byte;
7085 if (BYTES_BIG_ENDIAN)
7087 /* Reverse bytes within each long, or within the entire float
7088 if it's smaller than a long (for HFmode). */
7089 offset = MIN (3, total_bytes - 1) - offset;
7090 gcc_assert (offset >= 0);
7093 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7094 if (offset >= off
7095 && offset - off < len)
7096 ptr[offset - off] = value;
7098 return MIN (len, total_bytes - off);
7101 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7102 specified by EXPR into the buffer PTR of length LEN bytes.
7103 Return the number of bytes placed in the buffer, or zero
7104 upon failure. */
7106 static int
7107 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7109 int rsize, isize;
7110 tree part;
7112 part = TREE_REALPART (expr);
7113 rsize = native_encode_expr (part, ptr, len, off);
7114 if (off == -1 && rsize == 0)
7115 return 0;
7116 part = TREE_IMAGPART (expr);
7117 if (off != -1)
7118 off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7119 isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7120 len - rsize, off);
7121 if (off == -1 && isize != rsize)
7122 return 0;
7123 return rsize + isize;
7127 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7128 specified by EXPR into the buffer PTR of length LEN bytes.
7129 Return the number of bytes placed in the buffer, or zero
7130 upon failure. */
7132 static int
7133 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7135 unsigned i, count;
7136 int size, offset;
7137 tree itype, elem;
7139 offset = 0;
7140 count = VECTOR_CST_NELTS (expr);
7141 itype = TREE_TYPE (TREE_TYPE (expr));
7142 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7143 for (i = 0; i < count; i++)
7145 if (off >= size)
7147 off -= size;
7148 continue;
7150 elem = VECTOR_CST_ELT (expr, i);
7151 int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7152 len - offset, off);
7153 if ((off == -1 && res != size) || res == 0)
7154 return 0;
7155 offset += res;
7156 if (offset >= len)
7157 return offset;
7158 if (off != -1)
7159 off = 0;
7161 return offset;
7165 /* Subroutine of native_encode_expr. Encode the STRING_CST
7166 specified by EXPR into the buffer PTR of length LEN bytes.
7167 Return the number of bytes placed in the buffer, or zero
7168 upon failure. */
7170 static int
7171 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7173 tree type = TREE_TYPE (expr);
7175 /* Wide-char strings are encoded in target byte order, so encoding
7176 them natively is trivial. */
7177 if (BITS_PER_UNIT != CHAR_BIT
7178 || TREE_CODE (type) != ARRAY_TYPE
7179 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7180 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7181 return 0;
7183 HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7184 if ((off == -1 && total_bytes > len) || off >= total_bytes)
7185 return 0;
7186 if (off == -1)
7187 off = 0;
7188 if (ptr == NULL)
7189 /* Dry run. */;
7190 else if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7192 int written = 0;
7193 if (off < TREE_STRING_LENGTH (expr))
7195 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7196 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7198 memset (ptr + written, 0,
7199 MIN (total_bytes - written, len - written));
7201 else
7202 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7203 return MIN (total_bytes - off, len);
7207 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7208 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7209 buffer PTR of length LEN bytes. If PTR is NULL, don't actually store
7210 anything, just do a dry run. If OFF is not -1 then start
7211 the encoding at byte offset OFF and encode at most LEN bytes.
7212 Return the number of bytes placed in the buffer, or zero upon failure. */
7214 int
7215 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7217 /* We don't support starting at a negative offset, and -1 is special. */
7218 if (off < -1)
7219 return 0;
7221 switch (TREE_CODE (expr))
7223 case INTEGER_CST:
7224 return native_encode_int (expr, ptr, len, off);
7226 case REAL_CST:
7227 return native_encode_real (expr, ptr, len, off);
7229 case FIXED_CST:
7230 return native_encode_fixed (expr, ptr, len, off);
7232 case COMPLEX_CST:
7233 return native_encode_complex (expr, ptr, len, off);
7235 case VECTOR_CST:
7236 return native_encode_vector (expr, ptr, len, off);
7238 case STRING_CST:
7239 return native_encode_string (expr, ptr, len, off);
7241 default:
7242 return 0;
7247 /* Subroutine of native_interpret_expr. Interpret the contents of
7248 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7249 If the buffer cannot be interpreted, return NULL_TREE. */
7251 static tree
7252 native_interpret_int (tree type, const unsigned char *ptr, int len)
7254 int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7256 if (total_bytes > len
7257 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7258 return NULL_TREE;
7260 wide_int result = wi::from_buffer (ptr, total_bytes);
7262 return wide_int_to_tree (type, result);
7266 /* Subroutine of native_interpret_expr. Interpret the contents of
7267 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7268 If the buffer cannot be interpreted, return NULL_TREE. */
7270 static tree
7271 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7273 scalar_mode mode = SCALAR_TYPE_MODE (type);
7274 int total_bytes = GET_MODE_SIZE (mode);
7275 double_int result;
7276 FIXED_VALUE_TYPE fixed_value;
7278 if (total_bytes > len
7279 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7280 return NULL_TREE;
7282 result = double_int::from_buffer (ptr, total_bytes);
7283 fixed_value = fixed_from_double_int (result, mode);
7285 return build_fixed (type, fixed_value);
7289 /* Subroutine of native_interpret_expr. Interpret the contents of
7290 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7291 If the buffer cannot be interpreted, return NULL_TREE. */
7293 static tree
7294 native_interpret_real (tree type, const unsigned char *ptr, int len)
7296 scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
7297 int total_bytes = GET_MODE_SIZE (mode);
7298 unsigned char value;
7299 /* There are always 32 bits in each long, no matter the size of
7300 the host's long. We handle floating point representations with
7301 up to 192 bits. */
7302 REAL_VALUE_TYPE r;
7303 long tmp[6];
7305 if (total_bytes > len || total_bytes > 24)
7306 return NULL_TREE;
7307 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7309 memset (tmp, 0, sizeof (tmp));
7310 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7311 bitpos += BITS_PER_UNIT)
7313 /* Both OFFSET and BYTE index within a long;
7314 bitpos indexes the whole float. */
7315 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7316 if (UNITS_PER_WORD < 4)
7318 int word = byte / UNITS_PER_WORD;
7319 if (WORDS_BIG_ENDIAN)
7320 word = (words - 1) - word;
7321 offset = word * UNITS_PER_WORD;
7322 if (BYTES_BIG_ENDIAN)
7323 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7324 else
7325 offset += byte % UNITS_PER_WORD;
7327 else
7329 offset = byte;
7330 if (BYTES_BIG_ENDIAN)
7332 /* Reverse bytes within each long, or within the entire float
7333 if it's smaller than a long (for HFmode). */
7334 offset = MIN (3, total_bytes - 1) - offset;
7335 gcc_assert (offset >= 0);
7338 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7340 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7343 real_from_target (&r, tmp, mode);
7344 return build_real (type, r);
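/* Worked example of the shuffling above (assuming a 4-byte SFmode
   value on a big-endian target with UNITS_PER_WORD >= 4): offset
   becomes 3 - byte, so the buffer { 0x3f, 0x80, 0x00, 0x00 } packs
   into tmp[0] == 0x3f800000, which real_from_target decodes as the
   IEEE single-precision value 1.0. */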
7348 /* Subroutine of native_interpret_expr. Interpret the contents of
7349 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7350 If the buffer cannot be interpreted, return NULL_TREE. */
7352 static tree
7353 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7355 tree etype, rpart, ipart;
7356 int size;
7358 etype = TREE_TYPE (type);
7359 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7360 if (size * 2 > len)
7361 return NULL_TREE;
7362 rpart = native_interpret_expr (etype, ptr, size);
7363 if (!rpart)
7364 return NULL_TREE;
7365 ipart = native_interpret_expr (etype, ptr+size, size);
7366 if (!ipart)
7367 return NULL_TREE;
7368 return build_complex (type, rpart, ipart);
7372 /* Subroutine of native_interpret_expr. Interpret the contents of
7373 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7374 If the buffer cannot be interpreted, return NULL_TREE. */
7376 static tree
7377 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7379 tree etype, elem;
7380 int i, size, count;
7382 etype = TREE_TYPE (type);
7383 size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7384 count = TYPE_VECTOR_SUBPARTS (type);
7385 if (size * count > len)
7386 return NULL_TREE;
7388 auto_vec<tree, 32> elements (count);
7389 for (i = 0; i < count; ++i)
7391 elem = native_interpret_expr (etype, ptr+(i*size), size);
7392 if (!elem)
7393 return NULL_TREE;
7394 elements.quick_push (elem);
7396 return build_vector (type, elements);
7400 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7401 the buffer PTR of length LEN as a constant of type TYPE. For
7402 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7403 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7404 return NULL_TREE. */
7406 tree
7407 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7409 switch (TREE_CODE (type))
7411 case INTEGER_TYPE:
7412 case ENUMERAL_TYPE:
7413 case BOOLEAN_TYPE:
7414 case POINTER_TYPE:
7415 case REFERENCE_TYPE:
7416 return native_interpret_int (type, ptr, len);
7418 case REAL_TYPE:
7419 return native_interpret_real (type, ptr, len);
7421 case FIXED_POINT_TYPE:
7422 return native_interpret_fixed (type, ptr, len);
7424 case COMPLEX_TYPE:
7425 return native_interpret_complex (type, ptr, len);
7427 case VECTOR_TYPE:
7428 return native_interpret_vector (type, ptr, len);
7430 default:
7431 return NULL_TREE;
7435 /* Returns true if we can interpret the contents of a native encoding
7436 as TYPE. */
7438 static bool
7439 can_native_interpret_type_p (tree type)
7441 switch (TREE_CODE (type))
7443 case INTEGER_TYPE:
7444 case ENUMERAL_TYPE:
7445 case BOOLEAN_TYPE:
7446 case POINTER_TYPE:
7447 case REFERENCE_TYPE:
7448 case FIXED_POINT_TYPE:
7449 case REAL_TYPE:
7450 case COMPLEX_TYPE:
7451 case VECTOR_TYPE:
7452 return true;
7453 default:
7454 return false;
7459 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7460 TYPE at compile-time. If we're unable to perform the conversion
7461 return NULL_TREE. */
7463 static tree
7464 fold_view_convert_expr (tree type, tree expr)
7466 /* We support up to 512-bit values (for V8DFmode). */
7467 unsigned char buffer[64];
7468 int len;
7470 /* Check that the host and target are sane. */
7471 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7472 return NULL_TREE;
7474 len = native_encode_expr (expr, buffer, sizeof (buffer));
7475 if (len == 0)
7476 return NULL_TREE;
7478 return native_interpret_expr (type, buffer, len);
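/* Example of the round trip (a sketch assuming 32-bit int, IEEE
   single precision and 8-bit units): folding
   VIEW_CONVERT_EXPR<int>(1.0f) encodes the REAL_CST into four buffer
   bytes and reinterprets them as an INTEGER_CST of value 0x3f800000,
   i.e. a compile-time bit_cast between float and int. */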
7481 /* Build an expression for the address of T. Folds away INDIRECT_REF
7482 to avoid confusing the gimplify process. */
7484 tree
7485 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7487 /* The size of the object is not relevant when talking about its address. */
7488 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7489 t = TREE_OPERAND (t, 0);
7491 if (TREE_CODE (t) == INDIRECT_REF)
7493 t = TREE_OPERAND (t, 0);
7495 if (TREE_TYPE (t) != ptrtype)
7496 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7498 else if (TREE_CODE (t) == MEM_REF
7499 && integer_zerop (TREE_OPERAND (t, 1)))
7500 return TREE_OPERAND (t, 0);
7501 else if (TREE_CODE (t) == MEM_REF
7502 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7503 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7504 TREE_OPERAND (t, 0),
7505 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7506 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7508 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7510 if (TREE_TYPE (t) != ptrtype)
7511 t = fold_convert_loc (loc, ptrtype, t);
7513 else
7514 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7516 return t;
7519 /* Build an expression for the address of T. */
7521 tree
7522 build_fold_addr_expr_loc (location_t loc, tree t)
7524 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7526 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7529 /* Fold a unary expression of code CODE and type TYPE with operand
7530 OP0. Return the folded expression if folding is successful.
7531 Otherwise, return NULL_TREE. */
7533 tree
7534 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7536 tree tem;
7537 tree arg0;
7538 enum tree_code_class kind = TREE_CODE_CLASS (code);
7540 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7541 && TREE_CODE_LENGTH (code) == 1);
7543 arg0 = op0;
7544 if (arg0)
7546 if (CONVERT_EXPR_CODE_P (code)
7547 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7549 /* Don't use STRIP_NOPS, because signedness of argument type
7550 matters. */
7551 STRIP_SIGN_NOPS (arg0);
7553 else
7555 /* Strip any conversions that don't change the mode. This
7556 is safe for every expression, except for a comparison
7557 expression because its signedness is derived from its
7558 operands.
7560 Note that this is done as an internal manipulation within
7561 the constant folder, in order to find the simplest
7562 representation of the arguments so that their form can be
7563 studied. In any case, the appropriate type conversions
7564 should be put back in the tree that will get out of the
7565 constant folder. */
7566 STRIP_NOPS (arg0);
7569 if (CONSTANT_CLASS_P (arg0))
7571 tree tem = const_unop (code, type, arg0);
7572 if (tem)
7574 if (TREE_TYPE (tem) != type)
7575 tem = fold_convert_loc (loc, type, tem);
7576 return tem;
7581 tem = generic_simplify (loc, code, type, op0);
7582 if (tem)
7583 return tem;
7585 if (TREE_CODE_CLASS (code) == tcc_unary)
7587 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7588 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7589 fold_build1_loc (loc, code, type,
7590 fold_convert_loc (loc, TREE_TYPE (op0),
7591 TREE_OPERAND (arg0, 1))));
7592 else if (TREE_CODE (arg0) == COND_EXPR)
7594 tree arg01 = TREE_OPERAND (arg0, 1);
7595 tree arg02 = TREE_OPERAND (arg0, 2);
7596 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7597 arg01 = fold_build1_loc (loc, code, type,
7598 fold_convert_loc (loc,
7599 TREE_TYPE (op0), arg01));
7600 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7601 arg02 = fold_build1_loc (loc, code, type,
7602 fold_convert_loc (loc,
7603 TREE_TYPE (op0), arg02));
7604 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7605 arg01, arg02);
7607 /* If this was a conversion, and all we did was to move it
7608 inside the COND_EXPR, bring it back out. But leave it if
7609 it is a conversion from integer to integer and the
7610 result precision is no wider than a word since such a
7611 conversion is cheap and may be optimized away by combine,
7612 while it couldn't if it were outside the COND_EXPR. Then return
7613 so we don't get into an infinite recursion loop taking the
7614 conversion out and then back in. */
7616 if ((CONVERT_EXPR_CODE_P (code)
7617 || code == NON_LVALUE_EXPR)
7618 && TREE_CODE (tem) == COND_EXPR
7619 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7620 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7621 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7622 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7623 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7624 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7625 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7626 && (INTEGRAL_TYPE_P
7627 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7628 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7629 || flag_syntax_only))
7630 tem = build1_loc (loc, code, type,
7631 build3 (COND_EXPR,
7632 TREE_TYPE (TREE_OPERAND
7633 (TREE_OPERAND (tem, 1), 0)),
7634 TREE_OPERAND (tem, 0),
7635 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7636 TREE_OPERAND (TREE_OPERAND (tem, 2),
7637 0)));
7638 return tem;
7642 switch (code)
7644 case NON_LVALUE_EXPR:
7645 if (!maybe_lvalue_p (op0))
7646 return fold_convert_loc (loc, type, op0);
7647 return NULL_TREE;
7649 CASE_CONVERT:
7650 case FLOAT_EXPR:
7651 case FIX_TRUNC_EXPR:
7652 if (COMPARISON_CLASS_P (op0))
7654 /* If we have (type) (a CMP b) and type is an integral type, return
7655 new expression involving the new type. Canonicalize
7656 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7657 non-integral type.
7658 Do not fold the result as that would not simplify further, also
7659 folding again results in recursions. */
7660 if (TREE_CODE (type) == BOOLEAN_TYPE)
7661 return build2_loc (loc, TREE_CODE (op0), type,
7662 TREE_OPERAND (op0, 0),
7663 TREE_OPERAND (op0, 1));
7664 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7665 && TREE_CODE (type) != VECTOR_TYPE)
7666 return build3_loc (loc, COND_EXPR, type, op0,
7667 constant_boolean_node (true, type),
7668 constant_boolean_node (false, type));
7671 /* Handle (T *)&A.B.C for A being of type T and B and C
7672 living at offset zero. This occurs frequently in
7673 C++ upcasting and then accessing the base. */
7674 if (TREE_CODE (op0) == ADDR_EXPR
7675 && POINTER_TYPE_P (type)
7676 && handled_component_p (TREE_OPERAND (op0, 0)))
7678 HOST_WIDE_INT bitsize, bitpos;
7679 tree offset;
7680 machine_mode mode;
7681 int unsignedp, reversep, volatilep;
7682 tree base
7683 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7684 &offset, &mode, &unsignedp, &reversep,
7685 &volatilep);
7686 /* If the reference was to a (constant) zero offset, we can use
7687 the address of the base if it has the same base type
7688 as the result type and the pointer type is unqualified. */
7689 if (! offset && bitpos == 0
7690 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7691 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7692 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7693 return fold_convert_loc (loc, type,
7694 build_fold_addr_expr_loc (loc, base));
7697 if (TREE_CODE (op0) == MODIFY_EXPR
7698 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7699 /* Detect assigning a bitfield. */
7700 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7701 && DECL_BIT_FIELD
7702 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7704 /* Don't leave an assignment inside a conversion
7705 unless assigning a bitfield. */
7706 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7707 /* First do the assignment, then return converted constant. */
7708 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7709 TREE_NO_WARNING (tem) = 1;
7710 TREE_USED (tem) = 1;
7711 return tem;
7714 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7715 constant (if x has signed type, the sign bit cannot be set
7716 in c). This folds extension into the BIT_AND_EXPR.
7717 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7718 very likely don't have maximal range for their precision and this
7719 transformation effectively doesn't preserve non-maximal ranges. */
7720 if (TREE_CODE (type) == INTEGER_TYPE
7721 && TREE_CODE (op0) == BIT_AND_EXPR
7722 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7724 tree and_expr = op0;
7725 tree and0 = TREE_OPERAND (and_expr, 0);
7726 tree and1 = TREE_OPERAND (and_expr, 1);
7727 int change = 0;
7729 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7730 || (TYPE_PRECISION (type)
7731 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7732 change = 1;
7733 else if (TYPE_PRECISION (TREE_TYPE (and1))
7734 <= HOST_BITS_PER_WIDE_INT
7735 && tree_fits_uhwi_p (and1))
7737 unsigned HOST_WIDE_INT cst;
7739 cst = tree_to_uhwi (and1);
7740 cst &= HOST_WIDE_INT_M1U
7741 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7742 change = (cst == 0);
7743 if (change
7744 && !flag_syntax_only
7745 && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
7746 == ZERO_EXTEND))
7748 tree uns = unsigned_type_for (TREE_TYPE (and0));
7749 and0 = fold_convert_loc (loc, uns, and0);
7750 and1 = fold_convert_loc (loc, uns, and1);
7753 if (change)
7755 tem = force_fit_type (type, wi::to_widest (and1), 0,
7756 TREE_OVERFLOW (and1));
7757 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7758 fold_convert_loc (loc, type, and0), tem);
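/* Worked example for the fold above (hypothetical operands): with
   int x, (unsigned short) (x & 0xff) becomes (unsigned short) x & 0xff;
   truncation commutes with the bitwise AND here, so the conversion
   is folded into the BIT_AND_EXPR. */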
7762 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7763 cast (T1)X will fold away. We assume that this happens when X itself
7764 is a cast. */
7765 if (POINTER_TYPE_P (type)
7766 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7767 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7769 tree arg00 = TREE_OPERAND (arg0, 0);
7770 tree arg01 = TREE_OPERAND (arg0, 1);
7772 return fold_build_pointer_plus_loc
7773 (loc, fold_convert_loc (loc, type, arg00), arg01);
7776 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7777 of the same precision, and X is an integer type not narrower than
7778 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7779 if (INTEGRAL_TYPE_P (type)
7780 && TREE_CODE (op0) == BIT_NOT_EXPR
7781 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7782 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7783 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7785 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7786 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7787 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7788 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7789 fold_convert_loc (loc, type, tem));
7792 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7793 type of X and Y (integer types only). */
7794 if (INTEGRAL_TYPE_P (type)
7795 && TREE_CODE (op0) == MULT_EXPR
7796 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7797 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7799 /* Be careful not to introduce new overflows. */
7800 tree mult_type;
7801 if (TYPE_OVERFLOW_WRAPS (type))
7802 mult_type = type;
7803 else
7804 mult_type = unsigned_type_for (type);
7806 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7808 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7809 fold_convert_loc (loc, mult_type,
7810 TREE_OPERAND (op0, 0)),
7811 fold_convert_loc (loc, mult_type,
7812 TREE_OPERAND (op0, 1)));
7813 return fold_convert_loc (loc, type, tem);
7817 return NULL_TREE;
7819 case VIEW_CONVERT_EXPR:
7820 if (TREE_CODE (op0) == MEM_REF)
7822 if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
7823 type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
7824 tem = fold_build2_loc (loc, MEM_REF, type,
7825 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7826 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7827 return tem;
7830 return NULL_TREE;
7832 case NEGATE_EXPR:
7833 tem = fold_negate_expr (loc, arg0);
7834 if (tem)
7835 return fold_convert_loc (loc, type, tem);
7836 return NULL_TREE;
7838 case ABS_EXPR:
7839 /* Convert fabs((double)float) into (double)fabsf(float). */
7840 if (TREE_CODE (arg0) == NOP_EXPR
7841 && TREE_CODE (type) == REAL_TYPE)
7843 tree targ0 = strip_float_extensions (arg0);
7844 if (targ0 != arg0)
7845 return fold_convert_loc (loc, type,
7846 fold_build1_loc (loc, ABS_EXPR,
7847 TREE_TYPE (targ0),
7848 targ0));
7850 return NULL_TREE;
7852 case BIT_NOT_EXPR:
7853 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7854 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7855 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7856 fold_convert_loc (loc, type,
7857 TREE_OPERAND (arg0, 0)))))
7858 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7859 fold_convert_loc (loc, type,
7860 TREE_OPERAND (arg0, 1)));
7861 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7862 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7863 fold_convert_loc (loc, type,
7864 TREE_OPERAND (arg0, 1)))))
7865 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7866 fold_convert_loc (loc, type,
7867 TREE_OPERAND (arg0, 0)), tem);
7869 return NULL_TREE;
7871 case TRUTH_NOT_EXPR:
7872 /* Note that the operand of this must be an int
7873 and its values must be 0 or 1.
7874 ("true" is a fixed value perhaps depending on the language,
7875 but we don't handle values other than 1 correctly yet.) */
7876 tem = fold_truth_not_expr (loc, arg0);
7877 if (!tem)
7878 return NULL_TREE;
7879 return fold_convert_loc (loc, type, tem);
7881 case INDIRECT_REF:
7882 /* Fold *&X to X if X is an lvalue. */
7883 if (TREE_CODE (op0) == ADDR_EXPR)
7885 tree op00 = TREE_OPERAND (op0, 0);
7886 if ((VAR_P (op00)
7887 || TREE_CODE (op00) == PARM_DECL
7888 || TREE_CODE (op00) == RESULT_DECL)
7889 && !TREE_READONLY (op00))
7890 return op00;
7892 return NULL_TREE;
7894 default:
7895 return NULL_TREE;
7896 } /* switch (code) */
7900 /* If the operation was a conversion do _not_ mark a resulting constant
7901 with TREE_OVERFLOW if the original constant was not. These conversions
7902 have implementation defined behavior and retaining the TREE_OVERFLOW
7903 flag here would confuse later passes such as VRP. */
7904 tree
7905 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7906 tree type, tree op0)
7908 tree res = fold_unary_loc (loc, code, type, op0);
7909 if (res
7910 && TREE_CODE (res) == INTEGER_CST
7911 && TREE_CODE (op0) == INTEGER_CST
7912 && CONVERT_EXPR_CODE_P (code))
7913 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7915 return res;
7918 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7919 operands OP0 and OP1. LOC is the location of the resulting expression.
7920 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
7921 Return the folded expression if folding is successful. Otherwise,
7922 return NULL_TREE. */
7923 static tree
7924 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7925 tree arg0, tree arg1, tree op0, tree op1)
7927 tree tem;
7929 /* We only do these simplifications if we are optimizing. */
7930 if (!optimize)
7931 return NULL_TREE;
7933 /* Check for things like (A || B) && (A || C). We can convert this
7934 to A || (B && C). Note that either operator can be any of the four
7935 truth and/or operations and the transformation will still be
7936 valid. Also note that we only care about order for the
7937 ANDIF and ORIF operators. If B contains side effects, this
7938 might change the truth-value of A. */
7939 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7940 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7941 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7942 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7943 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7944 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7946 tree a00 = TREE_OPERAND (arg0, 0);
7947 tree a01 = TREE_OPERAND (arg0, 1);
7948 tree a10 = TREE_OPERAND (arg1, 0);
7949 tree a11 = TREE_OPERAND (arg1, 1);
7950 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7951 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7952 && (code == TRUTH_AND_EXPR
7953 || code == TRUTH_OR_EXPR));
7955 if (operand_equal_p (a00, a10, 0))
7956 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7957 fold_build2_loc (loc, code, type, a01, a11));
7958 else if (commutative && operand_equal_p (a00, a11, 0))
7959 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
7960 fold_build2_loc (loc, code, type, a01, a10));
7961 else if (commutative && operand_equal_p (a01, a10, 0))
7962 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
7963 fold_build2_loc (loc, code, type, a00, a11));
7965 /* This case is tricky because we must either have commutative
7966 operators or else A10 must not have side-effects. */
7968 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7969 && operand_equal_p (a01, a11, 0))
7970 return fold_build2_loc (loc, TREE_CODE (arg0), type,
7971 fold_build2_loc (loc, code, type, a00, a10),
7972 a01);
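/* Worked example (hypothetical operands): with A00 == A10 == a,
   (a || b) && (a || c) folds to a || (b && c); a is still evaluated
   first, which is why only the ANDIF/ORIF forms constrain operand
   order above. */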
7975 /* See if we can build a range comparison. */
7976 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
7977 return tem;
7979 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
7980 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
7982 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
7983 if (tem)
7984 return fold_build2_loc (loc, code, type, tem, arg1);
7987 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
7988 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
7990 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
7991 if (tem)
7992 return fold_build2_loc (loc, code, type, arg0, tem);
7995 /* Check for the possibility of merging component references. If our
7996 lhs is another similar operation, try to merge its rhs with our
7997 rhs. Then try to merge our lhs and rhs. */
7998 if (TREE_CODE (arg0) == code
7999 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8000 TREE_OPERAND (arg0, 1), arg1)))
8001 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8003 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8004 return tem;
8006 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8007 && !flag_sanitize_coverage
8008 && (code == TRUTH_AND_EXPR
8009 || code == TRUTH_ANDIF_EXPR
8010 || code == TRUTH_OR_EXPR
8011 || code == TRUTH_ORIF_EXPR))
8013 enum tree_code ncode, icode;
8015 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8016 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8017 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8019 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8020 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8021 We don't want to pack more than two leafs to a non-IF AND/OR
8022 expression.
8023 If the tree code of the left-hand operand isn't an AND/OR-IF code
8024 and isn't equal to IF-CODE, then we don't want to add the
8025 right-hand operand. If the inner right-hand side of the
8026 left-hand operand has side effects, or isn't simple, then we
8027 can't add to it, as otherwise we might destroy the if-sequence. */
8028 if (TREE_CODE (arg0) == icode
8029 && simple_operand_p_2 (arg1)
8030 /* Needed for sequence points to handle trappings, and
8031 side-effects. */
8032 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8034 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8035 arg1);
8036 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8037 tem);
8039 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8040 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8041 else if (TREE_CODE (arg1) == icode
8042 && simple_operand_p_2 (arg0)
8043 /* Needed for sequence points to handle trappings, and
8044 side-effects. */
8045 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8047 tem = fold_build2_loc (loc, ncode, type,
8048 arg0, TREE_OPERAND (arg1, 0));
8049 return fold_build2_loc (loc, icode, type, tem,
8050 TREE_OPERAND (arg1, 1));
8052 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8053 into (A OR B).
8054 For sequence point consistency, we need to check for trapping,
8055 and side-effects. */
8056 else if (code == icode && simple_operand_p_2 (arg0)
8057 && simple_operand_p_2 (arg1))
8058 return fold_build2_loc (loc, ncode, type, arg0, arg1);
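/* Worked example (hypothetical operands): for ((a && b) && c) with
   simple, non-trapping b and c, the first transform above produces
   (a && (b AND c)), where the inner pair uses the non-short-circuit
   TRUTH_AND_EXPR, keeping at most two leaves per non-IF operation. */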
8061 return NULL_TREE;
8064 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8065 by changing CODE to reduce the magnitude of constants involved in
8066 ARG0 of the comparison.
8067 Returns a canonicalized comparison tree if a simplification was
8068 possible, otherwise returns NULL_TREE.
8069 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8070 valid if signed overflow is undefined. */
8072 static tree
8073 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8074 tree arg0, tree arg1,
8075 bool *strict_overflow_p)
8077 enum tree_code code0 = TREE_CODE (arg0);
8078 tree t, cst0 = NULL_TREE;
8079 int sgn0;
8081 /* Match A +- CST code arg1. We can change this only if overflow
8082 is undefined. */
8083 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8084 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8085 /* In principle pointers also have undefined overflow behavior,
8086 but that causes problems elsewhere. */
8087 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8088 && (code0 == MINUS_EXPR
8089 || code0 == PLUS_EXPR)
8090 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8091 return NULL_TREE;
8093 /* Identify the constant in arg0 and its sign. */
8094 cst0 = TREE_OPERAND (arg0, 1);
8095 sgn0 = tree_int_cst_sgn (cst0);
8097 /* Overflowed constants and zero will cause problems. */
8098 if (integer_zerop (cst0)
8099 || TREE_OVERFLOW (cst0))
8100 return NULL_TREE;
8102 /* See if we can reduce the magnitude of the constant in
8103 arg0 by changing the comparison code. */
8104 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8105 if (code == LT_EXPR
8106 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8107 code = LE_EXPR;
8108 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8109 else if (code == GT_EXPR
8110 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8111 code = GE_EXPR;
8112 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8113 else if (code == LE_EXPR
8114 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8115 code = LT_EXPR;
8116 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8117 else if (code == GE_EXPR
8118 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8119 code = GT_EXPR;
8120 else
8121 return NULL_TREE;
8122 *strict_overflow_p = true;
8124 /* Now build the constant reduced in magnitude. But not if that
8125 would produce one outside of its type's range. */
8126 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8127 && ((sgn0 == 1
8128 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8129 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8130 || (sgn0 == -1
8131 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8132 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8133 return NULL_TREE;
8135 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8136 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8137 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8138 t = fold_convert (TREE_TYPE (arg1), t);
8140 return fold_build2_loc (loc, code, type, t, arg1);
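/* Worked example (assuming a signed type with undefined overflow):
   a - 10 < b is canonicalized to a - 9 <= b; the constant's
   magnitude shrinks while the comparison code absorbs the
   difference. *STRICT_OVERFLOW_P is set because this is only valid
   when the subtraction cannot wrap. */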
8143 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8144 overflow further. Try to decrease the magnitude of constants involved
8145 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8146 and put sole constants at the second argument position.
8147 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8149 static tree
8150 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8151 tree arg0, tree arg1)
8153 tree t;
8154 bool strict_overflow_p;
8155 const char * const warnmsg = G_("assuming signed overflow does not occur "
8156 "when reducing constant in comparison");
8158 /* Try canonicalization by simplifying arg0. */
8159 strict_overflow_p = false;
8160 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8161 &strict_overflow_p);
8162 if (t)
8164 if (strict_overflow_p)
8165 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8166 return t;
8169 /* Try canonicalization by simplifying arg1 using the swapped
8170 comparison. */
8171 code = swap_tree_comparison (code);
8172 strict_overflow_p = false;
8173 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8174 &strict_overflow_p);
8175 if (t && strict_overflow_p)
8176 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8177 return t;
8180 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8181 space. This is used to avoid issuing overflow warnings for
8182 expressions like &p->x which cannot wrap. */
8184 static bool
8185 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8187 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8188 return true;
8190 if (bitpos < 0)
8191 return true;
8193 wide_int wi_offset;
8194 int precision = TYPE_PRECISION (TREE_TYPE (base));
8195 if (offset == NULL_TREE)
8196 wi_offset = wi::zero (precision);
8197 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8198 return true;
8199 else
8200 wi_offset = wi::to_wide (offset);
8202 bool overflow;
8203 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8204 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8205 if (overflow)
8206 return true;
8208 if (!wi::fits_uhwi_p (total))
8209 return true;
8211 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8212 if (size <= 0)
8213 return true;
8215 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8216 array. */
8217 if (TREE_CODE (base) == ADDR_EXPR)
8219 HOST_WIDE_INT base_size;
8221 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8222 if (base_size > 0 && size < base_size)
8223 size = base_size;
8226 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
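/* Worked example (hypothetical operands): for char a[8] with
   BASE == &a, OFFSET == NULL_TREE and BITPOS == 96, the byte offset
   is 12, which exceeds the 8-byte object, so the function returns
   true; BITPOS == 32 (i.e. &a[4]) stays inside the object and
   returns false. */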
8229 /* Return a positive integer when the symbol DECL is known to have
8230 a nonzero address, zero when it's known not to (e.g., it's a weak
8231 symbol), and a negative integer when the symbol is not yet in the
8232 symbol table and so whether or not its address is zero is unknown.
8233 For function-local objects, always return a positive integer. */
8234 static int
8235 maybe_nonzero_address (tree decl)
8237 if (DECL_P (decl) && decl_in_symtab_p (decl))
8238 if (struct symtab_node *symbol = symtab_node::get_create (decl))
8239 return symbol->nonzero_address ();
8241 /* Function local objects are never NULL. */
8242 if (DECL_P (decl)
8243 && (DECL_CONTEXT (decl)
8244 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8245 && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8246 return 1;
8248 return -1;
8251 /* Subroutine of fold_binary. This routine performs all of the
8252 transformations that are common to the equality/inequality
8253 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8254 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8255 fold_binary should call fold_binary, not this function. Fold a comparison with
8256 tree code CODE and type TYPE with operands OP0 and OP1. Return
8257 the folded comparison or NULL_TREE. */
8259 static tree
8260 fold_comparison (location_t loc, enum tree_code code, tree type,
8261 tree op0, tree op1)
8263 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8264 tree arg0, arg1, tem;
8266 arg0 = op0;
8267 arg1 = op1;
8269 STRIP_SIGN_NOPS (arg0);
8270 STRIP_SIGN_NOPS (arg1);
8272 /* For comparisons of pointers we can decompose them into a compile time
8273 comparison of the base objects and the offsets into the object.
8274 This requires at least one operand being an ADDR_EXPR or a
8275 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8276 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8277 && (TREE_CODE (arg0) == ADDR_EXPR
8278 || TREE_CODE (arg1) == ADDR_EXPR
8279 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8280 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8282 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8283 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8284 machine_mode mode;
8285 int volatilep, reversep, unsignedp;
8286 bool indirect_base0 = false, indirect_base1 = false;
8288 /* Get base and offset for the access. Strip ADDR_EXPR for
8289 get_inner_reference, but put it back by stripping INDIRECT_REF
8290 off the base object if possible. indirect_baseN will be true
8291 if baseN is not an address but refers to the object itself. */
8292 base0 = arg0;
8293 if (TREE_CODE (arg0) == ADDR_EXPR)
8295 base0
8296 = get_inner_reference (TREE_OPERAND (arg0, 0),
8297 &bitsize, &bitpos0, &offset0, &mode,
8298 &unsignedp, &reversep, &volatilep);
8299 if (TREE_CODE (base0) == INDIRECT_REF)
8300 base0 = TREE_OPERAND (base0, 0);
8301 else
8302 indirect_base0 = true;
8304 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8306 base0 = TREE_OPERAND (arg0, 0);
8307 STRIP_SIGN_NOPS (base0);
8308 if (TREE_CODE (base0) == ADDR_EXPR)
8310 base0
8311 = get_inner_reference (TREE_OPERAND (base0, 0),
8312 &bitsize, &bitpos0, &offset0, &mode,
8313 &unsignedp, &reversep, &volatilep);
8314 if (TREE_CODE (base0) == INDIRECT_REF)
8315 base0 = TREE_OPERAND (base0, 0);
8316 else
8317 indirect_base0 = true;
8319 if (offset0 == NULL_TREE || integer_zerop (offset0))
8320 offset0 = TREE_OPERAND (arg0, 1);
8321 else
8322 offset0 = size_binop (PLUS_EXPR, offset0,
8323 TREE_OPERAND (arg0, 1));
8324 if (TREE_CODE (offset0) == INTEGER_CST)
8326 offset_int tem = wi::sext (wi::to_offset (offset0),
8327 TYPE_PRECISION (sizetype));
8328 tem <<= LOG2_BITS_PER_UNIT;
8329 tem += bitpos0;
8330 if (wi::fits_shwi_p (tem))
8332 bitpos0 = tem.to_shwi ();
8333 offset0 = NULL_TREE;
8338 base1 = arg1;
8339 if (TREE_CODE (arg1) == ADDR_EXPR)
8341 base1
8342 = get_inner_reference (TREE_OPERAND (arg1, 0),
8343 &bitsize, &bitpos1, &offset1, &mode,
8344 &unsignedp, &reversep, &volatilep);
8345 if (TREE_CODE (base1) == INDIRECT_REF)
8346 base1 = TREE_OPERAND (base1, 0);
8347 else
8348 indirect_base1 = true;
8350 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8352 base1 = TREE_OPERAND (arg1, 0);
8353 STRIP_SIGN_NOPS (base1);
8354 if (TREE_CODE (base1) == ADDR_EXPR)
8356 base1
8357 = get_inner_reference (TREE_OPERAND (base1, 0),
8358 &bitsize, &bitpos1, &offset1, &mode,
8359 &unsignedp, &reversep, &volatilep);
8360 if (TREE_CODE (base1) == INDIRECT_REF)
8361 base1 = TREE_OPERAND (base1, 0);
8362 else
8363 indirect_base1 = true;
8365 if (offset1 == NULL_TREE || integer_zerop (offset1))
8366 offset1 = TREE_OPERAND (arg1, 1);
8367 else
8368 offset1 = size_binop (PLUS_EXPR, offset1,
8369 TREE_OPERAND (arg1, 1));
8370 if (TREE_CODE (offset1) == INTEGER_CST)
8372 offset_int tem = wi::sext (wi::to_offset (offset1),
8373 TYPE_PRECISION (sizetype));
8374 tem <<= LOG2_BITS_PER_UNIT;
8375 tem += bitpos1;
8376 if (wi::fits_shwi_p (tem))
8378 bitpos1 = tem.to_shwi ();
8379 offset1 = NULL_TREE;
8384 /* If we have equivalent bases we might be able to simplify. */
8385 if (indirect_base0 == indirect_base1
8386 && operand_equal_p (base0, base1,
8387 indirect_base0 ? OEP_ADDRESS_OF : 0))
8389 /* We can fold this expression to a constant if the non-constant
8390 offset parts are equal. */
8391 if (offset0 == offset1
8392 || (offset0 && offset1
8393 && operand_equal_p (offset0, offset1, 0)))
8395 if (!equality_code
8396 && bitpos0 != bitpos1
8397 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8398 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8399 fold_overflow_warning (("assuming pointer wraparound does not "
8400 "occur when comparing P +- C1 with "
8401 "P +- C2"),
8402 WARN_STRICT_OVERFLOW_CONDITIONAL);
8404 switch (code)
8406 case EQ_EXPR:
8407 return constant_boolean_node (bitpos0 == bitpos1, type);
8408 case NE_EXPR:
8409 return constant_boolean_node (bitpos0 != bitpos1, type);
8410 case LT_EXPR:
8411 return constant_boolean_node (bitpos0 < bitpos1, type);
8412 case LE_EXPR:
8413 return constant_boolean_node (bitpos0 <= bitpos1, type);
8414 case GE_EXPR:
8415 return constant_boolean_node (bitpos0 >= bitpos1, type);
8416 case GT_EXPR:
8417 return constant_boolean_node (bitpos0 > bitpos1, type);
8418 default:;
8421 /* We can simplify the comparison to a comparison of the variable
8422 offset parts if the constant offset parts are equal.
8423 Be careful to use signed sizetype here because otherwise we
8424 mess with array offsets in the wrong way. This is possible
8425 because pointer arithmetic is restricted to remain within an
8426 object and overflow on pointer differences is undefined as of
8427 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8428 else if (bitpos0 == bitpos1)
8430 /* By converting to signed sizetype we cover middle-end pointer
8431 arithmetic which operates on unsigned pointer types of size
8432 type size and ARRAY_REF offsets which are properly sign or
8433 zero extended from their type in case it is narrower than
8434 sizetype. */
8435 if (offset0 == NULL_TREE)
8436 offset0 = build_int_cst (ssizetype, 0);
8437 else
8438 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8439 if (offset1 == NULL_TREE)
8440 offset1 = build_int_cst (ssizetype, 0);
8441 else
8442 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8444 if (!equality_code
8445 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8446 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8447 fold_overflow_warning (("assuming pointer wraparound does not "
8448 "occur when comparing P +- C1 with "
8449 "P +- C2"),
8450 WARN_STRICT_OVERFLOW_COMPARISON);
8452 return fold_build2_loc (loc, code, type, offset0, offset1);
8455 /* For equal offsets we can simplify to a comparison of the
8456 base addresses. */
8457 else if (bitpos0 == bitpos1
8458 && (indirect_base0
8459 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8460 && (indirect_base1
8461 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8462 && ((offset0 == offset1)
8463 || (offset0 && offset1
8464 && operand_equal_p (offset0, offset1, 0))))
8466 if (indirect_base0)
8467 base0 = build_fold_addr_expr_loc (loc, base0);
8468 if (indirect_base1)
8469 base1 = build_fold_addr_expr_loc (loc, base1);
8470 return fold_build2_loc (loc, code, type, base0, base1);
8472 /* Comparison between an ordinary (non-weak) symbol and a null
8473 pointer can be eliminated since such symbols must have a
8474 non-null address. In C, relational expressions between pointers
8475 to objects and null pointers are undefined. The results
8476 below follow the C++ rules with the additional property that
8477 every object pointer compares greater than a null pointer. */
8479 else if (((DECL_P (base0)
8480 && maybe_nonzero_address (base0) > 0
8481 /* Avoid folding references to struct members at offset 0 to
8482 prevent tests like '&ptr->firstmember == 0' from getting
8483 eliminated. When ptr is null, although the -> expression
8484 is strictly speaking invalid, GCC retains it as a matter
8485 of QoI. See PR c/44555. */
8486 && (offset0 == NULL_TREE && bitpos0 != 0))
8487 || CONSTANT_CLASS_P (base0))
8488 && indirect_base0
8489 /* The caller guarantees that when one of the arguments is
8490 constant (i.e., null in this case) it is second. */
8491 && integer_zerop (arg1))
8493 switch (code)
8495 case EQ_EXPR:
8496 case LE_EXPR:
8497 case LT_EXPR:
8498 return constant_boolean_node (false, type);
8499 case GE_EXPR:
8500 case GT_EXPR:
8501 case NE_EXPR:
8502 return constant_boolean_node (true, type);
8503 default:
8504 gcc_unreachable ();
8509 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8510 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8511 the resulting offset is smaller in absolute value than the
8512 original one and has the same sign. */
8513 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8514 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8515 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8516 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8517 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8518 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8519 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8520 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8522 tree const1 = TREE_OPERAND (arg0, 1);
8523 tree const2 = TREE_OPERAND (arg1, 1);
8524 tree variable1 = TREE_OPERAND (arg0, 0);
8525 tree variable2 = TREE_OPERAND (arg1, 0);
8526 tree cst;
8527 const char * const warnmsg = G_("assuming signed overflow does not "
8528 "occur when combining constants around "
8529 "a comparison");
8531 /* Put the constant on the side where it doesn't overflow and is
8532 of lower absolute value and of the same sign as before. */
8533 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8534 ? MINUS_EXPR : PLUS_EXPR,
8535 const2, const1);
8536 if (!TREE_OVERFLOW (cst)
8537 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8538 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8540 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8541 return fold_build2_loc (loc, code, type,
8542 variable1,
8543 fold_build2_loc (loc, TREE_CODE (arg1),
8544 TREE_TYPE (arg1),
8545 variable2, cst));
8548 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8549 ? MINUS_EXPR : PLUS_EXPR,
8550 const1, const2);
8551 if (!TREE_OVERFLOW (cst)
8552 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8553 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8555 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8556 return fold_build2_loc (loc, code, type,
8557 fold_build2_loc (loc, TREE_CODE (arg0),
8558 TREE_TYPE (arg0),
8559 variable1, cst),
8560 variable2);
8564 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8565 if (tem)
8566 return tem;
8568 /* If we are comparing an expression that just has comparisons
8569 of two integer values, arithmetic expressions of those comparisons,
8570 and constants, we can simplify it. There are only three cases
8571 to check: the two values can either be equal, the first can be
8572 greater, or the second can be greater. Fold the expression for
8573 those three values. Since each value must be 0 or 1, we have
8574 eight possibilities, each of which corresponds to the constant 0
8575 or 1 or one of the six possible comparisons.
8577 This handles common cases like (a > b) == 0 but also handles
8578 expressions like ((x > y) - (y > x)) > 0, which supposedly
8579 occur in macroized code. */
8581 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8583 tree cval1 = 0, cval2 = 0;
8584 int save_p = 0;
8586 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8587 /* Don't handle degenerate cases here; they should already
8588 have been handled anyway. */
8589 && cval1 != 0 && cval2 != 0
8590 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8591 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8592 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8593 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8594 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8595 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8596 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8598 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8599 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8601 /* We can't just pass T to eval_subst in case cval1 or cval2
8602 was the same as ARG1. */
8604 tree high_result
8605 = fold_build2_loc (loc, code, type,
8606 eval_subst (loc, arg0, cval1, maxval,
8607 cval2, minval),
8608 arg1);
8609 tree equal_result
8610 = fold_build2_loc (loc, code, type,
8611 eval_subst (loc, arg0, cval1, maxval,
8612 cval2, maxval),
8613 arg1);
8614 tree low_result
8615 = fold_build2_loc (loc, code, type,
8616 eval_subst (loc, arg0, cval1, minval,
8617 cval2, maxval),
8618 arg1);
8620 /* All three of these results should be 0 or 1. Confirm they are.
8621 Then use those values to select the proper code to use. */
8623 if (TREE_CODE (high_result) == INTEGER_CST
8624 && TREE_CODE (equal_result) == INTEGER_CST
8625 && TREE_CODE (low_result) == INTEGER_CST)
8627 /* Make a 3-bit mask with the high-order bit being the
8628 value for `>', the next for '=', and the low for '<'. */
8629 switch ((integer_onep (high_result) * 4)
8630 + (integer_onep (equal_result) * 2)
8631 + integer_onep (low_result))
8633 case 0:
8634 /* Always false. */
8635 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8636 case 1:
8637 code = LT_EXPR;
8638 break;
8639 case 2:
8640 code = EQ_EXPR;
8641 break;
8642 case 3:
8643 code = LE_EXPR;
8644 break;
8645 case 4:
8646 code = GT_EXPR;
8647 break;
8648 case 5:
8649 code = NE_EXPR;
8650 break;
8651 case 6:
8652 code = GE_EXPR;
8653 break;
8654 case 7:
8655 /* Always true. */
8656 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8659 if (save_p)
8661 tem = save_expr (build2 (code, type, cval1, cval2));
8662 protected_set_expr_location (tem, loc);
8663 return tem;
8665 return fold_build2_loc (loc, code, type, cval1, cval2);
8670 return NULL_TREE;
8674 /* Subroutine of fold_binary. Optimize complex multiplications of the
8675 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8676 argument EXPR represents the expression "z" of type TYPE. */
8678 static tree
8679 fold_mult_zconjz (location_t loc, tree type, tree expr)
8681 tree itype = TREE_TYPE (type);
8682 tree rpart, ipart, tem;
8684 if (TREE_CODE (expr) == COMPLEX_EXPR)
8686 rpart = TREE_OPERAND (expr, 0);
8687 ipart = TREE_OPERAND (expr, 1);
8689 else if (TREE_CODE (expr) == COMPLEX_CST)
8691 rpart = TREE_REALPART (expr);
8692 ipart = TREE_IMAGPART (expr);
8694 else
8696 expr = save_expr (expr);
8697 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8698 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8701 rpart = save_expr (rpart);
8702 ipart = save_expr (ipart);
8703 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8704 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8705 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8706 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8707 build_zero_cst (itype));
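/* The identity used above, for z = a + b*i:
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b + 0*i,
   hence the COMPLEX_EXPR of rpart*rpart + ipart*ipart and zero. */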
8711 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8712 CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
8713 true if successful. */
8715 static bool
8716 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
8718 unsigned int i;
8720 if (TREE_CODE (arg) == VECTOR_CST)
8722 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8723 elts[i] = VECTOR_CST_ELT (arg, i);
8725 else if (TREE_CODE (arg) == CONSTRUCTOR)
8727 constructor_elt *elt;
8729 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8730 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8731 return false;
8732 else
8733 elts[i] = elt->value;
8735 else
8736 return false;
8737 for (; i < nelts; i++)
8738 elts[i]
8739 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8740 return true;
8743 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8744 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8745 NULL_TREE otherwise. */
8747 static tree
8748 fold_vec_perm (tree type, tree arg0, tree arg1, vec_perm_indices sel)
8750 unsigned int i;
8751 bool need_ctor = false;
8753 unsigned int nelts = sel.length ();
8754 gcc_assert (TYPE_VECTOR_SUBPARTS (type) == nelts
8755 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8756 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8757 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8758 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8759 return NULL_TREE;
8761 tree *in_elts = XALLOCAVEC (tree, nelts * 2);
8762 if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
8763 || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
8764 return NULL_TREE;
8766 auto_vec<tree, 32> out_elts (nelts);
8767 for (i = 0; i < nelts; i++)
8769 if (!CONSTANT_CLASS_P (in_elts[sel[i]]))
8770 need_ctor = true;
8771 out_elts.quick_push (unshare_expr (in_elts[sel[i]]));
8774 if (need_ctor)
8776 vec<constructor_elt, va_gc> *v;
8777 vec_alloc (v, nelts);
8778 for (i = 0; i < nelts; i++)
8779 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
8780 return build_constructor (type, v);
8782 else
8783 return build_vector (type, out_elts);
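/* Worked example (hypothetical vectors): with ARG0 = { 0, 1, 2, 3 },
   ARG1 = { 4, 5, 6, 7 } and SEL = { 0, 5, 2, 7 }, in_elts holds all
   eight scalars and the result is the VECTOR_CST { 0, 5, 2, 7 };
   selector values of nelts or more pick elements from ARG1. */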
8786 /* Try to fold a pointer difference of type TYPE between two address expressions of
8787 array references AREF0 and AREF1 using location LOC. Return a
8788 simplified expression for the difference or NULL_TREE. */
8790 static tree
8791 fold_addr_of_array_ref_difference (location_t loc, tree type,
8792 tree aref0, tree aref1)
8794 tree base0 = TREE_OPERAND (aref0, 0);
8795 tree base1 = TREE_OPERAND (aref1, 0);
8796 tree base_offset = build_int_cst (type, 0);
8798 /* If the bases are array references as well, recurse. If the bases
8799 are pointer indirections compute the difference of the pointers.
8800 If the bases are equal, we are set. */
8801 if ((TREE_CODE (base0) == ARRAY_REF
8802 && TREE_CODE (base1) == ARRAY_REF
8803 && (base_offset
8804 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8805 || (INDIRECT_REF_P (base0)
8806 && INDIRECT_REF_P (base1)
8807 && (base_offset
8808 = fold_binary_loc (loc, MINUS_EXPR, type,
8809 fold_convert (type, TREE_OPERAND (base0, 0)),
8810 fold_convert (type,
8811 TREE_OPERAND (base1, 0)))))
8812 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8814 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8815 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8816 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8817 tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
8818 return fold_build2_loc (loc, PLUS_EXPR, type,
8819 base_offset,
8820 fold_build2_loc (loc, MULT_EXPR, type,
8821 diff, esz));
8823 return NULL_TREE;
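/* Worked example (hypothetical operands): for int a[10], the pointer
   difference &a[i] - &a[j] reaches this function with AREF0 == a[i]
   and AREF1 == a[j]; the bases compare equal, so the result folds to
   0 + (i - j) * element-size instead of an address subtraction. */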
8826 /* If the real or vector real constant CST of type TYPE has an exact
8827 inverse, return it, else return NULL. */
8829 tree
8830 exact_inverse (tree type, tree cst)
8832 REAL_VALUE_TYPE r;
8833 tree unit_type;
8834 machine_mode mode;
8835 unsigned vec_nelts, i;
8837 switch (TREE_CODE (cst))
8839 case REAL_CST:
8840 r = TREE_REAL_CST (cst);
8842 if (exact_real_inverse (TYPE_MODE (type), &r))
8843 return build_real (type, r);
8845 return NULL_TREE;
8847 case VECTOR_CST:
8849 vec_nelts = VECTOR_CST_NELTS (cst);
8850 unit_type = TREE_TYPE (type);
8851 mode = TYPE_MODE (unit_type);
8853 auto_vec<tree, 32> elts (vec_nelts);
8854 for (i = 0; i < vec_nelts; i++)
8856 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8857 if (!exact_real_inverse (mode, &r))
8858 return NULL_TREE;
8859 elts.quick_push (build_real (unit_type, r));
8862 return build_vector (type, elts);
8865 default:
8866 return NULL_TREE;
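/* Examples: for the REAL_CST 4.0 the exact inverse 0.25 is returned;
   for 3.0 the result is NULL_TREE, since 1/3 has no exact binary
   floating-point representation; a VECTOR_CST succeeds only if every
   element has an exact inverse. */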
8870 /* Mask out the tz least significant bits of X of type TYPE where
8871 tz is the number of trailing zeroes in Y. */
8872 static wide_int
8873 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8875 int tz = wi::ctz (y);
8876 if (tz > 0)
8877 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8878 return x;
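/* Worked example: for Y == 8 (three trailing zeros) and X == 0b10110,
   the result is 0b10000; the three least significant bits of X are
   cleared, since any multiple of Y has those bits zero. */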
8881 /* Return true when T is an address and is known to be nonzero.
8882 For floating point we further ensure that T is not denormal.
8883 Similar logic is present in nonzero_address in rtlanal.h.
8885 If the return value is based on the assumption that signed overflow
8886 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
8887 change *STRICT_OVERFLOW_P. */
8889 static bool
8890 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
8892 tree type = TREE_TYPE (t);
8893 enum tree_code code;
8895 /* Doing something useful for floating point would need more work. */
8896 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8897 return false;
8899 code = TREE_CODE (t);
8900 switch (TREE_CODE_CLASS (code))
8902 case tcc_unary:
8903 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8904 strict_overflow_p);
8905 case tcc_binary:
8906 case tcc_comparison:
8907 return tree_binary_nonzero_warnv_p (code, type,
8908 TREE_OPERAND (t, 0),
8909 TREE_OPERAND (t, 1),
8910 strict_overflow_p);
8911 case tcc_constant:
8912 case tcc_declaration:
8913 case tcc_reference:
8914 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8916 default:
8917 break;
8920 switch (code)
8922 case TRUTH_NOT_EXPR:
8923 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
8924 strict_overflow_p);
8926 case TRUTH_AND_EXPR:
8927 case TRUTH_OR_EXPR:
8928 case TRUTH_XOR_EXPR:
8929 return tree_binary_nonzero_warnv_p (code, type,
8930 TREE_OPERAND (t, 0),
8931 TREE_OPERAND (t, 1),
8932 strict_overflow_p);
8934 case COND_EXPR:
8935 case CONSTRUCTOR:
8936 case OBJ_TYPE_REF:
8937 case ASSERT_EXPR:
8938 case ADDR_EXPR:
8939 case WITH_SIZE_EXPR:
8940 case SSA_NAME:
8941 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
8943 case COMPOUND_EXPR:
8944 case MODIFY_EXPR:
8945 case BIND_EXPR:
8946 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
8947 strict_overflow_p);
8949 case SAVE_EXPR:
8950 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
8951 strict_overflow_p);
8953 case CALL_EXPR:
8955 tree fndecl = get_callee_fndecl (t);
8956 if (!fndecl) return false;
8957 if (flag_delete_null_pointer_checks && !flag_check_new
8958 && DECL_IS_OPERATOR_NEW (fndecl)
8959 && !TREE_NOTHROW (fndecl))
8960 return true;
8961 if (flag_delete_null_pointer_checks
8962 && lookup_attribute ("returns_nonnull",
8963 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
8964 return true;
8965 return alloca_call_p (t);
8968 default:
8969 break;
8971 return false;
8974 /* Return true when T is an address and is known to be nonzero.
8975 Handle warnings about undefined signed overflow. */
8977 bool
8978 tree_expr_nonzero_p (tree t)
8980 bool ret, strict_overflow_p;
8982 strict_overflow_p = false;
8983 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
8984 if (strict_overflow_p)
8985 fold_overflow_warning (("assuming signed overflow does not occur when "
8986 "determining that expression is always "
8987 "non-zero"),
8988 WARN_STRICT_OVERFLOW_MISC);
8989 return ret;
8992 /* Return true if T is known not to be equal to an integer W. */
8994 bool
8995 expr_not_equal_to (tree t, const wide_int &w)
8997 wide_int min, max, nz;
8998 value_range_type rtype;
8999 switch (TREE_CODE (t))
9001 case INTEGER_CST:
9002 return wi::to_wide (t) != w;
9004 case SSA_NAME:
9005 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9006 return false;
9007 rtype = get_range_info (t, &min, &max);
9008 if (rtype == VR_RANGE)
9010 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9011 return true;
9012 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9013 return true;
9015 else if (rtype == VR_ANTI_RANGE
9016 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9017 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9018 return true;
9019 /* If T has some known zero bits and W has any of those bits set,
9020 then T is known not to be equal to W. */
9021 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9022 TYPE_PRECISION (TREE_TYPE (t))), 0))
9023 return true;
9024 return false;
9026 default:
9027 return false;
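/* Worked example (hypothetical SSA_NAME): if get_nonzero_bits shows
   that the low bit of T is always zero, T cannot equal the odd
   constant 5, because bit 0 of W survives the bit_and_not mask
   above and the result is nonzero. */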
9031 /* Fold a binary expression of code CODE and type TYPE with operands
9032 OP0 and OP1. LOC is the location of the resulting expression.
9033 Return the folded expression if folding is successful. Otherwise,
9034 return NULL_TREE. */
9036 tree
9037 fold_binary_loc (location_t loc,
9038 enum tree_code code, tree type, tree op0, tree op1)
9040 enum tree_code_class kind = TREE_CODE_CLASS (code);
9041 tree arg0, arg1, tem;
9042 tree t1 = NULL_TREE;
9043 bool strict_overflow_p;
9044 unsigned int prec;
9046 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9047 && TREE_CODE_LENGTH (code) == 2
9048 && op0 != NULL_TREE
9049 && op1 != NULL_TREE);
9051 arg0 = op0;
9052 arg1 = op1;
9054 /* Strip any conversions that don't change the mode. This is
9055 safe for every expression, except for a comparison expression
9056 because its signedness is derived from its operands. So, in
9057 the latter case, only strip conversions that don't change the
9058 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9059 preserved.
9061 Note that this is done as an internal manipulation within the
9062 constant folder, in order to find the simplest representation
9063 of the arguments so that their form can be studied. In any
9064 case, the appropriate type conversions should be put back in
9065 the tree that will get out of the constant folder. */
9067 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9069 STRIP_SIGN_NOPS (arg0);
9070 STRIP_SIGN_NOPS (arg1);
9072 else
9074 STRIP_NOPS (arg0);
9075 STRIP_NOPS (arg1);
9078 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9079 constant but we can't do arithmetic on them. */
9080 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9082 tem = const_binop (code, type, arg0, arg1);
9083 if (tem != NULL_TREE)
9085 if (TREE_TYPE (tem) != type)
9086 tem = fold_convert_loc (loc, type, tem);
9087 return tem;
9091 /* If this is a commutative operation, and ARG0 is a constant, move it
9092 to ARG1 to reduce the number of tests below. */
9093 if (commutative_tree_code (code)
9094 && tree_swap_operands_p (arg0, arg1))
9095 return fold_build2_loc (loc, code, type, op1, op0);
9097 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9098 to ARG1 to reduce the number of tests below. */
9099 if (kind == tcc_comparison
9100 && tree_swap_operands_p (arg0, arg1))
9101 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9103 tem = generic_simplify (loc, code, type, op0, op1);
9104 if (tem)
9105 return tem;
9107 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9109 First check for cases where an arithmetic operation is applied to a
9110 compound, conditional, or comparison operation. Push the arithmetic
9111 operation inside the compound or conditional to see if any folding
9112 can then be done. Convert comparison to conditional for this purpose.
9113 This also optimizes non-constant cases that used to be done in
9114 expand_expr.
9116 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9117 where one operand is a truth value (e.g. a comparison) and the other is a
9118 truth value or a BIT_AND_EXPR with the constant 1. In that case, the
9119 code below would make the expression more complex. Change it to a
9120 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9121 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9123 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9124 || code == EQ_EXPR || code == NE_EXPR)
9125 && TREE_CODE (type) != VECTOR_TYPE
9126 && ((truth_value_p (TREE_CODE (arg0))
9127 && (truth_value_p (TREE_CODE (arg1))
9128 || (TREE_CODE (arg1) == BIT_AND_EXPR
9129 && integer_onep (TREE_OPERAND (arg1, 1)))))
9130 || (truth_value_p (TREE_CODE (arg1))
9131 && (truth_value_p (TREE_CODE (arg0))
9132 || (TREE_CODE (arg0) == BIT_AND_EXPR
9133 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9135 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9136 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9137 : TRUTH_XOR_EXPR,
9138 boolean_type_node,
9139 fold_convert_loc (loc, boolean_type_node, arg0),
9140 fold_convert_loc (loc, boolean_type_node, arg1));
9142 if (code == EQ_EXPR)
9143 tem = invert_truthvalue_loc (loc, tem);
9145 return fold_convert_loc (loc, type, tem);
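/* Editorial sketch, not part of fold-const.c: what the rewrite above does
   on hypothetical 0/1 operands.  Bitwise &, | and !=/== on truth values
   become their logical counterparts so later truth-value folds can fire:
     (a < b) & (c == d)   ->  (a < b) && (c == d)
     (a < b) | (c == d)   ->  (a < b) || (c == d)
     (a < b) != (c == d)  ->  TRUTH_XOR, and == becomes its inversion.  */
static int
truth_rewrite_equiv (int a, int b, int c, int d)
{
  /* Both forms agree because each operand is exactly 0 or 1.  */
  return ((a < b) & (c == d)) == ((a < b) && (c == d));
}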
9148 if (TREE_CODE_CLASS (code) == tcc_binary
9149 || TREE_CODE_CLASS (code) == tcc_comparison)
9151 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9153 tem = fold_build2_loc (loc, code, type,
9154 fold_convert_loc (loc, TREE_TYPE (op0),
9155 TREE_OPERAND (arg0, 1)), op1);
9156 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9157 tem);
9159 if (TREE_CODE (arg1) == COMPOUND_EXPR)
9161 tem = fold_build2_loc (loc, code, type, op0,
9162 fold_convert_loc (loc, TREE_TYPE (op1),
9163 TREE_OPERAND (arg1, 1)));
9164 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9165 tem);
9168 if (TREE_CODE (arg0) == COND_EXPR
9169 || TREE_CODE (arg0) == VEC_COND_EXPR
9170 || COMPARISON_CLASS_P (arg0))
9172 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9173 arg0, arg1,
9174 /*cond_first_p=*/1);
9175 if (tem != NULL_TREE)
9176 return tem;
9179 if (TREE_CODE (arg1) == COND_EXPR
9180 || TREE_CODE (arg1) == VEC_COND_EXPR
9181 || COMPARISON_CLASS_P (arg1))
9183 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9184 arg1, arg0,
9185 /*cond_first_p=*/0);
9186 if (tem != NULL_TREE)
9187 return tem;
9191 switch (code)
9193 case MEM_REF:
9194 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9195 if (TREE_CODE (arg0) == ADDR_EXPR
9196 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9198 tree iref = TREE_OPERAND (arg0, 0);
9199 return fold_build2 (MEM_REF, type,
9200 TREE_OPERAND (iref, 0),
9201 int_const_binop (PLUS_EXPR, arg1,
9202 TREE_OPERAND (iref, 1)));
9205 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9206 if (TREE_CODE (arg0) == ADDR_EXPR
9207 && handled_component_p (TREE_OPERAND (arg0, 0)))
9209 tree base;
9210 HOST_WIDE_INT coffset;
9211 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9212 &coffset);
9213 if (!base)
9214 return NULL_TREE;
9215 return fold_build2 (MEM_REF, type,
9216 build_fold_addr_expr (base),
9217 int_const_binop (PLUS_EXPR, arg1,
9218 size_int (coffset)));
9221 return NULL_TREE;
9223 case POINTER_PLUS_EXPR:
9224 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9225 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9226 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9227 return fold_convert_loc (loc, type,
9228 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9229 fold_convert_loc (loc, sizetype,
9230 arg1),
9231 fold_convert_loc (loc, sizetype,
9232 arg0)));
9234 return NULL_TREE;
9236 case PLUS_EXPR:
9237 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9239 /* X + (X / CST) * -CST is X % CST. */
9240 if (TREE_CODE (arg1) == MULT_EXPR
9241 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9242 && operand_equal_p (arg0,
9243 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9245 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9246 tree cst1 = TREE_OPERAND (arg1, 1);
9247 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9248 cst1, cst0);
9249 if (sum && integer_zerop (sum))
9250 return fold_convert_loc (loc, type,
9251 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9252 TREE_TYPE (arg0), arg0,
9253 cst0));
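/* Editorial sketch, not part of fold-const.c: the identity behind the
   fold above, checked with C's truncating division.  Since
   x == (x / c) * c + x % c for any nonzero c, adding (x / c) * -c to x
   leaves exactly the remainder.  */
static int
plus_div_mult_is_mod (int x, int c)
{
  /* Holds for every nonzero c (and x != INT_MIN when c == -1).  */
  return x + (x / c) * -c == x % c;
}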
9257 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9258 one. Make sure the type is not saturating and has the signedness of
9259 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9260 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9261 if ((TREE_CODE (arg0) == MULT_EXPR
9262 || TREE_CODE (arg1) == MULT_EXPR)
9263 && !TYPE_SATURATING (type)
9264 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9265 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9266 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9268 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9269 if (tem)
9270 return tem;
9273 if (! FLOAT_TYPE_P (type))
9275 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9276 (plus (plus (mult) (mult)) (foo)) so that we can
9277 take advantage of the factoring cases below. */
9278 if (ANY_INTEGRAL_TYPE_P (type)
9279 && TYPE_OVERFLOW_WRAPS (type)
9280 && (((TREE_CODE (arg0) == PLUS_EXPR
9281 || TREE_CODE (arg0) == MINUS_EXPR)
9282 && TREE_CODE (arg1) == MULT_EXPR)
9283 || ((TREE_CODE (arg1) == PLUS_EXPR
9284 || TREE_CODE (arg1) == MINUS_EXPR)
9285 && TREE_CODE (arg0) == MULT_EXPR)))
9287 tree parg0, parg1, parg, marg;
9288 enum tree_code pcode;
9290 if (TREE_CODE (arg1) == MULT_EXPR)
9291 parg = arg0, marg = arg1;
9292 else
9293 parg = arg1, marg = arg0;
9294 pcode = TREE_CODE (parg);
9295 parg0 = TREE_OPERAND (parg, 0);
9296 parg1 = TREE_OPERAND (parg, 1);
9297 STRIP_NOPS (parg0);
9298 STRIP_NOPS (parg1);
9300 if (TREE_CODE (parg0) == MULT_EXPR
9301 && TREE_CODE (parg1) != MULT_EXPR)
9302 return fold_build2_loc (loc, pcode, type,
9303 fold_build2_loc (loc, PLUS_EXPR, type,
9304 fold_convert_loc (loc, type,
9305 parg0),
9306 fold_convert_loc (loc, type,
9307 marg)),
9308 fold_convert_loc (loc, type, parg1));
9309 if (TREE_CODE (parg0) != MULT_EXPR
9310 && TREE_CODE (parg1) == MULT_EXPR)
9311 return
9312 fold_build2_loc (loc, PLUS_EXPR, type,
9313 fold_convert_loc (loc, type, parg0),
9314 fold_build2_loc (loc, pcode, type,
9315 fold_convert_loc (loc, type, marg),
9316 fold_convert_loc (loc, type,
9317 parg1)));
9320 else
9322 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9323 to __complex__ ( x, y ). This is not the same for SNaNs or
9324 if signed zeros are involved. */
9325 if (!HONOR_SNANS (element_mode (arg0))
9326 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9327 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9329 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9330 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9331 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9332 bool arg0rz = false, arg0iz = false;
9333 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9334 || (arg0i && (arg0iz = real_zerop (arg0i))))
9336 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9337 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9338 if (arg0rz && arg1i && real_zerop (arg1i))
9340 tree rp = arg1r ? arg1r
9341 : build1 (REALPART_EXPR, rtype, arg1);
9342 tree ip = arg0i ? arg0i
9343 : build1 (IMAGPART_EXPR, rtype, arg0);
9344 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9346 else if (arg0iz && arg1r && real_zerop (arg1r))
9348 tree rp = arg0r ? arg0r
9349 : build1 (REALPART_EXPR, rtype, arg0);
9350 tree ip = arg1i ? arg1i
9351 : build1 (IMAGPART_EXPR, rtype, arg1);
9352 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9357 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9358 We associate floats only if the user has specified
9359 -fassociative-math. */
9360 if (flag_associative_math
9361 && TREE_CODE (arg1) == PLUS_EXPR
9362 && TREE_CODE (arg0) != MULT_EXPR)
9364 tree tree10 = TREE_OPERAND (arg1, 0);
9365 tree tree11 = TREE_OPERAND (arg1, 1);
9366 if (TREE_CODE (tree11) == MULT_EXPR
9367 && TREE_CODE (tree10) == MULT_EXPR)
9369 tree tree0;
9370 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9371 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9374 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9375 We associate floats only if the user has specified
9376 -fassociative-math. */
9377 if (flag_associative_math
9378 && TREE_CODE (arg0) == PLUS_EXPR
9379 && TREE_CODE (arg1) != MULT_EXPR)
9381 tree tree00 = TREE_OPERAND (arg0, 0);
9382 tree tree01 = TREE_OPERAND (arg0, 1);
9383 if (TREE_CODE (tree01) == MULT_EXPR
9384 && TREE_CODE (tree00) == MULT_EXPR)
9386 tree tree0;
9387 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9388 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9393 bit_rotate:
9394 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9395 is a rotate of A by C1 bits. */
9396 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9397 is a rotate of A by B bits.
9398 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
9399 though in this case CODE must be | and not + or ^, otherwise
9400 it doesn't return A when B is 0. */
9402 enum tree_code code0, code1;
9403 tree rtype;
9404 code0 = TREE_CODE (arg0);
9405 code1 = TREE_CODE (arg1);
9406 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9407 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9408 && operand_equal_p (TREE_OPERAND (arg0, 0),
9409 TREE_OPERAND (arg1, 0), 0)
9410 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9411 TYPE_UNSIGNED (rtype))
9412 /* Only create rotates in complete modes. Other cases are not
9413 expanded properly. */
9414 && (element_precision (rtype)
9415 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9417 tree tree01, tree11;
9418 tree orig_tree01, orig_tree11;
9419 enum tree_code code01, code11;
9421 tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
9422 tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
9423 STRIP_NOPS (tree01);
9424 STRIP_NOPS (tree11);
9425 code01 = TREE_CODE (tree01);
9426 code11 = TREE_CODE (tree11);
9427 if (code11 != MINUS_EXPR
9428 && (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
9430 std::swap (code0, code1);
9431 std::swap (code01, code11);
9432 std::swap (tree01, tree11);
9433 std::swap (orig_tree01, orig_tree11);
9435 if (code01 == INTEGER_CST
9436 && code11 == INTEGER_CST
9437 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9438 == element_precision (rtype)))
9440 tem = build2_loc (loc, LROTATE_EXPR,
9441 rtype, TREE_OPERAND (arg0, 0),
9442 code0 == LSHIFT_EXPR
9443 ? orig_tree01 : orig_tree11);
9444 return fold_convert_loc (loc, type, tem);
9446 else if (code11 == MINUS_EXPR)
9448 tree tree110, tree111;
9449 tree110 = TREE_OPERAND (tree11, 0);
9450 tree111 = TREE_OPERAND (tree11, 1);
9451 STRIP_NOPS (tree110);
9452 STRIP_NOPS (tree111);
9453 if (TREE_CODE (tree110) == INTEGER_CST
9454 && 0 == compare_tree_int (tree110,
9455 element_precision (rtype))
9456 && operand_equal_p (tree01, tree111, 0))
9458 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9459 ? LROTATE_EXPR : RROTATE_EXPR),
9460 rtype, TREE_OPERAND (arg0, 0),
9461 orig_tree01);
9462 return fold_convert_loc (loc, type, tem);
9465 else if (code == BIT_IOR_EXPR
9466 && code11 == BIT_AND_EXPR
9467 && pow2p_hwi (element_precision (rtype)))
9469 tree tree110, tree111;
9470 tree110 = TREE_OPERAND (tree11, 0);
9471 tree111 = TREE_OPERAND (tree11, 1);
9472 STRIP_NOPS (tree110);
9473 STRIP_NOPS (tree111);
9474 if (TREE_CODE (tree110) == NEGATE_EXPR
9475 && TREE_CODE (tree111) == INTEGER_CST
9476 && 0 == compare_tree_int (tree111,
9477 element_precision (rtype) - 1)
9478 && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
9480 tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9481 ? LROTATE_EXPR : RROTATE_EXPR),
9482 rtype, TREE_OPERAND (arg0, 0),
9483 orig_tree01);
9484 return fold_convert_loc (loc, type, tem);
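/* Editorial sketch, not part of fold-const.c: the rotate shapes matched
   above, written out for a 32-bit unsigned type.  The constant form needs
   C1 + C2 == 32; the (-b & 31) form is only accepted under BIT_IOR_EXPR
   because with + or ^ it would not yield A when b == 0.  The left count
   is masked here just to keep the sketch well-defined for any b.  */
static unsigned
rotl32_const (unsigned a)
{
  return (a << 3) | (a >> 29);                 /* LROTATE_EXPR by 3 */
}
static unsigned
rotl32_var (unsigned a, unsigned b)
{
  return (a << (b & 31)) | (a >> (-b & 31));   /* rotate left by b mod 32 */
}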
9490 associate:
9491 /* In most languages, can't associate operations on floats through
9492 parentheses. Rather than remember where the parentheses were, we
9493 don't associate floats at all, unless the user has specified
9494 -fassociative-math.
9495 And, we need to make sure type is not saturating. */
9497 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9498 && !TYPE_SATURATING (type))
9500 tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
9501 tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
9502 tree atype = type;
9503 bool ok = true;
9505 /* Split both trees into variables, constants, and literals. Then
9506 associate each group together, the constants with literals,
9507 then the result with variables. This increases the chances of
9508 literals being recombined later and of generating relocatable
9509 expressions for the sum of a constant and literal. */
9510 var0 = split_tree (arg0, type, code,
9511 &minus_var0, &con0, &minus_con0,
9512 &lit0, &minus_lit0, 0);
9513 var1 = split_tree (arg1, type, code,
9514 &minus_var1, &con1, &minus_con1,
9515 &lit1, &minus_lit1, code == MINUS_EXPR);
9517 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9518 if (code == MINUS_EXPR)
9519 code = PLUS_EXPR;
9521 /* With undefined overflow prefer doing association in a type
9522 which wraps on overflow, if that is one of the operand types. */
9523 if (POINTER_TYPE_P (type)
9524 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9526 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9527 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9528 atype = TREE_TYPE (arg0);
9529 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9530 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9531 atype = TREE_TYPE (arg1);
9532 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9535 /* With undefined overflow we can only associate constants with one
9536 variable, and constants whose association doesn't overflow. */
9537 if (POINTER_TYPE_P (atype)
9538 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9540 if ((var0 && var1) || (minus_var0 && minus_var1))
9542 /* ??? If split_tree would handle NEGATE_EXPR we could
9543 simply reject these cases and the allowed cases would
9544 be the var0/minus_var1 ones. */
9545 tree tmp0 = var0 ? var0 : minus_var0;
9546 tree tmp1 = var1 ? var1 : minus_var1;
9547 bool one_neg = false;
9549 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9551 tmp0 = TREE_OPERAND (tmp0, 0);
9552 one_neg = !one_neg;
9554 if (CONVERT_EXPR_P (tmp0)
9555 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9556 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9557 <= TYPE_PRECISION (atype)))
9558 tmp0 = TREE_OPERAND (tmp0, 0);
9559 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9561 tmp1 = TREE_OPERAND (tmp1, 0);
9562 one_neg = !one_neg;
9564 if (CONVERT_EXPR_P (tmp1)
9565 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9566 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9567 <= TYPE_PRECISION (atype)))
9568 tmp1 = TREE_OPERAND (tmp1, 0);
9569 /* The only case we can still associate with two variables
9570 is if they cancel out. */
9571 if (!one_neg
9572 || !operand_equal_p (tmp0, tmp1, 0))
9573 ok = false;
9575 else if ((var0 && minus_var1
9576 && ! operand_equal_p (var0, minus_var1, 0))
9577 || (minus_var0 && var1
9578 && ! operand_equal_p (minus_var0, var1, 0)))
9579 ok = false;
9582 /* Only do something if we found more than two objects. Otherwise,
9583 nothing has changed and we risk infinite recursion. */
9584 if (ok
9585 && (2 < ((var0 != 0) + (var1 != 0)
9586 + (minus_var0 != 0) + (minus_var1 != 0)
9587 + (con0 != 0) + (con1 != 0)
9588 + (minus_con0 != 0) + (minus_con1 != 0)
9589 + (lit0 != 0) + (lit1 != 0)
9590 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9592 var0 = associate_trees (loc, var0, var1, code, atype);
9593 minus_var0 = associate_trees (loc, minus_var0, minus_var1,
9594 code, atype);
9595 con0 = associate_trees (loc, con0, con1, code, atype);
9596 minus_con0 = associate_trees (loc, minus_con0, minus_con1,
9597 code, atype);
9598 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9599 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9600 code, atype);
9602 if (minus_var0 && var0)
9604 var0 = associate_trees (loc, var0, minus_var0,
9605 MINUS_EXPR, atype);
9606 minus_var0 = 0;
9608 if (minus_con0 && con0)
9610 con0 = associate_trees (loc, con0, minus_con0,
9611 MINUS_EXPR, atype);
9612 minus_con0 = 0;
9615 /* Preserve the MINUS_EXPR if the negative part of the literal is
9616 greater than the positive part. Otherwise, the multiplicative
9617 folding code (i.e. extract_muldiv) may be fooled in case
9618 unsigned constants are subtracted, like in the following
9619 example: ((X*2 + 4) - 8U)/2. */
9620 if (minus_lit0 && lit0)
9622 if (TREE_CODE (lit0) == INTEGER_CST
9623 && TREE_CODE (minus_lit0) == INTEGER_CST
9624 && tree_int_cst_lt (lit0, minus_lit0)
9625 /* But avoid ending up with only negated parts. */
9626 && (var0 || con0))
9628 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9629 MINUS_EXPR, atype);
9630 lit0 = 0;
9632 else
9634 lit0 = associate_trees (loc, lit0, minus_lit0,
9635 MINUS_EXPR, atype);
9636 minus_lit0 = 0;
9640 /* Don't introduce overflows through reassociation. */
9641 if ((lit0 && TREE_OVERFLOW_P (lit0))
9642 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
9643 return NULL_TREE;
9645 /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
9646 con0 = associate_trees (loc, con0, lit0, code, atype);
9647 lit0 = 0;
9648 minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
9649 code, atype);
9650 minus_lit0 = 0;
9652 /* Eliminate minus_con0. */
9653 if (minus_con0)
9655 if (con0)
9656 con0 = associate_trees (loc, con0, minus_con0,
9657 MINUS_EXPR, atype);
9658 else if (var0)
9659 var0 = associate_trees (loc, var0, minus_con0,
9660 MINUS_EXPR, atype);
9661 else
9662 gcc_unreachable ();
9663 minus_con0 = 0;
9666 /* Eliminate minus_var0. */
9667 if (minus_var0)
9669 if (con0)
9670 con0 = associate_trees (loc, con0, minus_var0,
9671 MINUS_EXPR, atype);
9672 else
9673 gcc_unreachable ();
9674 minus_var0 = 0;
9677 return
9678 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9679 code, atype));
9683 return NULL_TREE;
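/* Editorial sketch, not part of fold-const.c: the net effect of the
   associate: logic on a simple wrapping example.  split_tree separates
   each operand into variable, constant and literal parts, which are then
   regrouped so the literal parts fold at compile time.  */
static unsigned
associate_example (unsigned x, unsigned y)
{
  return (x + 4u) + (y + 5u);   /* regrouped as (x + y) + 9u */
}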
9685 case MINUS_EXPR:
9686 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9687 if (TREE_CODE (arg0) == NEGATE_EXPR
9688 && negate_expr_p (op1))
9689 return fold_build2_loc (loc, MINUS_EXPR, type,
9690 negate_expr (op1),
9691 fold_convert_loc (loc, type,
9692 TREE_OPERAND (arg0, 0)));
9694 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9695 __complex__ ( x, -y ). This is not the same for SNaNs or if
9696 signed zeros are involved. */
9697 if (!HONOR_SNANS (element_mode (arg0))
9698 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9699 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9701 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9702 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9703 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9704 bool arg0rz = false, arg0iz = false;
9705 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9706 || (arg0i && (arg0iz = real_zerop (arg0i))))
9708 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9709 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9710 if (arg0rz && arg1i && real_zerop (arg1i))
9712 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9713 arg1r ? arg1r
9714 : build1 (REALPART_EXPR, rtype, arg1));
9715 tree ip = arg0i ? arg0i
9716 : build1 (IMAGPART_EXPR, rtype, arg0);
9717 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9719 else if (arg0iz && arg1r && real_zerop (arg1r))
9721 tree rp = arg0r ? arg0r
9722 : build1 (REALPART_EXPR, rtype, arg0);
9723 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9724 arg1i ? arg1i
9725 : build1 (IMAGPART_EXPR, rtype, arg1));
9726 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9731 /* A - B -> A + (-B) if B is easily negatable. */
9732 if (negate_expr_p (op1)
9733 && ! TYPE_OVERFLOW_SANITIZED (type)
9734 && ((FLOAT_TYPE_P (type)
9735 /* Avoid this transformation if B is a positive REAL_CST. */
9736 && (TREE_CODE (op1) != REAL_CST
9737 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9738 || INTEGRAL_TYPE_P (type)))
9739 return fold_build2_loc (loc, PLUS_EXPR, type,
9740 fold_convert_loc (loc, type, arg0),
9741 negate_expr (op1));
9743 /* Fold &a[i] - &a[j] to i-j. */
9744 if (TREE_CODE (arg0) == ADDR_EXPR
9745 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9746 && TREE_CODE (arg1) == ADDR_EXPR
9747 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9749 tree tem = fold_addr_of_array_ref_difference (loc, type,
9750 TREE_OPERAND (arg0, 0),
9751 TREE_OPERAND (arg1, 0));
9752 if (tem)
9753 return tem;
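/* Editorial sketch, not part of fold-const.c: the C-level identity the
   fold exploits.  Pointer subtraction divides by the element size, so the
   difference of &a[i] and &a[j] is exactly i - j.  */
static long
array_ref_difference (double *a, long i, long j)
{
  /* ptrdiff_t narrowed to long just for this sketch.  */
  return &a[i] - &a[j];   /* folds to i - j */
}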
9756 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9757 one. Make sure the type is not saturating and has the signedness of
9758 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9759 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9760 if ((TREE_CODE (arg0) == MULT_EXPR
9761 || TREE_CODE (arg1) == MULT_EXPR)
9762 && !TYPE_SATURATING (type)
9763 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9764 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9765 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9767 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9768 if (tem)
9769 return tem;
9772 goto associate;
9774 case MULT_EXPR:
9775 if (! FLOAT_TYPE_P (type))
9777 /* Transform x * -C into -x * C if x is easily negatable. */
9778 if (TREE_CODE (op1) == INTEGER_CST
9779 && tree_int_cst_sgn (op1) == -1
9780 && negate_expr_p (op0)
9781 && negate_expr_p (op1)
9782 && (tem = negate_expr (op1)) != op1
9783 && ! TREE_OVERFLOW (tem))
9784 return fold_build2_loc (loc, MULT_EXPR, type,
9785 fold_convert_loc (loc, type,
9786 negate_expr (op0)), tem);
9788 strict_overflow_p = false;
9789 if (TREE_CODE (arg1) == INTEGER_CST
9790 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9791 &strict_overflow_p)))
9793 if (strict_overflow_p)
9794 fold_overflow_warning (("assuming signed overflow does not "
9795 "occur when simplifying "
9796 "multiplication"),
9797 WARN_STRICT_OVERFLOW_MISC);
9798 return fold_convert_loc (loc, type, tem);
9801 /* Optimize z * conj(z) for integer complex numbers. */
9802 if (TREE_CODE (arg0) == CONJ_EXPR
9803 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9804 return fold_mult_zconjz (loc, type, arg1);
9805 if (TREE_CODE (arg1) == CONJ_EXPR
9806 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9807 return fold_mult_zconjz (loc, type, arg0);
9809 else
9811 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9812 This is not the same for NaNs or if signed zeros are
9813 involved. */
9814 if (!HONOR_NANS (arg0)
9815 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9816 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9817 && TREE_CODE (arg1) == COMPLEX_CST
9818 && real_zerop (TREE_REALPART (arg1)))
9820 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9821 if (real_onep (TREE_IMAGPART (arg1)))
9822 return
9823 fold_build2_loc (loc, COMPLEX_EXPR, type,
9824 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9825 rtype, arg0)),
9826 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9827 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9828 return
9829 fold_build2_loc (loc, COMPLEX_EXPR, type,
9830 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9831 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9832 rtype, arg0)));
9835 /* Optimize z * conj(z) for floating point complex numbers.
9836 Guarded by flag_unsafe_math_optimizations as non-finite
9837 imaginary components don't produce scalar results. */
9838 if (flag_unsafe_math_optimizations
9839 && TREE_CODE (arg0) == CONJ_EXPR
9840 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9841 return fold_mult_zconjz (loc, type, arg1);
9842 if (flag_unsafe_math_optimizations
9843 && TREE_CODE (arg1) == CONJ_EXPR
9844 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9845 return fold_mult_zconjz (loc, type, arg0);
9847 goto associate;
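/* Editorial sketch, not part of fold-const.c: what fold_mult_zconjz
   builds.  For z = re + im*i, z * conj(z) is (re*re + im*im) + 0i, so
   the complex multiply collapses to two scalar multiplies and an add.  */
static int
mult_zconjz_real_part (int re, int im)
{
  return re * re + im * im;   /* the real part; the imaginary part is 0 */
}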
9849 case BIT_IOR_EXPR:
9850 /* Canonicalize (X & C1) | C2. */
9851 if (TREE_CODE (arg0) == BIT_AND_EXPR
9852 && TREE_CODE (arg1) == INTEGER_CST
9853 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9855 int width = TYPE_PRECISION (type), w;
9856 wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
9857 wide_int c2 = wi::to_wide (arg1);
9859 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9860 if ((c1 & c2) == c1)
9861 return omit_one_operand_loc (loc, type, arg1,
9862 TREE_OPERAND (arg0, 0));
9864 wide_int msk = wi::mask (width, false,
9865 TYPE_PRECISION (TREE_TYPE (arg1)));
9867 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9868 if (wi::bit_and_not (msk, c1 | c2) == 0)
9870 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9871 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
9874 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9875 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9876 mode which allows further optimizations. */
9877 c1 &= msk;
9878 c2 &= msk;
9879 wide_int c3 = wi::bit_and_not (c1, c2);
9880 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9882 wide_int mask = wi::mask (w, false,
9883 TYPE_PRECISION (type));
9884 if (((c1 | c2) & mask) == mask
9885 && wi::bit_and_not (c1, mask) == 0)
9887 c3 = mask;
9888 break;
9892 if (c3 != c1)
9894 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9895 tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
9896 wide_int_to_tree (type, c3));
9897 return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
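/* Editorial sketch, not part of fold-const.c: the three (X & C1) | C2
   outcomes above, shown on an 8-bit unsigned type:
     (x & 0x0f) | 0xff  ->  0xff                  ((C1 & C2) == C1)
     (x & 0xf0) | 0x0f  ->  x | 0x0f              ((C1 | C2) == ~0)
     (x & 0x3c) | 0x0f  ->  (x & 0x30) | 0x0f     (C1 &= ~C2)  */
static unsigned char
ior_and_canon (unsigned char x)
{
  return (x & 0x3c) | 0x0f;   /* C1 trimmed to 0x30 by the last case */
}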
9901 /* See if this can be simplified into a rotate first. If that
9902 is unsuccessful, continue in the association code. */
9903 goto bit_rotate;
9905 case BIT_XOR_EXPR:
9906 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9907 if (TREE_CODE (arg0) == BIT_AND_EXPR
9908 && INTEGRAL_TYPE_P (type)
9909 && integer_onep (TREE_OPERAND (arg0, 1))
9910 && integer_onep (arg1))
9911 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
9912 build_zero_cst (TREE_TYPE (arg0)));
9914 /* See if this can be simplified into a rotate first. If that
9915 is unsuccessful, continue in the association code. */
9916 goto bit_rotate;
9918 case BIT_AND_EXPR:
9919 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9920 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9921 && INTEGRAL_TYPE_P (type)
9922 && integer_onep (TREE_OPERAND (arg0, 1))
9923 && integer_onep (arg1))
9925 tree tem2;
9926 tem = TREE_OPERAND (arg0, 0);
9927 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9928 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9929 tem, tem2);
9930 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9931 build_zero_cst (TREE_TYPE (tem)));
9933 /* Fold ~X & 1 as (X & 1) == 0. */
9934 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9935 && INTEGRAL_TYPE_P (type)
9936 && integer_onep (arg1))
9938 tree tem2;
9939 tem = TREE_OPERAND (arg0, 0);
9940 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
9941 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
9942 tem, tem2);
9943 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
9944 build_zero_cst (TREE_TYPE (tem)));
9946 /* Fold !X & 1 as X == 0. */
9947 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9948 && integer_onep (arg1))
9950 tem = TREE_OPERAND (arg0, 0);
9951 return fold_build2_loc (loc, EQ_EXPR, type, tem,
9952 build_zero_cst (TREE_TYPE (tem)));
9955 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
9956 multiple of 1 << CST. */
9957 if (TREE_CODE (arg1) == INTEGER_CST)
9959 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
9960 wide_int ncst1 = -cst1;
9961 if ((cst1 & ncst1) == ncst1
9962 && multiple_of_p (type, arg0,
9963 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
9964 return fold_convert_loc (loc, type, arg0);
9967 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
9968 bits from CST2. */
9969 if (TREE_CODE (arg1) == INTEGER_CST
9970 && TREE_CODE (arg0) == MULT_EXPR
9971 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9973 wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
9974 wide_int masked
9975 = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
9977 if (masked == 0)
9978 return omit_two_operands_loc (loc, type, build_zero_cst (type),
9979 arg0, arg1);
9980 else if (masked != warg1)
9982 /* Avoid the transform if arg1 is a mask of some
9983 mode which allows further optimizations. */
9984 int pop = wi::popcount (warg1);
9985 if (!(pop >= BITS_PER_UNIT
9986 && pow2p_hwi (pop)
9987 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
9988 return fold_build2_loc (loc, code, type, op0,
9989 wide_int_to_tree (type, masked));
9993 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
9994 ((A & N) + B) & M -> (A + B) & M
9995 Similarly if (N & M) == 0,
9996 ((A | N) + B) & M -> (A + B) & M
9997 and for - instead of + (or unary - instead of +)
9998 and/or ^ instead of |.
9999 If B is constant and (B & M) == 0, fold into A & M. */
10000 if (TREE_CODE (arg1) == INTEGER_CST)
10002 wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10003 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10004 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10005 && (TREE_CODE (arg0) == PLUS_EXPR
10006 || TREE_CODE (arg0) == MINUS_EXPR
10007 || TREE_CODE (arg0) == NEGATE_EXPR)
10008 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10009 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10011 tree pmop[2];
10012 int which = 0;
10013 wide_int cst0;
10015 /* Now we know that arg0 is (C + D) or (C - D) or
10016 -C and arg1 (M) is == (1LL << cst) - 1.
10017 Store C into PMOP[0] and D into PMOP[1]. */
10018 pmop[0] = TREE_OPERAND (arg0, 0);
10019 pmop[1] = NULL;
10020 if (TREE_CODE (arg0) != NEGATE_EXPR)
10022 pmop[1] = TREE_OPERAND (arg0, 1);
10023 which = 1;
10026 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10027 which = -1;
10029 for (; which >= 0; which--)
10030 switch (TREE_CODE (pmop[which]))
10032 case BIT_AND_EXPR:
10033 case BIT_IOR_EXPR:
10034 case BIT_XOR_EXPR:
10035 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10036 != INTEGER_CST)
10037 break;
10038 cst0 = wi::to_wide (TREE_OPERAND (pmop[which], 1)) & cst1;
10039 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10041 if (cst0 != cst1)
10042 break;
10044 else if (cst0 != 0)
10045 break;
10046 /* If C or D is of the form (A & N) where
10047 (N & M) == M, or of the form (A | N) or
10048 (A ^ N) where (N & M) == 0, replace it with A. */
10049 pmop[which] = TREE_OPERAND (pmop[which], 0);
10050 break;
10051 case INTEGER_CST:
10052 /* If C or D is a N where (N & M) == 0, it can be
10053 omitted (assumed 0). */
10054 if ((TREE_CODE (arg0) == PLUS_EXPR
10055 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10056 && (cst1 & wi::to_wide (pmop[which])) == 0)
10057 pmop[which] = NULL;
10058 break;
10059 default:
10060 break;
10063 /* Only build anything new if we optimized one or both arguments
10064 above. */
10065 if (pmop[0] != TREE_OPERAND (arg0, 0)
10066 || (TREE_CODE (arg0) != NEGATE_EXPR
10067 && pmop[1] != TREE_OPERAND (arg0, 1)))
10069 tree utype = TREE_TYPE (arg0);
10070 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10072 /* Perform the operations in a type that has defined
10073 overflow behavior. */
10074 utype = unsigned_type_for (TREE_TYPE (arg0));
10075 if (pmop[0] != NULL)
10076 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10077 if (pmop[1] != NULL)
10078 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10081 if (TREE_CODE (arg0) == NEGATE_EXPR)
10082 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10083 else if (TREE_CODE (arg0) == PLUS_EXPR)
10085 if (pmop[0] != NULL && pmop[1] != NULL)
10086 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10087 pmop[0], pmop[1]);
10088 else if (pmop[0] != NULL)
10089 tem = pmop[0];
10090 else if (pmop[1] != NULL)
10091 tem = pmop[1];
10092 else
10093 return build_int_cst (type, 0);
10095 else if (pmop[0] == NULL)
10096 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10097 else
10098 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10099 pmop[0], pmop[1]);
10100 /* TEM is now the new binary +, - or unary - replacement. */
10101 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10102 fold_convert_loc (loc, utype, arg1));
10103 return fold_convert_loc (loc, type, tem);
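/* Editorial sketch, not part of fold-const.c: the ((A & N) + B) & M fold
   with M == 15.  Bits of A at or above bit 4 cannot affect the masked
   sum, so the inner mask may be dropped.  */
static unsigned
masked_sum (unsigned a, unsigned b)
{
  return ((a & 15u) + b) & 15u;   /* folds to (a + b) & 15u */
}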
10108 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10109 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10110 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10112 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10114 wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
10115 if (mask == -1)
10116 return
10117 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10120 goto associate;
10122 case RDIV_EXPR:
10123 /* Don't touch a floating-point divide by zero unless the mode
10124 of the constant can represent infinity. */
10125 if (TREE_CODE (arg1) == REAL_CST
10126 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10127 && real_zerop (arg1))
10128 return NULL_TREE;
10130 /* (-A) / (-B) -> A / B */
10131 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10132 return fold_build2_loc (loc, RDIV_EXPR, type,
10133 TREE_OPERAND (arg0, 0),
10134 negate_expr (arg1));
10135 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10136 return fold_build2_loc (loc, RDIV_EXPR, type,
10137 negate_expr (arg0),
10138 TREE_OPERAND (arg1, 0));
10139 return NULL_TREE;
10141 case TRUNC_DIV_EXPR:
10142 /* Fall through */
10144 case FLOOR_DIV_EXPR:
10145 /* Simplify A / (B << N) where A and B are positive and B is
10146 a power of 2, to A >> (N + log2(B)). */
10147 strict_overflow_p = false;
10148 if (TREE_CODE (arg1) == LSHIFT_EXPR
10149 && (TYPE_UNSIGNED (type)
10150 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10152 tree sval = TREE_OPERAND (arg1, 0);
10153 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10155 tree sh_cnt = TREE_OPERAND (arg1, 1);
10156 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10157 wi::exact_log2 (wi::to_wide (sval)));
10159 if (strict_overflow_p)
10160 fold_overflow_warning (("assuming signed overflow does not "
10161 "occur when simplifying A / (B << N)"),
10162 WARN_STRICT_OVERFLOW_MISC);
10164 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10165 sh_cnt, pow2);
10166 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10167 fold_convert_loc (loc, type, arg0), sh_cnt);
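/* Editorial sketch, not part of fold-const.c: A / (B << N) with A
   unsigned and B == 4 a power of two; the division becomes a single
   right shift by N + log2 (B).  */
static unsigned
div_by_shifted_pow2 (unsigned a, unsigned n)
{
  /* Assumes n <= 29 so the shifted divisor stays in range.  */
  return a / (4u << n);   /* folds to a >> (n + 2) */
}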
10171 /* Fall through */
10173 case ROUND_DIV_EXPR:
10174 case CEIL_DIV_EXPR:
10175 case EXACT_DIV_EXPR:
10176 if (integer_zerop (arg1))
10177 return NULL_TREE;
10179 /* Convert -A / -B to A / B when the type is signed and overflow is
10180 undefined. */
10181 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10182 && TREE_CODE (op0) == NEGATE_EXPR
10183 && negate_expr_p (op1))
10185 if (INTEGRAL_TYPE_P (type))
10186 fold_overflow_warning (("assuming signed overflow does not occur "
10187 "when distributing negation across "
10188 "division"),
10189 WARN_STRICT_OVERFLOW_MISC);
10190 return fold_build2_loc (loc, code, type,
10191 fold_convert_loc (loc, type,
10192 TREE_OPERAND (arg0, 0)),
10193 negate_expr (op1));
10195 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10196 && TREE_CODE (arg1) == NEGATE_EXPR
10197 && negate_expr_p (op0))
10199 if (INTEGRAL_TYPE_P (type))
10200 fold_overflow_warning (("assuming signed overflow does not occur "
10201 "when distributing negation across "
10202 "division"),
10203 WARN_STRICT_OVERFLOW_MISC);
10204 return fold_build2_loc (loc, code, type,
10205 negate_expr (op0),
10206 fold_convert_loc (loc, type,
10207 TREE_OPERAND (arg1, 0)));
10210 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10211 operation, EXACT_DIV_EXPR.
10213 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10214 At one time others generated faster code; it's not clear if they do
10215 after the last round of changes to the DIV code in expmed.c. */
10216 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10217 && multiple_of_p (type, arg0, arg1))
10218 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10219 fold_convert (type, arg0),
10220 fold_convert (type, arg1));
10222 strict_overflow_p = false;
10223 if (TREE_CODE (arg1) == INTEGER_CST
10224 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10225 &strict_overflow_p)))
10227 if (strict_overflow_p)
10228 fold_overflow_warning (("assuming signed overflow does not occur "
10229 "when simplifying division"),
10230 WARN_STRICT_OVERFLOW_MISC);
10231 return fold_convert_loc (loc, type, tem);
10234 return NULL_TREE;
10236 case CEIL_MOD_EXPR:
10237 case FLOOR_MOD_EXPR:
10238 case ROUND_MOD_EXPR:
10239 case TRUNC_MOD_EXPR:
10240 strict_overflow_p = false;
10241 if (TREE_CODE (arg1) == INTEGER_CST
10242 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10243 &strict_overflow_p)))
10245 if (strict_overflow_p)
10246 fold_overflow_warning (("assuming signed overflow does not occur "
10247 "when simplifying modulus"),
10248 WARN_STRICT_OVERFLOW_MISC);
10249 return fold_convert_loc (loc, type, tem);
10252 return NULL_TREE;
10254 case LROTATE_EXPR:
10255 case RROTATE_EXPR:
10256 case RSHIFT_EXPR:
10257 case LSHIFT_EXPR:
10258 /* Since a negative shift count is not well-defined,
10259 don't try to compute it in the compiler. */
10260 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10261 return NULL_TREE;
10263 prec = element_precision (type);
10265 /* If we have a rotate of a bit operation with the rotate count and
10266 the second operand of the bit operation both constant,
10267 permute the two operations. */
10268 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10269 && (TREE_CODE (arg0) == BIT_AND_EXPR
10270 || TREE_CODE (arg0) == BIT_IOR_EXPR
10271 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10272 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10274 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10275 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10276 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10277 fold_build2_loc (loc, code, type,
10278 arg00, arg1),
10279 fold_build2_loc (loc, code, type,
10280 arg01, arg1));
10283 /* Two consecutive rotates adding up to some integer
10284 multiple of the precision of the type can be ignored. */
10285 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10286 && TREE_CODE (arg0) == RROTATE_EXPR
10287 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10288 && wi::umod_trunc (wi::to_wide (arg1)
10289 + wi::to_wide (TREE_OPERAND (arg0, 1)),
10290 prec) == 0)
10291 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10293 return NULL_TREE;
10295 case MIN_EXPR:
10296 case MAX_EXPR:
10297 goto associate;
10299 case TRUTH_ANDIF_EXPR:
10300 /* Note that the operands of this must be ints
10301 and their values must be 0 or 1.
10302 ("true" is a fixed value perhaps depending on the language.) */
10303 /* If first arg is constant zero, return it. */
10304 if (integer_zerop (arg0))
10305 return fold_convert_loc (loc, type, arg0);
10306 /* FALLTHRU */
10307 case TRUTH_AND_EXPR:
10308 /* If either arg is constant true, drop it. */
10309 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10310 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10311 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10312 /* Preserve sequence points. */
10313 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10314 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10315 /* If second arg is constant zero, result is zero, but first arg
10316 must be evaluated. */
10317 if (integer_zerop (arg1))
10318 return omit_one_operand_loc (loc, type, arg1, arg0);
10319 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10320 case will be handled here. */
10321 if (integer_zerop (arg0))
10322 return omit_one_operand_loc (loc, type, arg0, arg1);
10324 /* !X && X is always false. */
10325 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10326 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10327 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10328 /* X && !X is always false. */
10329 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10330 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10331 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10333 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10334 means A >= Y && A != MAX, but in this case we know that
10335 A < X <= MAX. */
10337 if (!TREE_SIDE_EFFECTS (arg0)
10338 && !TREE_SIDE_EFFECTS (arg1))
10340 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10341 if (tem && !operand_equal_p (tem, arg0, 0))
10342 return fold_build2_loc (loc, code, type, tem, arg1);
10344 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10345 if (tem && !operand_equal_p (tem, arg1, 0))
10346 return fold_build2_loc (loc, code, type, arg0, tem);
10349 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10350 != NULL_TREE)
10351 return tem;
10353 return NULL_TREE;
10355 case TRUTH_ORIF_EXPR:
10356 /* Note that the operands of this must be ints
10357 and their values must be 0 or true.
10358 ("true" is a fixed value perhaps depending on the language.) */
10359 /* If first arg is constant true, return it. */
10360 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10361 return fold_convert_loc (loc, type, arg0);
10362 /* FALLTHRU */
10363 case TRUTH_OR_EXPR:
10364 /* If either arg is constant zero, drop it. */
10365 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10366 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10367 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10368 /* Preserve sequence points. */
10369 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10370 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10371 /* If second arg is constant true, result is true, but we must
10372 evaluate first arg. */
10373 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10374 return omit_one_operand_loc (loc, type, arg1, arg0);
10375 /* Likewise for first arg, but note this only occurs here for
10376 TRUTH_OR_EXPR. */
10377 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10378 return omit_one_operand_loc (loc, type, arg0, arg1);
10380 /* !X || X is always true. */
10381 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10382 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10383 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10384 /* X || !X is always true. */
10385 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10386 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10387 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10389 /* (X && !Y) || (!X && Y) is X ^ Y */
10390 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10391 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10393 tree a0, a1, l0, l1, n0, n1;
10395 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10396 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10398 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10399 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10401 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10402 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10404 if ((operand_equal_p (n0, a0, 0)
10405 && operand_equal_p (n1, a1, 0))
10406 || (operand_equal_p (n0, a1, 0)
10407 && operand_equal_p (n1, a0, 0)))
10408 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
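/* Editorial sketch, not part of fold-const.c: on 0/1 truth values the
   disjunction of the two exclusive conjunctions is plain inequality,
   which is what the TRUTH_XOR_EXPR built above expresses.  */
static int
xor_from_and_or (int x, int y)
{
  return (x && !y) || (!x && y);   /* folds to x ^ y for 0/1 inputs */
}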
10411 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10412 != NULL_TREE)
10413 return tem;
10415 return NULL_TREE;
10417 case TRUTH_XOR_EXPR:
10418 /* If the second arg is constant zero, drop it. */
10419 if (integer_zerop (arg1))
10420 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10421 /* If the second arg is constant true, this is a logical inversion. */
10422 if (integer_onep (arg1))
10424 tem = invert_truthvalue_loc (loc, arg0);
10425 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10427 /* Identical arguments cancel to zero. */
10428 if (operand_equal_p (arg0, arg1, 0))
10429 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10431 /* !X ^ X is always true. */
10432 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10433 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10434 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10436 /* X ^ !X is always true. */
10437 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10438 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10439 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10441 return NULL_TREE;
10443 case EQ_EXPR:
10444 case NE_EXPR:
10445 STRIP_NOPS (arg0);
10446 STRIP_NOPS (arg1);
10448 tem = fold_comparison (loc, code, type, op0, op1);
10449 if (tem != NULL_TREE)
10450 return tem;
10452 /* bool_var != 1 becomes !bool_var. */
10453 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10454 && code == NE_EXPR)
10455 return fold_convert_loc (loc, type,
10456 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10457 TREE_TYPE (arg0), arg0));
10459 /* bool_var == 0 becomes !bool_var. */
10460 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10461 && code == EQ_EXPR)
10462 return fold_convert_loc (loc, type,
10463 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10464 TREE_TYPE (arg0), arg0));
10466 /* !exp != 0 becomes !exp */
10467 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10468 && code == NE_EXPR)
10469 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10471 /* If this is an EQ or NE comparison with zero and ARG0 is
10472 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10473 two operations, but the latter can be done in one less insn
10474 on machines that have only two-operand insns or on which a
10475 constant cannot be the first operand. */
10476 if (TREE_CODE (arg0) == BIT_AND_EXPR
10477 && integer_zerop (arg1))
10479 tree arg00 = TREE_OPERAND (arg0, 0);
10480 tree arg01 = TREE_OPERAND (arg0, 1);
10481 if (TREE_CODE (arg00) == LSHIFT_EXPR
10482 && integer_onep (TREE_OPERAND (arg00, 0)))
10484 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10485 arg01, TREE_OPERAND (arg00, 1));
10486 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10487 build_int_cst (TREE_TYPE (arg0), 1));
10488 return fold_build2_loc (loc, code, type,
10489 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10490 arg1);
10492 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10493 && integer_onep (TREE_OPERAND (arg01, 0)))
10495 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10496 arg00, TREE_OPERAND (arg01, 1));
10497 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10498 build_int_cst (TREE_TYPE (arg0), 1));
10499 return fold_build2_loc (loc, code, type,
10500 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10501 arg1);
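/* Editorial sketch, not part of fold-const.c: both forms test the same
   single bit; the folder prefers the shift-right form, which can save an
   instruction on two-operand machines or where a constant cannot be the
   first operand.  */
static int
single_bit_test (unsigned bar, unsigned foo)
{
  return ((1u << foo) & bar) != 0;   /* folds to ((bar >> foo) & 1) != 0 */
}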
10505 /* If this is an NE or EQ comparison of zero against the result of a
10506 signed MOD operation whose second operand is a power of 2, make
10507 the MOD operation unsigned since it is simpler and equivalent. */
10508 if (integer_zerop (arg1)
10509 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10510 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10511 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10512 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10513 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10514 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10516 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10517 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10518 fold_convert_loc (loc, newtype,
10519 TREE_OPERAND (arg0, 0)),
10520 fold_convert_loc (loc, newtype,
10521 TREE_OPERAND (arg0, 1)));
10523 return fold_build2_loc (loc, code, type, newmod,
10524 fold_convert_loc (loc, newtype, arg1));
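/* Editorial sketch, not part of fold-const.c: because 2^32 is a multiple
   of 8, x and (unsigned) x agree modulo 8, so an equality-with-zero test
   may use the cheaper unsigned modulus.  */
static int
mod_pow2_is_zero (int x)
{
  return x % 8 == 0;   /* folds like (unsigned) x % 8u == 0 */
}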
10527 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10528 C1 is a valid shift constant, and C2 is a power of two, i.e.
10529 a single bit. */
10530 if (TREE_CODE (arg0) == BIT_AND_EXPR
10531 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10532 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10533 == INTEGER_CST
10534 && integer_pow2p (TREE_OPERAND (arg0, 1))
10535 && integer_zerop (arg1))
10537 tree itype = TREE_TYPE (arg0);
10538 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10539 prec = TYPE_PRECISION (itype);
10541 /* Check for a valid shift count. */
10542 if (wi::ltu_p (wi::to_wide (arg001), prec))
10544 tree arg01 = TREE_OPERAND (arg0, 1);
10545 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10546 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10547 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10548 can be rewritten as (X & (C2 << C1)) != 0. */
10549 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10551 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10552 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10553 return fold_build2_loc (loc, code, type, tem,
10554 fold_convert_loc (loc, itype, arg1));
10556 /* Otherwise, for signed (arithmetic) shifts,
10557 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10558 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10559 else if (!TYPE_UNSIGNED (itype))
10560 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10561 arg000, build_int_cst (itype, 0));
10562 /* Otherwise, for unsigned (logical) shifts,
10563 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10564 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10565 else
10566 return omit_one_operand_loc (loc, type,
10567 code == EQ_EXPR ? integer_one_node
10568 : integer_zero_node,
10569 arg000);
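/* Editorial sketch, not part of fold-const.c: the in-range case of the
   rewrite above on 32-bit values.  With C1 == 3 and C2 == 4, C2 << C1
   does not overflow, so the shift moves onto the constant.  */
static int
shifted_mask_test (unsigned x)
{
  return ((x >> 3) & 4u) != 0;   /* folds to (x & (4u << 3)) != 0 */
}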
10573 /* If this is a comparison of a field, we may be able to simplify it. */
10574 if ((TREE_CODE (arg0) == COMPONENT_REF
10575 || TREE_CODE (arg0) == BIT_FIELD_REF)
10576 /* Handle the constant case even without -O
10577 to make sure the warnings are given. */
10578 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10580 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10581 if (t1)
10582 return t1;
10585 /* Optimize comparisons of strlen vs zero to a compare of the
10586 first character of the string vs zero. To wit,
10587 strlen(ptr) == 0 => *ptr == 0
10588 strlen(ptr) != 0 => *ptr != 0
10589 Other cases should reduce to one of these two (or a constant)
10590 due to the return value of strlen being unsigned. */
10591 if (TREE_CODE (arg0) == CALL_EXPR
10592 && integer_zerop (arg1))
10594 tree fndecl = get_callee_fndecl (arg0);
10596 if (fndecl
10597 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10598 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10599 && call_expr_nargs (arg0) == 1
10600 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10602 tree iref = build_fold_indirect_ref_loc (loc,
10603 CALL_EXPR_ARG (arg0, 0));
10604 return fold_build2_loc (loc, code, type, iref,
10605 build_int_cst (TREE_TYPE (iref), 0));
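/* Editorial sketch, not part of fold-const.c: strlen (p) is zero exactly
   when the first character is the terminator, so the call disappears.  */
static int
strlen_is_zero (const char *p)
{
  return *p == 0;   /* what strlen (p) == 0 folds to */
}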
10609 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10610 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10611 if (TREE_CODE (arg0) == RSHIFT_EXPR
10612 && integer_zerop (arg1)
10613 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10615 tree arg00 = TREE_OPERAND (arg0, 0);
10616 tree arg01 = TREE_OPERAND (arg0, 1);
10617 tree itype = TREE_TYPE (arg00);
10618 if (wi::to_wide (arg01) == element_precision (itype) - 1)
10620 if (TYPE_UNSIGNED (itype))
10622 itype = signed_type_for (itype);
10623 arg00 = fold_convert_loc (loc, itype, arg00);
10625 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10626 type, arg00, build_zero_cst (itype));
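/* Editorial sketch, not part of fold-const.c: shifting a 32-bit signed
   value right by 31 leaves only the sign (GCC's >> on signed types is
   arithmetic), so the comparison collapses to a sign test.  */
static int
high_shift_is_sign_test (int x)
{
  return (x >> 31) != 0;   /* folds to x < 0 */
}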
10630 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10631 (X & C) == 0 when C is a single bit. */
10632 if (TREE_CODE (arg0) == BIT_AND_EXPR
10633 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10634 && integer_zerop (arg1)
10635 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10637 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10638 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10639 TREE_OPERAND (arg0, 1));
10640 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10641 type, tem,
10642 fold_convert_loc (loc, TREE_TYPE (arg0),
10643 arg1));
10646 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10647 constant C is a power of two, i.e. a single bit. */
10648 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10649 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10650 && integer_zerop (arg1)
10651 && integer_pow2p (TREE_OPERAND (arg0, 1))
10652 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10653 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10655 tree arg00 = TREE_OPERAND (arg0, 0);
10656 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10657 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10660 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10661 when C is a power of two, i.e. a single bit. */
10662 if (TREE_CODE (arg0) == BIT_AND_EXPR
10663 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10664 && integer_zerop (arg1)
10665 && integer_pow2p (TREE_OPERAND (arg0, 1))
10666 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10667 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10669 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10670 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10671 arg000, TREE_OPERAND (arg0, 1));
10672 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10673 tem, build_int_cst (TREE_TYPE (tem), 0));
10676 if (integer_zerop (arg1)
10677 && tree_expr_nonzero_p (arg0))
10679 tree res = constant_boolean_node (code == NE_EXPR, type);
10680 return omit_one_operand_loc (loc, type, res, arg0);
10683 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
10684 if (TREE_CODE (arg0) == BIT_AND_EXPR
10685 && TREE_CODE (arg1) == BIT_AND_EXPR)
10687 tree arg00 = TREE_OPERAND (arg0, 0);
10688 tree arg01 = TREE_OPERAND (arg0, 1);
10689 tree arg10 = TREE_OPERAND (arg1, 0);
10690 tree arg11 = TREE_OPERAND (arg1, 1);
10691 tree itype = TREE_TYPE (arg0);
10693 if (operand_equal_p (arg01, arg11, 0))
10695 tem = fold_convert_loc (loc, itype, arg10);
10696 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10697 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10698 return fold_build2_loc (loc, code, type, tem,
10699 build_zero_cst (itype));
10701 if (operand_equal_p (arg01, arg10, 0))
10703 tem = fold_convert_loc (loc, itype, arg11);
10704 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10705 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10706 return fold_build2_loc (loc, code, type, tem,
10707 build_zero_cst (itype));
10709 if (operand_equal_p (arg00, arg11, 0))
10711 tem = fold_convert_loc (loc, itype, arg10);
10712 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10713 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10714 return fold_build2_loc (loc, code, type, tem,
10715 build_zero_cst (itype));
10717 if (operand_equal_p (arg00, arg10, 0))
10719 tem = fold_convert_loc (loc, itype, arg11);
10720 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10721 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10722 return fold_build2_loc (loc, code, type, tem,
10723 build_zero_cst (itype));
10727 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10728 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10730 tree arg00 = TREE_OPERAND (arg0, 0);
10731 tree arg01 = TREE_OPERAND (arg0, 1);
10732 tree arg10 = TREE_OPERAND (arg1, 0);
10733 tree arg11 = TREE_OPERAND (arg1, 1);
10734 tree itype = TREE_TYPE (arg0);
10736 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10737 operand_equal_p guarantees no side-effects so we don't need
10738 to use omit_one_operand on Z. */
10739 if (operand_equal_p (arg01, arg11, 0))
10740 return fold_build2_loc (loc, code, type, arg00,
10741 fold_convert_loc (loc, TREE_TYPE (arg00),
10742 arg10));
10743 if (operand_equal_p (arg01, arg10, 0))
10744 return fold_build2_loc (loc, code, type, arg00,
10745 fold_convert_loc (loc, TREE_TYPE (arg00),
10746 arg11));
10747 if (operand_equal_p (arg00, arg11, 0))
10748 return fold_build2_loc (loc, code, type, arg01,
10749 fold_convert_loc (loc, TREE_TYPE (arg01),
10750 arg10));
10751 if (operand_equal_p (arg00, arg10, 0))
10752 return fold_build2_loc (loc, code, type, arg01,
10753 fold_convert_loc (loc, TREE_TYPE (arg01),
10754 arg11));
10756 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
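	  /* For instance, (x ^ 5) == (y ^ 3) folds to (x ^ 6) == y,
	     since 5 ^ 3 == 6.  */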
10757 if (TREE_CODE (arg01) == INTEGER_CST
10758 && TREE_CODE (arg11) == INTEGER_CST)
10760 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10761 fold_convert_loc (loc, itype, arg11));
10762 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10763 return fold_build2_loc (loc, code, type, tem,
10764 fold_convert_loc (loc, itype, arg10));
10768 /* Attempt to simplify equality/inequality comparisons of complex
10769 values. Only lower the comparison if the result is known or
10770 can be simplified to a single scalar comparison. */
10771 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10772 || TREE_CODE (arg0) == COMPLEX_CST)
10773 && (TREE_CODE (arg1) == COMPLEX_EXPR
10774 || TREE_CODE (arg1) == COMPLEX_CST))
10776 tree real0, imag0, real1, imag1;
10777 tree rcond, icond;
10779 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10781 real0 = TREE_OPERAND (arg0, 0);
10782 imag0 = TREE_OPERAND (arg0, 1);
10784 else
10786 real0 = TREE_REALPART (arg0);
10787 imag0 = TREE_IMAGPART (arg0);
10790 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10792 real1 = TREE_OPERAND (arg1, 0);
10793 imag1 = TREE_OPERAND (arg1, 1);
10795 else
10797 real1 = TREE_REALPART (arg1);
10798 imag1 = TREE_IMAGPART (arg1);
10801 rcond = fold_binary_loc (loc, code, type, real0, real1);
10802 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
10804 if (integer_zerop (rcond))
10806 if (code == EQ_EXPR)
10807 return omit_two_operands_loc (loc, type, boolean_false_node,
10808 imag0, imag1);
10809 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
10811 else
10813 if (code == NE_EXPR)
10814 return omit_two_operands_loc (loc, type, boolean_true_node,
10815 imag0, imag1);
10816 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
10820 icond = fold_binary_loc (loc, code, type, imag0, imag1);
10821 if (icond && TREE_CODE (icond) == INTEGER_CST)
10823 if (integer_zerop (icond))
10825 if (code == EQ_EXPR)
10826 return omit_two_operands_loc (loc, type, boolean_false_node,
10827 real0, real1);
10828 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
10830 else
10832 if (code == NE_EXPR)
10833 return omit_two_operands_loc (loc, type, boolean_true_node,
10834 real0, real1);
10835 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
10840 return NULL_TREE;
10842 case LT_EXPR:
10843 case GT_EXPR:
10844 case LE_EXPR:
10845 case GE_EXPR:
10846 tem = fold_comparison (loc, code, type, op0, op1);
10847 if (tem != NULL_TREE)
10848 return tem;
10850 /* Transform comparisons of the form X +- C CMP X. */
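      /* For instance, for a double x, x - 1.0 > x folds to false (this is
	 safe even for NaN operands, since NaN > NaN is also false), while
	 x + 1.0 >= x folds to true only when NaNs need not be honored.  */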
10851 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10852 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10853 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10854 && !HONOR_SNANS (arg0))
10856 tree arg01 = TREE_OPERAND (arg0, 1);
10857 enum tree_code code0 = TREE_CODE (arg0);
10858 int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10860 /* (X - c) > X becomes false. */
10861 if (code == GT_EXPR
10862 && ((code0 == MINUS_EXPR && is_positive >= 0)
10863 || (code0 == PLUS_EXPR && is_positive <= 0)))
10864 return constant_boolean_node (0, type);
10866 /* Likewise (X + c) < X becomes false. */
10867 if (code == LT_EXPR
10868 && ((code0 == PLUS_EXPR && is_positive >= 0)
10869 || (code0 == MINUS_EXPR && is_positive <= 0)))
10870 return constant_boolean_node (0, type);
10872 /* Convert (X - c) <= X to true. */
10873 if (!HONOR_NANS (arg1)
10874 && code == LE_EXPR
10875 && ((code0 == MINUS_EXPR && is_positive >= 0)
10876 || (code0 == PLUS_EXPR && is_positive <= 0)))
10877 return constant_boolean_node (1, type);
10879 /* Convert (X + c) >= X to true. */
10880 if (!HONOR_NANS (arg1)
10881 && code == GE_EXPR
10882 && ((code0 == PLUS_EXPR && is_positive >= 0)
10883 || (code0 == MINUS_EXPR && is_positive <= 0)))
10884 return constant_boolean_node (1, type);
10887 /* If we are comparing an ABS_EXPR with a constant, we can
10888 convert all the cases into explicit comparisons, but they may
10889 well not be faster than doing the ABS and one comparison.
10890 But ABS (X) <= C is a range comparison, which becomes a subtraction
10891 and a comparison, and is probably faster. */
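      /* For instance, ABS (x) <= 7 becomes x >= -7 && x <= 7.  */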
10892 if (code == LE_EXPR
10893 && TREE_CODE (arg1) == INTEGER_CST
10894 && TREE_CODE (arg0) == ABS_EXPR
10895 && ! TREE_SIDE_EFFECTS (arg0)
10896 && (0 != (tem = negate_expr (arg1)))
10897 && TREE_CODE (tem) == INTEGER_CST
10898 && !TREE_OVERFLOW (tem))
10899 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
10900 build2 (GE_EXPR, type,
10901 TREE_OPERAND (arg0, 0), tem),
10902 build2 (LE_EXPR, type,
10903 TREE_OPERAND (arg0, 0), arg1));
10905 /* Convert ABS_EXPR<x> >= 0 to true. */
10906 strict_overflow_p = false;
10907 if (code == GE_EXPR
10908 && (integer_zerop (arg1)
10909 || (! HONOR_NANS (arg0)
10910 && real_zerop (arg1)))
10911 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
10913 if (strict_overflow_p)
10914 fold_overflow_warning (("assuming signed overflow does not occur "
10915 "when simplifying comparison of "
10916 "absolute value and zero"),
10917 WARN_STRICT_OVERFLOW_CONDITIONAL);
10918 return omit_one_operand_loc (loc, type,
10919 constant_boolean_node (true, type),
10920 arg0);
10923 /* Convert ABS_EXPR<x> < 0 to false. */
10924 strict_overflow_p = false;
10925 if (code == LT_EXPR
10926 && (integer_zerop (arg1) || real_zerop (arg1))
10927 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
10929 if (strict_overflow_p)
10930 fold_overflow_warning (("assuming signed overflow does not occur "
10931 "when simplifying comparison of "
10932 "absolute value and zero"),
10933 WARN_STRICT_OVERFLOW_CONDITIONAL);
10934 return omit_one_operand_loc (loc, type,
10935 constant_boolean_node (false, type),
10936 arg0);
10939 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
10940 and similarly for >= into !=. */
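      /* For instance, for unsigned x, x < (1 << y) becomes (x >> y) == 0,
	 since x < 2**y holds exactly when all bits of x at or above
	 position y are zero.  */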
10941 if ((code == LT_EXPR || code == GE_EXPR)
10942 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10943 && TREE_CODE (arg1) == LSHIFT_EXPR
10944 && integer_onep (TREE_OPERAND (arg1, 0)))
10945 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10946 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10947 TREE_OPERAND (arg1, 1)),
10948 build_zero_cst (TREE_TYPE (arg0)));
10950 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
10951 otherwise Y might be >= # of bits in X's type and thus e.g.
10952          (unsigned char) (1 << Y) for Y == 15 might be 0.
10953 If the cast is widening, then 1 << Y should have unsigned type,
10954 otherwise if Y is number of bits in the signed shift type minus 1,
10955          we can't optimize this.  E.g. (unsigned long long) (1 << Y) for
10956          Y == 31 might be 0xffffffff80000000. */
10957 if ((code == LT_EXPR || code == GE_EXPR)
10958 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10959 && CONVERT_EXPR_P (arg1)
10960 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
10961 && (element_precision (TREE_TYPE (arg1))
10962 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
10963 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
10964 || (element_precision (TREE_TYPE (arg1))
10965 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
10966 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
10968 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10969 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
10970 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10971 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
10972 build_zero_cst (TREE_TYPE (arg0)));
10975 return NULL_TREE;
10977 case UNORDERED_EXPR:
10978 case ORDERED_EXPR:
10979 case UNLT_EXPR:
10980 case UNLE_EXPR:
10981 case UNGT_EXPR:
10982 case UNGE_EXPR:
10983 case UNEQ_EXPR:
10984 case LTGT_EXPR:
10985 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
10987 tree targ0 = strip_float_extensions (arg0);
10988 tree targ1 = strip_float_extensions (arg1);
10989 tree newtype = TREE_TYPE (targ0);
10991 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
10992 newtype = TREE_TYPE (targ1);
10994 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
10995 return fold_build2_loc (loc, code, type,
10996 fold_convert_loc (loc, newtype, targ0),
10997 fold_convert_loc (loc, newtype, targ1));
11000 return NULL_TREE;
11002 case COMPOUND_EXPR:
11003 /* When pedantic, a compound expression can be neither an lvalue
11004 nor an integer constant expression. */
11005 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11006 return NULL_TREE;
11007       /* Don't let (0, 0) be a null pointer constant. */
11008 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11009 : fold_convert_loc (loc, type, arg1);
11010 return pedantic_non_lvalue_loc (loc, tem);
11012 case ASSERT_EXPR:
11013 /* An ASSERT_EXPR should never be passed to fold_binary. */
11014 gcc_unreachable ();
11016 default:
11017 return NULL_TREE;
11018 } /* switch (code) */
11021 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11022 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11023 of GOTO_EXPR. */
11025 static tree
11026 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11028 switch (TREE_CODE (*tp))
11030 case LABEL_EXPR:
11031 return *tp;
11033 case GOTO_EXPR:
11034 *walk_subtrees = 0;
11036 /* fall through */
11038 default:
11039 return NULL_TREE;
11043 /* Return whether the sub-tree ST contains a label which is accessible from
11044 outside the sub-tree. */
11046 static bool
11047 contains_label_p (tree st)
11049 return
11050     (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
11053 /* Fold a ternary expression of code CODE and type TYPE with operands
11054 OP0, OP1, and OP2. Return the folded expression if folding is
11055 successful. Otherwise, return NULL_TREE. */
11057 tree
11058 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11059 tree op0, tree op1, tree op2)
11061 tree tem;
11062 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11063 enum tree_code_class kind = TREE_CODE_CLASS (code);
11065 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11066 && TREE_CODE_LENGTH (code) == 3);
11068 /* If this is a commutative operation, and OP0 is a constant, move it
11069 to OP1 to reduce the number of tests below. */
11070 if (commutative_ternary_tree_code (code)
11071 && tree_swap_operands_p (op0, op1))
11072 return fold_build3_loc (loc, code, type, op1, op0, op2);
11074 tem = generic_simplify (loc, code, type, op0, op1, op2);
11075 if (tem)
11076 return tem;
11078 /* Strip any conversions that don't change the mode. This is safe
11079 for every expression, except for a comparison expression because
11080 its signedness is derived from its operands. So, in the latter
11081 case, only strip conversions that don't change the signedness.
11083 Note that this is done as an internal manipulation within the
11084 constant folder, in order to find the simplest representation of
11085      the arguments so that their form can be studied.  In any case,
11086 the appropriate type conversions should be put back in the tree
11087 that will get out of the constant folder. */
11088 if (op0)
11090 arg0 = op0;
11091 STRIP_NOPS (arg0);
11094 if (op1)
11096 arg1 = op1;
11097 STRIP_NOPS (arg1);
11100 if (op2)
11102 arg2 = op2;
11103 STRIP_NOPS (arg2);
11106 switch (code)
11108 case COMPONENT_REF:
11109 if (TREE_CODE (arg0) == CONSTRUCTOR
11110 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11112 unsigned HOST_WIDE_INT idx;
11113 tree field, value;
11114 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11115 if (field == arg1)
11116 return value;
11118 return NULL_TREE;
11120 case COND_EXPR:
11121 case VEC_COND_EXPR:
11122 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11123 so all simple results must be passed through pedantic_non_lvalue. */
11124 if (TREE_CODE (arg0) == INTEGER_CST)
11126 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11127 tem = integer_zerop (arg0) ? op2 : op1;
11128 /* Only optimize constant conditions when the selected branch
11129 has the same type as the COND_EXPR. This avoids optimizing
11130 away "c ? x : throw", where the throw has a void type.
11131          Avoid throwing away the operand which contains a label. */
11132 if ((!TREE_SIDE_EFFECTS (unused_op)
11133 || !contains_label_p (unused_op))
11134 && (! VOID_TYPE_P (TREE_TYPE (tem))
11135 || VOID_TYPE_P (type)))
11136 return pedantic_non_lvalue_loc (loc, tem);
11137 return NULL_TREE;
11139 else if (TREE_CODE (arg0) == VECTOR_CST)
11141 if ((TREE_CODE (arg1) == VECTOR_CST
11142 || TREE_CODE (arg1) == CONSTRUCTOR)
11143 && (TREE_CODE (arg2) == VECTOR_CST
11144 || TREE_CODE (arg2) == CONSTRUCTOR))
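	    /* For instance, with four-element vectors a and b, a constant
	       mask of { -1, 0, -1, 0 } selects { a[0], b[1], a[2], b[3] },
	       i.e. the permutation { 0, 5, 2, 7 } of the concatenation
	       of a and b.  */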
11146 unsigned int nelts = VECTOR_CST_NELTS (arg0), i;
11147 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
11148 auto_vec_perm_indices sel (nelts);
11149 for (i = 0; i < nelts; i++)
11151 tree val = VECTOR_CST_ELT (arg0, i);
11152 if (integer_all_onesp (val))
11153 sel.quick_push (i);
11154 else if (integer_zerop (val))
11155 sel.quick_push (nelts + i);
11156 else /* Currently unreachable. */
11157 return NULL_TREE;
11159 tree t = fold_vec_perm (type, arg1, arg2, sel);
11160 if (t != NULL_TREE)
11161 return t;
11165 /* If we have A op B ? A : C, we may be able to convert this to a
11166 simpler expression, depending on the operation and the values
11167 of B and C. Signed zeros prevent all of these transformations,
11168 for reasons given above each one.
11170 Also try swapping the arguments and inverting the conditional. */
11171 if (COMPARISON_CLASS_P (arg0)
11172 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), arg1)
11173 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11175 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11176 if (tem)
11177 return tem;
11180 if (COMPARISON_CLASS_P (arg0)
11181 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
11182 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11184 location_t loc0 = expr_location_or (arg0, loc);
11185 tem = fold_invert_truthvalue (loc0, arg0);
11186 if (tem && COMPARISON_CLASS_P (tem))
11188 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11189 if (tem)
11190 return tem;
11194 /* If the second operand is simpler than the third, swap them
11195 since that produces better jump optimization results. */
11196 if (truth_value_p (TREE_CODE (arg0))
11197 && tree_swap_operands_p (op1, op2))
11199 location_t loc0 = expr_location_or (arg0, loc);
11200 /* See if this can be inverted. If it can't, possibly because
11201 it was a floating-point inequality comparison, don't do
11202 anything. */
11203 tem = fold_invert_truthvalue (loc0, arg0);
11204 if (tem)
11205 return fold_build3_loc (loc, code, type, tem, op2, op1);
11208 /* Convert A ? 1 : 0 to simply A. */
11209 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11210 : (integer_onep (op1)
11211 && !VECTOR_TYPE_P (type)))
11212 && integer_zerop (op2)
11213 /* If we try to convert OP0 to our type, the
11214 call to fold will try to move the conversion inside
11215 a COND, which will recurse. In that case, the COND_EXPR
11216 is probably the best choice, so leave it alone. */
11217 && type == TREE_TYPE (arg0))
11218 return pedantic_non_lvalue_loc (loc, arg0);
11220 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11221 over COND_EXPR in cases such as floating point comparisons. */
11222 if (integer_zerop (op1)
11223 && code == COND_EXPR
11224 && integer_onep (op2)
11225 && !VECTOR_TYPE_P (type)
11226 && truth_value_p (TREE_CODE (arg0)))
11227 return pedantic_non_lvalue_loc (loc,
11228 fold_convert_loc (loc, type,
11229 invert_truthvalue_loc (loc,
11230 arg0)));
11232 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
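      /* For instance, for a 32-bit int x, x < 0 ? INT_MIN : 0 folds to
	 x & INT_MIN, since INT_MIN is exactly the sign-bit pattern.  */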
11233 if (TREE_CODE (arg0) == LT_EXPR
11234 && integer_zerop (TREE_OPERAND (arg0, 1))
11235 && integer_zerop (op2)
11236 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11238 /* sign_bit_p looks through both zero and sign extensions,
11239 but for this optimization only sign extensions are
11240 usable. */
11241 tree tem2 = TREE_OPERAND (arg0, 0);
11242 while (tem != tem2)
11244 if (TREE_CODE (tem2) != NOP_EXPR
11245 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11247 tem = NULL_TREE;
11248 break;
11250 tem2 = TREE_OPERAND (tem2, 0);
11252 /* sign_bit_p only checks ARG1 bits within A's precision.
11253 If <sign bit of A> has wider type than A, bits outside
11254 of A's precision in <sign bit of A> need to be checked.
11255 If they are all 0, this optimization needs to be done
11256            in unsigned A's type; if they are all 1, in signed A's type;
11257            otherwise this can't be done. */
11258 if (tem
11259 && TYPE_PRECISION (TREE_TYPE (tem))
11260 < TYPE_PRECISION (TREE_TYPE (arg1))
11261 && TYPE_PRECISION (TREE_TYPE (tem))
11262 < TYPE_PRECISION (type))
11264 int inner_width, outer_width;
11265 tree tem_type;
11267 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11268 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11269 if (outer_width > TYPE_PRECISION (type))
11270 outer_width = TYPE_PRECISION (type);
11272 wide_int mask = wi::shifted_mask
11273 (inner_width, outer_width - inner_width, false,
11274 TYPE_PRECISION (TREE_TYPE (arg1)));
11276 wide_int common = mask & wi::to_wide (arg1);
11277 if (common == mask)
11279 tem_type = signed_type_for (TREE_TYPE (tem));
11280 tem = fold_convert_loc (loc, tem_type, tem);
11282 else if (common == 0)
11284 tem_type = unsigned_type_for (TREE_TYPE (tem));
11285 tem = fold_convert_loc (loc, tem_type, tem);
11287 else
11288 tem = NULL;
11291 if (tem)
11292 return
11293 fold_convert_loc (loc, type,
11294 fold_build2_loc (loc, BIT_AND_EXPR,
11295 TREE_TYPE (tem), tem,
11296 fold_convert_loc (loc,
11297 TREE_TYPE (tem),
11298 arg1)));
11301 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11302 already handled above. */
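      /* For instance, (x >> 3) & 1 ? 8 : 0 folds to x & 8, because the
	 shift count 3 matches the log2 of the selected constant 8.  */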
11303 if (TREE_CODE (arg0) == BIT_AND_EXPR
11304 && integer_onep (TREE_OPERAND (arg0, 1))
11305 && integer_zerop (op2)
11306 && integer_pow2p (arg1))
11308 tree tem = TREE_OPERAND (arg0, 0);
11309 STRIP_NOPS (tem);
11310 if (TREE_CODE (tem) == RSHIFT_EXPR
11311 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11312 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11313 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11314 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11315 fold_convert_loc (loc, type,
11316 TREE_OPERAND (tem, 0)),
11317 op1);
11320 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11321 is probably obsolete because the first operand should be a
11322 truth value (that's why we have the two cases above), but let's
11323 leave it in until we can confirm this for all front-ends. */
11324 if (integer_zerop (op2)
11325 && TREE_CODE (arg0) == NE_EXPR
11326 && integer_zerop (TREE_OPERAND (arg0, 1))
11327 && integer_pow2p (arg1)
11328 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11329 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11330 arg1, OEP_ONLY_CONST))
11331 return pedantic_non_lvalue_loc (loc,
11332 fold_convert_loc (loc, type,
11333 TREE_OPERAND (arg0, 0)));
11335 /* Disable the transformations below for vectors, since
11336 fold_binary_op_with_conditional_arg may undo them immediately,
11337 yielding an infinite loop. */
11338 if (code == VEC_COND_EXPR)
11339 return NULL_TREE;
11341 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11342 if (integer_zerop (op2)
11343 && truth_value_p (TREE_CODE (arg0))
11344 && truth_value_p (TREE_CODE (arg1))
11345 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11346 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11347 : TRUTH_ANDIF_EXPR,
11348 type, fold_convert_loc (loc, type, arg0), op1);
11350 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11351 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11352 && truth_value_p (TREE_CODE (arg0))
11353 && truth_value_p (TREE_CODE (arg1))
11354 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11356 location_t loc0 = expr_location_or (arg0, loc);
11357 /* Only perform transformation if ARG0 is easily inverted. */
11358 tem = fold_invert_truthvalue (loc0, arg0);
11359 if (tem)
11360 return fold_build2_loc (loc, code == VEC_COND_EXPR
11361 ? BIT_IOR_EXPR
11362 : TRUTH_ORIF_EXPR,
11363 type, fold_convert_loc (loc, type, tem),
11364 op1);
11367 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11368 if (integer_zerop (arg1)
11369 && truth_value_p (TREE_CODE (arg0))
11370 && truth_value_p (TREE_CODE (op2))
11371 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11373 location_t loc0 = expr_location_or (arg0, loc);
11374 /* Only perform transformation if ARG0 is easily inverted. */
11375 tem = fold_invert_truthvalue (loc0, arg0);
11376 if (tem)
11377 return fold_build2_loc (loc, code == VEC_COND_EXPR
11378 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11379 type, fold_convert_loc (loc, type, tem),
11380 op2);
11383 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11384 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11385 && truth_value_p (TREE_CODE (arg0))
11386 && truth_value_p (TREE_CODE (op2))
11387 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11388 return fold_build2_loc (loc, code == VEC_COND_EXPR
11389 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11390 type, fold_convert_loc (loc, type, arg0), op2);
11392 return NULL_TREE;
11394 case CALL_EXPR:
11395 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11396 of fold_ternary on them. */
11397 gcc_unreachable ();
11399 case BIT_FIELD_REF:
11400 if (TREE_CODE (arg0) == VECTOR_CST
11401 && (type == TREE_TYPE (TREE_TYPE (arg0))
11402 || (TREE_CODE (type) == VECTOR_TYPE
11403 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11405 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11406 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11407 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11408 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11410 if (n != 0
11411 && (idx % width) == 0
11412 && (n % width) == 0
11413 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11415 idx = idx / width;
11416 n = n / width;
11418 if (TREE_CODE (arg0) == VECTOR_CST)
11420 if (n == 1)
11421 return VECTOR_CST_ELT (arg0, idx);
11423 auto_vec<tree, 32> vals (n);
11424 for (unsigned i = 0; i < n; ++i)
11425 vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
11426 return build_vector (type, vals);
11431 /* On constants we can use native encode/interpret to constant
11432 fold (nearly) all BIT_FIELD_REFs. */
11433 if (CONSTANT_CLASS_P (arg0)
11434 && can_native_interpret_type_p (type)
11435 && BITS_PER_UNIT == 8)
11437 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11438 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11439       /* Limit ourselves to a reasonable amount of work.  To relax the
11440 other limitations we need bit-shifting of the buffer
11441 and rounding up the size. */
11442 if (bitpos % BITS_PER_UNIT == 0
11443 && bitsize % BITS_PER_UNIT == 0
11444 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11446 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11447 unsigned HOST_WIDE_INT len
11448 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11449 bitpos / BITS_PER_UNIT);
11450 if (len > 0
11451 && len * BITS_PER_UNIT >= bitsize)
11453 tree v = native_interpret_expr (type, b,
11454 bitsize / BITS_PER_UNIT);
11455 if (v)
11456 return v;
11461 return NULL_TREE;
11463 case FMA_EXPR:
11464 /* For integers we can decompose the FMA if possible. */
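      /* For instance, FMA (3, 4, c) folds to 12 + c, and FMA (a, b, 0)
	 folds to a * b.  */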
11465 if (TREE_CODE (arg0) == INTEGER_CST
11466 && TREE_CODE (arg1) == INTEGER_CST)
11467 return fold_build2_loc (loc, PLUS_EXPR, type,
11468 const_binop (MULT_EXPR, arg0, arg1), arg2);
11469 if (integer_zerop (arg2))
11470 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11472 return fold_fma (loc, type, arg0, arg1, arg2);
11474 case VEC_PERM_EXPR:
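      /* Element i of the result is op0[sel[i]] if sel[i] < nelts, and
	 op1[sel[i] - nelts] otherwise; e.g. with nelts == 4 the selector
	 { 0, 5, 2, 7 } yields { op0[0], op1[1], op0[2], op1[3] }.  */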
11475 if (TREE_CODE (arg2) == VECTOR_CST)
11477 unsigned int nelts = VECTOR_CST_NELTS (arg2), i, mask, mask2;
11478 bool need_mask_canon = false;
11479 bool need_mask_canon2 = false;
11480 bool all_in_vec0 = true;
11481 bool all_in_vec1 = true;
11482 bool maybe_identity = true;
11483 bool single_arg = (op0 == op1);
11484 bool changed = false;
11486 mask2 = 2 * nelts - 1;
11487 mask = single_arg ? (nelts - 1) : mask2;
11488 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
11489 auto_vec_perm_indices sel (nelts);
11490 auto_vec_perm_indices sel2 (nelts);
11491 for (i = 0; i < nelts; i++)
11493 tree val = VECTOR_CST_ELT (arg2, i);
11494 if (TREE_CODE (val) != INTEGER_CST)
11495 return NULL_TREE;
11497 /* Make sure that the perm value is in an acceptable
11498 range. */
11499 wi::tree_to_wide_ref t = wi::to_wide (val);
11500 need_mask_canon |= wi::gtu_p (t, mask);
11501 need_mask_canon2 |= wi::gtu_p (t, mask2);
11502 unsigned int elt = t.to_uhwi () & mask;
11503 unsigned int elt2 = t.to_uhwi () & mask2;
11505 if (elt < nelts)
11506 all_in_vec1 = false;
11507 else
11508 all_in_vec0 = false;
11510 if ((elt & (nelts - 1)) != i)
11511 maybe_identity = false;
11513 sel.quick_push (elt);
11514 sel2.quick_push (elt2);
11517 if (maybe_identity)
11519 if (all_in_vec0)
11520 return op0;
11521 if (all_in_vec1)
11522 return op1;
11525 if (all_in_vec0)
11526 op1 = op0;
11527 else if (all_in_vec1)
11529 op0 = op1;
11530 for (i = 0; i < nelts; i++)
11531 sel[i] -= nelts;
11532 need_mask_canon = true;
11535 if ((TREE_CODE (op0) == VECTOR_CST
11536 || TREE_CODE (op0) == CONSTRUCTOR)
11537 && (TREE_CODE (op1) == VECTOR_CST
11538 || TREE_CODE (op1) == CONSTRUCTOR))
11540 tree t = fold_vec_perm (type, op0, op1, sel);
11541 if (t != NULL_TREE)
11542 return t;
11545 if (op0 == op1 && !single_arg)
11546 changed = true;
11548 /* Some targets are deficient and fail to expand a single
11549 argument permutation while still allowing an equivalent
11550 2-argument version. */
11551 if (need_mask_canon && arg2 == op2
11552 && !can_vec_perm_p (TYPE_MODE (type), false, &sel)
11553 && can_vec_perm_p (TYPE_MODE (type), false, &sel2))
11555 need_mask_canon = need_mask_canon2;
11556 sel = sel2;
11559 if (need_mask_canon && arg2 == op2)
11561 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11562 auto_vec<tree, 32> tsel (nelts);
11563 for (i = 0; i < nelts; i++)
11564 tsel.quick_push (build_int_cst (eltype, sel[i]));
11565 op2 = build_vector (TREE_TYPE (arg2), tsel);
11566 changed = true;
11569 if (changed)
11570 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11572 return NULL_TREE;
11574 case BIT_INSERT_EXPR:
11575 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
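      /* For instance, inserting the 8-bit value 0xab at bit position 8
	 (counting from the least significant bit) of the 32-bit constant
	 0x12345678 yields 0x1234ab78.  */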
11576 if (TREE_CODE (arg0) == INTEGER_CST
11577 && TREE_CODE (arg1) == INTEGER_CST)
11579 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11580 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11581 wide_int tem = (wi::to_wide (arg0)
11582 & wi::shifted_mask (bitpos, bitsize, true,
11583 TYPE_PRECISION (type)));
11584 wide_int tem2
11585 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11586 bitsize), bitpos);
11587 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
11589 else if (TREE_CODE (arg0) == VECTOR_CST
11590 && CONSTANT_CLASS_P (arg1)
11591 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11592 TREE_TYPE (arg1)))
11594 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11595 unsigned HOST_WIDE_INT elsize
11596 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11597 if (bitpos % elsize == 0)
11599 unsigned k = bitpos / elsize;
11600 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11601 return arg0;
11602 else
11604 unsigned int nelts = VECTOR_CST_NELTS (arg0);
11605 auto_vec<tree, 32> elts (nelts);
11606 elts.quick_grow (nelts);
11607 memcpy (&elts[0], VECTOR_CST_ELTS (arg0),
11608 sizeof (tree) * nelts);
11609 elts[k] = arg1;
11610 return build_vector (type, elts);
11614 return NULL_TREE;
11616 default:
11617 return NULL_TREE;
11618 } /* switch (code) */
11621 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11622 of an array (or vector). */
11624 tree
11625 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11627 tree index_type = NULL_TREE;
11628 offset_int low_bound = 0;
11630 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11632 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11633 if (domain_type && TYPE_MIN_VALUE (domain_type))
11635       /* Static constructors for variably sized objects make no sense. */
11636 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11637 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11638 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11642 if (index_type)
11643 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11644 TYPE_SIGN (index_type));
11646 offset_int index = low_bound - 1;
11647 if (index_type)
11648 index = wi::ext (index, TYPE_PRECISION (index_type),
11649 TYPE_SIGN (index_type));
11651 offset_int max_index;
11652 unsigned HOST_WIDE_INT cnt;
11653 tree cfield, cval;
11655 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11657 /* Array constructor might explicitly set index, or specify a range,
11658          or leave the index NULL, meaning that it is the next index after
11659          the previous one. */
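      /* For instance, in int a[] = { [2] = 5, 7 }, the value 5 has the
	 explicit index 2 and the value 7 has the implicit next index 3.  */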
11660 if (cfield)
11662 if (TREE_CODE (cfield) == INTEGER_CST)
11663 max_index = index = wi::to_offset (cfield);
11664 else
11666 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11667 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11668 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11671 else
11673 index += 1;
11674 if (index_type)
11675 index = wi::ext (index, TYPE_PRECISION (index_type),
11676 TYPE_SIGN (index_type));
11677 max_index = index;
11680       /* Do we have a match? */
11681 if (wi::cmpu (access_index, index) >= 0
11682 && wi::cmpu (access_index, max_index) <= 0)
11683 return cval;
11685 return NULL_TREE;
11688 /* Perform constant folding and related simplification of EXPR.
11689 The related simplifications include x*1 => x, x*0 => 0, etc.,
11690 and application of the associative law.
11691 NOP_EXPR conversions may be removed freely (as long as we
11692 are careful not to change the type of the overall expression).
11693 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11694 but we can constant-fold them if they have constant operands. */
11696 #ifdef ENABLE_FOLD_CHECKING
11697 # define fold(x) fold_1 (x)
11698 static tree fold_1 (tree);
11699 static
11700 #endif
11701 tree
11702 fold (tree expr)
11704 const tree t = expr;
11705 enum tree_code code = TREE_CODE (t);
11706 enum tree_code_class kind = TREE_CODE_CLASS (code);
11707 tree tem;
11708 location_t loc = EXPR_LOCATION (expr);
11710 /* Return right away if a constant. */
11711 if (kind == tcc_constant)
11712 return t;
11714 /* CALL_EXPR-like objects with variable numbers of operands are
11715 treated specially. */
11716 if (kind == tcc_vl_exp)
11718 if (code == CALL_EXPR)
11720 tem = fold_call_expr (loc, expr, false);
11721 return tem ? tem : expr;
11723 return expr;
11726 if (IS_EXPR_CODE_CLASS (kind))
11728 tree type = TREE_TYPE (t);
11729 tree op0, op1, op2;
11731 switch (TREE_CODE_LENGTH (code))
11733 case 1:
11734 op0 = TREE_OPERAND (t, 0);
11735 tem = fold_unary_loc (loc, code, type, op0);
11736 return tem ? tem : expr;
11737 case 2:
11738 op0 = TREE_OPERAND (t, 0);
11739 op1 = TREE_OPERAND (t, 1);
11740 tem = fold_binary_loc (loc, code, type, op0, op1);
11741 return tem ? tem : expr;
11742 case 3:
11743 op0 = TREE_OPERAND (t, 0);
11744 op1 = TREE_OPERAND (t, 1);
11745 op2 = TREE_OPERAND (t, 2);
11746 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11747 return tem ? tem : expr;
11748 default:
11749 break;
11753 switch (code)
11755 case ARRAY_REF:
11757 tree op0 = TREE_OPERAND (t, 0);
11758 tree op1 = TREE_OPERAND (t, 1);
11760 if (TREE_CODE (op1) == INTEGER_CST
11761 && TREE_CODE (op0) == CONSTRUCTOR
11762 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11764 tree val = get_array_ctor_element_at_index (op0,
11765 wi::to_offset (op1));
11766 if (val)
11767 return val;
11770 return t;
11773 /* Return a VECTOR_CST if possible. */
11774 case CONSTRUCTOR:
11776 tree type = TREE_TYPE (t);
11777 if (TREE_CODE (type) != VECTOR_TYPE)
11778 return t;
11780 unsigned i;
11781 tree val;
11782 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
11783 if (! CONSTANT_CLASS_P (val))
11784 return t;
11786 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
11789 case CONST_DECL:
11790 return fold (DECL_INITIAL (t));
11792 default:
11793 return t;
11794 } /* switch (code) */
11797 #ifdef ENABLE_FOLD_CHECKING
11798 #undef fold
11800 static void fold_checksum_tree (const_tree, struct md5_ctx *,
11801 hash_table<nofree_ptr_hash<const tree_node> > *);
11802 static void fold_check_failed (const_tree, const_tree);
11803 void print_fold_checksum (const_tree);
11805 /* When --enable-checking=fold, compute a digest of expr before
11806    and after the actual fold call to verify that fold did not
11807    accidentally change the original expr. */
11809 tree
11810 fold (tree expr)
11812 tree ret;
11813 struct md5_ctx ctx;
11814 unsigned char checksum_before[16], checksum_after[16];
11815 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
11817 md5_init_ctx (&ctx);
11818 fold_checksum_tree (expr, &ctx, &ht);
11819 md5_finish_ctx (&ctx, checksum_before);
11820 ht.empty ();
11822 ret = fold_1 (expr);
11824 md5_init_ctx (&ctx);
11825 fold_checksum_tree (expr, &ctx, &ht);
11826 md5_finish_ctx (&ctx, checksum_after);
11828 if (memcmp (checksum_before, checksum_after, 16))
11829 fold_check_failed (expr, ret);
11831 return ret;
11834 void
11835 print_fold_checksum (const_tree expr)
11837 struct md5_ctx ctx;
11838 unsigned char checksum[16], cnt;
11839 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
11841 md5_init_ctx (&ctx);
11842 fold_checksum_tree (expr, &ctx, &ht);
11843 md5_finish_ctx (&ctx, checksum);
11844 for (cnt = 0; cnt < 16; ++cnt)
11845 fprintf (stderr, "%02x", checksum[cnt]);
11846 putc ('\n', stderr);
11849 static void
11850 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
11852 internal_error ("fold check: original tree changed by fold");
11855 static void
11856 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
11857 hash_table<nofree_ptr_hash <const tree_node> > *ht)
11859 const tree_node **slot;
11860 enum tree_code code;
11861 union tree_node buf;
11862 int i, len;
11864 recursive_label:
11865 if (expr == NULL)
11866 return;
11867 slot = ht->find_slot (expr, INSERT);
11868 if (*slot != NULL)
11869 return;
11870 *slot = expr;
11871 code = TREE_CODE (expr);
11872 if (TREE_CODE_CLASS (code) == tcc_declaration
11873 && HAS_DECL_ASSEMBLER_NAME_P (expr))
11875 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
11876 memcpy ((char *) &buf, expr, tree_size (expr));
11877 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
11878 buf.decl_with_vis.symtab_node = NULL;
11879 expr = (tree) &buf;
11881 else if (TREE_CODE_CLASS (code) == tcc_type
11882 && (TYPE_POINTER_TO (expr)
11883 || TYPE_REFERENCE_TO (expr)
11884 || TYPE_CACHED_VALUES_P (expr)
11885 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
11886 || TYPE_NEXT_VARIANT (expr)
11887 || TYPE_ALIAS_SET_KNOWN_P (expr)))
11889 /* Allow these fields to be modified. */
11890 tree tmp;
11891 memcpy ((char *) &buf, expr, tree_size (expr));
11892 expr = tmp = (tree) &buf;
11893 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
11894 TYPE_POINTER_TO (tmp) = NULL;
11895 TYPE_REFERENCE_TO (tmp) = NULL;
11896 TYPE_NEXT_VARIANT (tmp) = NULL;
11897 TYPE_ALIAS_SET (tmp) = -1;
11898 if (TYPE_CACHED_VALUES_P (tmp))
11900 TYPE_CACHED_VALUES_P (tmp) = 0;
11901 TYPE_CACHED_VALUES (tmp) = NULL;
11904 md5_process_bytes (expr, tree_size (expr), ctx);
11905 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
11906 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11907 if (TREE_CODE_CLASS (code) != tcc_type
11908 && TREE_CODE_CLASS (code) != tcc_declaration
11909 && code != TREE_LIST
11910 && code != SSA_NAME
11911 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
11912 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11913 switch (TREE_CODE_CLASS (code))
11915 case tcc_constant:
11916 switch (code)
11918 case STRING_CST:
11919 md5_process_bytes (TREE_STRING_POINTER (expr),
11920 TREE_STRING_LENGTH (expr), ctx);
11921 break;
11922 case COMPLEX_CST:
11923 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11924 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11925 break;
11926 case VECTOR_CST:
11927 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
11928 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
11929 break;
11930 default:
11931 break;
11933 break;
11934 case tcc_exceptional:
11935 switch (code)
11937 case TREE_LIST:
11938 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11939 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11940 expr = TREE_CHAIN (expr);
11941 goto recursive_label;
11942 break;
11943 case TREE_VEC:
11944 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11945 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11946 break;
11947 default:
11948 break;
11950 break;
11951 case tcc_expression:
11952 case tcc_reference:
11953 case tcc_comparison:
11954 case tcc_unary:
11955 case tcc_binary:
11956 case tcc_statement:
11957 case tcc_vl_exp:
11958 len = TREE_OPERAND_LENGTH (expr);
11959 for (i = 0; i < len; ++i)
11960 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11961 break;
11962 case tcc_declaration:
11963 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11964 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11965 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11967 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11968 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11969 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11970 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11971 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11974 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
11976 if (TREE_CODE (expr) == FUNCTION_DECL)
11978 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
11979 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
11981 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
11983 break;
11984 case tcc_type:
11985 if (TREE_CODE (expr) == ENUMERAL_TYPE)
11986 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
11987 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
11988 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
11989 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
11990 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
11991 if (INTEGRAL_TYPE_P (expr)
11992 || SCALAR_FLOAT_TYPE_P (expr))
11994 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
11995 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
11997 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
11998 if (TREE_CODE (expr) == RECORD_TYPE
11999 || TREE_CODE (expr) == UNION_TYPE
12000 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12001 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12002 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12003 break;
12004 default:
12005 break;
12009 /* Helper function for outputting the checksum of a tree T. When
12010 debugging with gdb, you can "define mynext" to be "next" followed
12011 by "call debug_fold_checksum (op0)", then just trace down till the
12012 outputs differ. */
12014 DEBUG_FUNCTION void
12015 debug_fold_checksum (const_tree t)
12017 int i;
12018 unsigned char checksum[16];
12019 struct md5_ctx ctx;
12020 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12022 md5_init_ctx (&ctx);
12023 fold_checksum_tree (t, &ctx, &ht);
12024 md5_finish_ctx (&ctx, checksum);
12025 ht.empty ();
12027 for (i = 0; i < 16; i++)
12028 fprintf (stderr, "%d ", checksum[i]);
12030 fprintf (stderr, "\n");
12033 #endif
12035 /* Fold a unary tree expression with code CODE of type TYPE with an
12036 operand OP0. LOC is the location of the resulting expression.
12037 Return a folded expression if successful. Otherwise, return a tree
12038 expression with code CODE of type TYPE with an operand OP0. */
12040 tree
12041 fold_build1_loc (location_t loc,
12042 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12044 tree tem;
12045 #ifdef ENABLE_FOLD_CHECKING
12046 unsigned char checksum_before[16], checksum_after[16];
12047 struct md5_ctx ctx;
12048 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12050 md5_init_ctx (&ctx);
12051 fold_checksum_tree (op0, &ctx, &ht);
12052 md5_finish_ctx (&ctx, checksum_before);
12053 ht.empty ();
12054 #endif
12056 tem = fold_unary_loc (loc, code, type, op0);
12057 if (!tem)
12058 tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
12060 #ifdef ENABLE_FOLD_CHECKING
12061 md5_init_ctx (&ctx);
12062 fold_checksum_tree (op0, &ctx, &ht);
12063 md5_finish_ctx (&ctx, checksum_after);
12065 if (memcmp (checksum_before, checksum_after, 16))
12066 fold_check_failed (op0, tem);
12067 #endif
12068 return tem;
12071 /* Fold a binary tree expression with code CODE of type TYPE with
12072 operands OP0 and OP1. LOC is the location of the resulting
12073 expression. Return a folded expression if successful. Otherwise,
12074 return a tree expression with code CODE of type TYPE with operands
12075 OP0 and OP1. */
12077 tree
12078 fold_build2_loc (location_t loc,
12079 enum tree_code code, tree type, tree op0, tree op1
12080 MEM_STAT_DECL)
12082 tree tem;
12083 #ifdef ENABLE_FOLD_CHECKING
12084 unsigned char checksum_before_op0[16],
12085 checksum_before_op1[16],
12086 checksum_after_op0[16],
12087 checksum_after_op1[16];
12088 struct md5_ctx ctx;
12089 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12091 md5_init_ctx (&ctx);
12092 fold_checksum_tree (op0, &ctx, &ht);
12093 md5_finish_ctx (&ctx, checksum_before_op0);
12094 ht.empty ();
12096 md5_init_ctx (&ctx);
12097 fold_checksum_tree (op1, &ctx, &ht);
12098 md5_finish_ctx (&ctx, checksum_before_op1);
12099 ht.empty ();
12100 #endif
12102 tem = fold_binary_loc (loc, code, type, op0, op1);
12103 if (!tem)
12104 tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12106 #ifdef ENABLE_FOLD_CHECKING
12107 md5_init_ctx (&ctx);
12108 fold_checksum_tree (op0, &ctx, &ht);
12109 md5_finish_ctx (&ctx, checksum_after_op0);
12110 ht.empty ();
12112 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12113 fold_check_failed (op0, tem);
12115 md5_init_ctx (&ctx);
12116 fold_checksum_tree (op1, &ctx, &ht);
12117 md5_finish_ctx (&ctx, checksum_after_op1);
12119 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12120 fold_check_failed (op1, tem);
12121 #endif
12122 return tem;
12125 /* Fold a ternary tree expression with code CODE of type TYPE with
12126 operands OP0, OP1, and OP2. Return a folded expression if
12127 successful. Otherwise, return a tree expression with code CODE of
12128 type TYPE with operands OP0, OP1, and OP2. */
12130 tree
12131 fold_build3_loc (location_t loc, enum tree_code code, tree type,
12132 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12134 tree tem;
12135 #ifdef ENABLE_FOLD_CHECKING
12136 unsigned char checksum_before_op0[16],
12137 checksum_before_op1[16],
12138 checksum_before_op2[16],
12139 checksum_after_op0[16],
12140 checksum_after_op1[16],
12141 checksum_after_op2[16];
12142 struct md5_ctx ctx;
12143 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12145 md5_init_ctx (&ctx);
12146 fold_checksum_tree (op0, &ctx, &ht);
12147 md5_finish_ctx (&ctx, checksum_before_op0);
12148 ht.empty ();
12150 md5_init_ctx (&ctx);
12151 fold_checksum_tree (op1, &ctx, &ht);
12152 md5_finish_ctx (&ctx, checksum_before_op1);
12153 ht.empty ();
12155 md5_init_ctx (&ctx);
12156 fold_checksum_tree (op2, &ctx, &ht);
12157 md5_finish_ctx (&ctx, checksum_before_op2);
12158 ht.empty ();
12159 #endif
12161 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12162 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12163 if (!tem)
12164 tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12166 #ifdef ENABLE_FOLD_CHECKING
12167 md5_init_ctx (&ctx);
12168 fold_checksum_tree (op0, &ctx, &ht);
12169 md5_finish_ctx (&ctx, checksum_after_op0);
12170 ht.empty ();
12172 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12173 fold_check_failed (op0, tem);
12175 md5_init_ctx (&ctx);
12176 fold_checksum_tree (op1, &ctx, &ht);
12177 md5_finish_ctx (&ctx, checksum_after_op1);
12178 ht.empty ();
12180 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12181 fold_check_failed (op1, tem);
12183 md5_init_ctx (&ctx);
12184 fold_checksum_tree (op2, &ctx, &ht);
12185 md5_finish_ctx (&ctx, checksum_after_op2);
12187 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12188 fold_check_failed (op2, tem);
12189 #endif
12190 return tem;
12193 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12194 arguments in ARGARRAY, and a null static chain.
12195 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12196 of type TYPE from the given operands as constructed by build_call_array. */
12198 tree
12199 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12200 int nargs, tree *argarray)
12202 tree tem;
12203 #ifdef ENABLE_FOLD_CHECKING
12204 unsigned char checksum_before_fn[16],
12205 checksum_before_arglist[16],
12206 checksum_after_fn[16],
12207 checksum_after_arglist[16];
12208 struct md5_ctx ctx;
12209 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12210 int i;
12212 md5_init_ctx (&ctx);
12213 fold_checksum_tree (fn, &ctx, &ht);
12214 md5_finish_ctx (&ctx, checksum_before_fn);
12215 ht.empty ();
12217 md5_init_ctx (&ctx);
12218 for (i = 0; i < nargs; i++)
12219 fold_checksum_tree (argarray[i], &ctx, &ht);
12220 md5_finish_ctx (&ctx, checksum_before_arglist);
12221 ht.empty ();
12222 #endif
12224 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12225 if (!tem)
12226 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12228 #ifdef ENABLE_FOLD_CHECKING
12229 md5_init_ctx (&ctx);
12230 fold_checksum_tree (fn, &ctx, &ht);
12231 md5_finish_ctx (&ctx, checksum_after_fn);
12232 ht.empty ();
12234 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12235 fold_check_failed (fn, tem);
12237 md5_init_ctx (&ctx);
12238 for (i = 0; i < nargs; i++)
12239 fold_checksum_tree (argarray[i], &ctx, &ht);
12240 md5_finish_ctx (&ctx, checksum_after_arglist);
12242 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12243 fold_check_failed (NULL_TREE, tem);
12244 #endif
12245 return tem;
12248 /* Perform constant folding and related simplification of initializer
12249 expression EXPR. These behave identically to "fold_buildN" but ignore
12250 potential run-time traps and exceptions that fold must preserve. */
12252 #define START_FOLD_INIT \
12253 int saved_signaling_nans = flag_signaling_nans;\
12254 int saved_trapping_math = flag_trapping_math;\
12255 int saved_rounding_math = flag_rounding_math;\
12256 int saved_trapv = flag_trapv;\
12257 int saved_folding_initializer = folding_initializer;\
12258 flag_signaling_nans = 0;\
12259 flag_trapping_math = 0;\
12260 flag_rounding_math = 0;\
12261 flag_trapv = 0;\
12262 folding_initializer = 1;
12264 #define END_FOLD_INIT \
12265 flag_signaling_nans = saved_signaling_nans;\
12266 flag_trapping_math = saved_trapping_math;\
12267 flag_rounding_math = saved_rounding_math;\
12268 flag_trapv = saved_trapv;\
12269 folding_initializer = saved_folding_initializer;
12271 tree
12272 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12273 tree type, tree op)
12275 tree result;
12276 START_FOLD_INIT;
12278 result = fold_build1_loc (loc, code, type, op);
12280 END_FOLD_INIT;
12281 return result;
12284 tree
12285 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12286 tree type, tree op0, tree op1)
12288 tree result;
12289 START_FOLD_INIT;
12291 result = fold_build2_loc (loc, code, type, op0, op1);
12293 END_FOLD_INIT;
12294 return result;
12297 tree
12298 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12299 int nargs, tree *argarray)
12301 tree result;
12302 START_FOLD_INIT;
12304 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12306 END_FOLD_INIT;
12307 return result;
12310 #undef START_FOLD_INIT
12311 #undef END_FOLD_INIT
12313 /* Determine if first argument is a multiple of second argument. Return 0 if
12314    it is not, or we cannot easily determine it to be.
12316 An example of the sort of thing we care about (at this point; this routine
12317 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12318 fold cases do now) is discovering that
12320 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12322 is a multiple of
12324 SAVE_EXPR (J * 8)
12326 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12328 This code also handles discovering that
12330 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12332 is a multiple of 8 so we don't have to worry about dealing with a
12333 possible remainder.
12335 Note that we *look* inside a SAVE_EXPR only to determine how it was
12336 calculated; it is not safe for fold to do much of anything else with the
12337 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12338 at run time. For example, the latter example above *cannot* be implemented
12339 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12340 evaluation time of the original SAVE_EXPR is not necessarily the same at
12341 the time the new expression is evaluated. The only optimization of this
12342 sort that would be valid is changing
12344 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12346 divided by 8 to
12348 SAVE_EXPR (I) * SAVE_EXPR (J)
12350 (where the same SAVE_EXPR (J) is used in the original and the
12351 transformed version). */
12353 int
12354 multiple_of_p (tree type, const_tree top, const_tree bottom)
12356 gimple *stmt;
12357 tree t1, op1, op2;
12359 if (operand_equal_p (top, bottom, 0))
12360 return 1;
12362 if (TREE_CODE (type) != INTEGER_TYPE)
12363 return 0;
12365 switch (TREE_CODE (top))
12367 case BIT_AND_EXPR:
12368 /* Bitwise and provides a power of two multiple. If the mask is
12369 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12370 if (!integer_pow2p (bottom))
12371 return 0;
12372 /* FALLTHRU */
12374 case MULT_EXPR:
12375 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12376 || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12378 case MINUS_EXPR:
12379       /* It is impossible to prove precisely whether op0 - op1 is a multiple
12380          of bottom, so be conservative here and check whether both op0 and
12381          op1 are multiples of bottom.  Note we check the second operand first
12382 since it's usually simpler. */
12383 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12384 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12386 case PLUS_EXPR:
12387 /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12388 as op0 - 3 if the expression has unsigned type. For example,
12389          (X / 3) * 3 + 0xfffffffd is a multiple of 3, but 0xfffffffd is not. */
12390 op1 = TREE_OPERAND (top, 1);
12391 if (TYPE_UNSIGNED (type)
12392 && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
12393 op1 = fold_build1 (NEGATE_EXPR, type, op1);
12394 return (multiple_of_p (type, op1, bottom)
12395 && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12397 case LSHIFT_EXPR:
12398 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12400 op1 = TREE_OPERAND (top, 1);
12401 /* const_binop may not detect overflow correctly,
12402 so check for it explicitly here. */
12403 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
12404 wi::to_wide (op1))
12405 && 0 != (t1 = fold_convert (type,
12406 const_binop (LSHIFT_EXPR,
12407 size_one_node,
12408 op1)))
12409 && !TREE_OVERFLOW (t1))
12410 return multiple_of_p (type, t1, bottom);
12412 return 0;
12414 case NOP_EXPR:
12415 /* Can't handle conversions from non-integral or wider integral type. */
12416 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12417 || (TYPE_PRECISION (type)
12418 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12419 return 0;
12421 /* fall through */
12423 case SAVE_EXPR:
12424 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12426 case COND_EXPR:
12427 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12428 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12430 case INTEGER_CST:
12431 if (TREE_CODE (bottom) != INTEGER_CST
12432 || integer_zerop (bottom)
12433 || (TYPE_UNSIGNED (type)
12434 && (tree_int_cst_sgn (top) < 0
12435 || tree_int_cst_sgn (bottom) < 0)))
12436 return 0;
12437 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12438 SIGNED);
12440 case SSA_NAME:
12441 if (TREE_CODE (bottom) == INTEGER_CST
12442 && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
12443 && gimple_code (stmt) == GIMPLE_ASSIGN)
12445 enum tree_code code = gimple_assign_rhs_code (stmt);
12447           /* Check for special cases to see if top is defined as a multiple
12448              of bottom:
12450                top = X & ~(bottom - 1); bottom is a power of 2
12452              or
12454                Y = X % bottom
12455                top = X - Y. */
12456 if (code == BIT_AND_EXPR
12457 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12458 && TREE_CODE (op2) == INTEGER_CST
12459 && integer_pow2p (bottom)
12460 && wi::multiple_of_p (wi::to_widest (op2),
12461 wi::to_widest (bottom), UNSIGNED))
12462 return 1;
12464 op1 = gimple_assign_rhs1 (stmt);
12465 if (code == MINUS_EXPR
12466 && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12467 && TREE_CODE (op2) == SSA_NAME
12468 && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
12469 && gimple_code (stmt) == GIMPLE_ASSIGN
12470 && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
12471 && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
12472 && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
12473 return 1;
12476 /* fall through */
12478 default:
12479 return 0;
12483 #define tree_expr_nonnegative_warnv_p(X, Y) \
12484 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12486 #define RECURSE(X) \
12487 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12489 /* Return true if CODE or TYPE is known to be non-negative. */
12491 static bool
12492 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12494 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12495 && truth_value_p (code))
12496 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12497        have a signed:1 type (where the values are -1 and 0). */
12498 return true;
12499 return false;
12502 /* Return true if (CODE OP0) is known to be non-negative. If the return
12503 value is based on the assumption that signed overflow is undefined,
12504 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12505 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12507 bool
12508 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12509 bool *strict_overflow_p, int depth)
12511 if (TYPE_UNSIGNED (type))
12512 return true;
12514 switch (code)
12516 case ABS_EXPR:
12517 /* We can't return 1 if flag_wrapv is set because
12518 ABS_EXPR<INT_MIN> = INT_MIN. */
12519 if (!ANY_INTEGRAL_TYPE_P (type))
12520 return true;
12521 if (TYPE_OVERFLOW_UNDEFINED (type))
12523 *strict_overflow_p = true;
12524 return true;
12526 break;
12528 case NON_LVALUE_EXPR:
12529 case FLOAT_EXPR:
12530 case FIX_TRUNC_EXPR:
12531 return RECURSE (op0);
12533 CASE_CONVERT:
12535 tree inner_type = TREE_TYPE (op0);
12536 tree outer_type = type;
12538 if (TREE_CODE (outer_type) == REAL_TYPE)
12540 if (TREE_CODE (inner_type) == REAL_TYPE)
12541 return RECURSE (op0);
12542 if (INTEGRAL_TYPE_P (inner_type))
12544 if (TYPE_UNSIGNED (inner_type))
12545 return true;
12546 return RECURSE (op0);
12549 else if (INTEGRAL_TYPE_P (outer_type))
12551 if (TREE_CODE (inner_type) == REAL_TYPE)
12552 return RECURSE (op0);
12553 if (INTEGRAL_TYPE_P (inner_type))
12554 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12555 && TYPE_UNSIGNED (inner_type);
12558 break;
12560 default:
12561 return tree_simple_nonnegative_warnv_p (code, type);
12564 /* We don't know the sign of `t', so be conservative and return false. */
12565 return false;
12568 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12569 value is based on the assumption that signed overflow is undefined,
12570 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12571 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12573 bool
12574 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12575 tree op1, bool *strict_overflow_p,
12576 int depth)
12578 if (TYPE_UNSIGNED (type))
12579 return true;
12581 switch (code)
12583 case POINTER_PLUS_EXPR:
12584 case PLUS_EXPR:
12585 if (FLOAT_TYPE_P (type))
12586 return RECURSE (op0) && RECURSE (op1);
12588 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12589 both unsigned and at least 2 bits shorter than the result. */
12590 if (TREE_CODE (type) == INTEGER_TYPE
12591 && TREE_CODE (op0) == NOP_EXPR
12592 && TREE_CODE (op1) == NOP_EXPR)
12594 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12595 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12596 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12597 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12599 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12600 TYPE_PRECISION (inner2)) + 1;
12601 return prec < TYPE_PRECISION (type);
12604 break;
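/* Illustrative example (not from this file): two uint8_t values
   zero-extended into a 32-bit int need MAX (8, 8) + 1 == 9 bits for
   their sum, and 9 < 32, so e.g. 255 + 255 == 510 can never reach the
   sign bit.  */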
12606 case MULT_EXPR:
12607 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12609 /* x * x is always non-negative for floating point x,
12610 or when signed overflow is undefined. */
12611 if (operand_equal_p (op0, op1, 0)
12612 || (RECURSE (op0) && RECURSE (op1)))
12614 if (ANY_INTEGRAL_TYPE_P (type)
12615 && TYPE_OVERFLOW_UNDEFINED (type))
12616 *strict_overflow_p = true;
12617 return true;
12621 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12622 both unsigned and their combined precision is less than the result's. */
12623 if (TREE_CODE (type) == INTEGER_TYPE
12624 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12625 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12627 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12628 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12629 : TREE_TYPE (op0);
12630 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12631 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12632 : TREE_TYPE (op1);
12634 bool unsigned0 = TYPE_UNSIGNED (inner0);
12635 bool unsigned1 = TYPE_UNSIGNED (inner1);
12637 if (TREE_CODE (op0) == INTEGER_CST)
12638 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12640 if (TREE_CODE (op1) == INTEGER_CST)
12641 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12643 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12644 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12646 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12647 ? tree_int_cst_min_precision (op0, UNSIGNED)
12648 : TYPE_PRECISION (inner0);
12650 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12651 ? tree_int_cst_min_precision (op1, UNSIGNED)
12652 : TYPE_PRECISION (inner1);
12654 return precision0 + precision1 < TYPE_PRECISION (type);
12657 return false;
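/* Illustrative example (not from this file): two uint8_t values
   zero-extended into a 32-bit int need at most 8 + 8 == 16 bits for
   their product, and 16 < 32, so e.g. 255 * 255 == 65025 stays
   non-negative.  */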
12659 case BIT_AND_EXPR:
12660 case MAX_EXPR:
12661 return RECURSE (op0) || RECURSE (op1);
12663 case BIT_IOR_EXPR:
12664 case BIT_XOR_EXPR:
12665 case MIN_EXPR:
12666 case RDIV_EXPR:
12667 case TRUNC_DIV_EXPR:
12668 case CEIL_DIV_EXPR:
12669 case FLOOR_DIV_EXPR:
12670 case ROUND_DIV_EXPR:
12671 return RECURSE (op0) && RECURSE (op1);
12673 case TRUNC_MOD_EXPR:
12674 return RECURSE (op0);
12676 case FLOOR_MOD_EXPR:
12677 return RECURSE (op1);
12679 case CEIL_MOD_EXPR:
12680 case ROUND_MOD_EXPR:
12681 default:
12682 return tree_simple_nonnegative_warnv_p (code, type);
12685 /* We don't know the sign of `t', so be conservative and return false. */
12686 return false;
12689 /* Return true if T is known to be non-negative. If the return
12690 value is based on the assumption that signed overflow is undefined,
12691 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12692 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12694 bool
12695 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12697 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12698 return true;
12700 switch (TREE_CODE (t))
12702 case INTEGER_CST:
12703 return tree_int_cst_sgn (t) >= 0;
12705 case REAL_CST:
12706 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12708 case FIXED_CST:
12709 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12711 case COND_EXPR:
12712 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12714 case SSA_NAME:
12715 /* Limit the depth of recursion to avoid quadratic behavior.
12716 This is expected to catch almost all occurrences in practice.
12717 If this code misses important cases that unbounded recursion
12718 would not, passes that need this information could be revised
12719 to provide it through dataflow propagation. */
12720 return (!name_registered_for_update_p (t)
12721 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12722 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12723 strict_overflow_p, depth));
12725 default:
12726 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12730 /* Return true if T is known to be non-negative. If the return
12731 value is based on the assumption that signed overflow is undefined,
12732 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12733 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12735 bool
12736 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
12737 bool *strict_overflow_p, int depth)
12739 switch (fn)
12741 CASE_CFN_ACOS:
12742 CASE_CFN_ACOSH:
12743 CASE_CFN_CABS:
12744 CASE_CFN_COSH:
12745 CASE_CFN_ERFC:
12746 CASE_CFN_EXP:
12747 CASE_CFN_EXP10:
12748 CASE_CFN_EXP2:
12749 CASE_CFN_FABS:
12750 CASE_CFN_FDIM:
12751 CASE_CFN_HYPOT:
12752 CASE_CFN_POW10:
12753 CASE_CFN_FFS:
12754 CASE_CFN_PARITY:
12755 CASE_CFN_POPCOUNT:
12756 CASE_CFN_CLZ:
12757 CASE_CFN_CLRSB:
12758 case CFN_BUILT_IN_BSWAP32:
12759 case CFN_BUILT_IN_BSWAP64:
12760 /* Always true. */
12761 return true;
12763 CASE_CFN_SQRT:
12764 /* sqrt(-0.0) is -0.0. */
12765 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
12766 return true;
12767 return RECURSE (arg0);
12769 CASE_CFN_ASINH:
12770 CASE_CFN_ATAN:
12771 CASE_CFN_ATANH:
12772 CASE_CFN_CBRT:
12773 CASE_CFN_CEIL:
12774 CASE_CFN_ERF:
12775 CASE_CFN_EXPM1:
12776 CASE_CFN_FLOOR:
12777 CASE_CFN_FMOD:
12778 CASE_CFN_FREXP:
12779 CASE_CFN_ICEIL:
12780 CASE_CFN_IFLOOR:
12781 CASE_CFN_IRINT:
12782 CASE_CFN_IROUND:
12783 CASE_CFN_LCEIL:
12784 CASE_CFN_LDEXP:
12785 CASE_CFN_LFLOOR:
12786 CASE_CFN_LLCEIL:
12787 CASE_CFN_LLFLOOR:
12788 CASE_CFN_LLRINT:
12789 CASE_CFN_LLROUND:
12790 CASE_CFN_LRINT:
12791 CASE_CFN_LROUND:
12792 CASE_CFN_MODF:
12793 CASE_CFN_NEARBYINT:
12794 CASE_CFN_RINT:
12795 CASE_CFN_ROUND:
12796 CASE_CFN_SCALB:
12797 CASE_CFN_SCALBLN:
12798 CASE_CFN_SCALBN:
12799 CASE_CFN_SIGNBIT:
12800 CASE_CFN_SIGNIFICAND:
12801 CASE_CFN_SINH:
12802 CASE_CFN_TANH:
12803 CASE_CFN_TRUNC:
12804 /* True if the 1st argument is nonnegative. */
12805 return RECURSE (arg0);
12807 CASE_CFN_FMAX:
12808 /* True if the 1st OR 2nd arguments are nonnegative. */
12809 return RECURSE (arg0) || RECURSE (arg1);
12811 CASE_CFN_FMIN:
12812 /* True if the 1st AND 2nd arguments are nonnegative. */
12813 return RECURSE (arg0) && RECURSE (arg1);
12815 CASE_CFN_COPYSIGN:
12816 /* True if the 2nd argument is nonnegative. */
12817 return RECURSE (arg1);
12819 CASE_CFN_POWI:
12820 /* True if the 1st argument is nonnegative or the second
12821 argument is an even integer. */
12822 if (TREE_CODE (arg1) == INTEGER_CST
12823 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
12824 return true;
12825 return RECURSE (arg0);
12827 CASE_CFN_POW:
12828 /* True if the 1st argument is nonnegative or the second
12829 argument is an even integer valued real. */
12830 if (TREE_CODE (arg1) == REAL_CST)
12832 REAL_VALUE_TYPE c;
12833 HOST_WIDE_INT n;
12835 c = TREE_REAL_CST (arg1);
12836 n = real_to_integer (&c);
12837 if ((n & 1) == 0)
12839 REAL_VALUE_TYPE cint;
12840 real_from_integer (&cint, VOIDmode, n, SIGNED);
12841 if (real_identical (&c, &cint))
12842 return true;
12845 return RECURSE (arg0);
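/* Illustrative example (not from this file): for pow (x, 2.0) the exponent
   converts to n == 2, n is even, and 2.0 round-trips through
   real_from_integer unchanged, so the result is non-negative for every x.
   For pow (x, 2.5) the real_identical check fails, so only a non-negative
   first argument decides.  */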
12847 default:
12848 break;
12850 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
12853 /* Return true if T is known to be non-negative. If the return
12854 value is based on the assumption that signed overflow is undefined,
12855 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12856 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12858 static bool
12859 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12861 enum tree_code code = TREE_CODE (t);
12862 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12863 return true;
12865 switch (code)
12867 case TARGET_EXPR:
12869 tree temp = TARGET_EXPR_SLOT (t);
12870 t = TARGET_EXPR_INITIAL (t);
12872 /* If the initializer is non-void, then it's a normal expression
12873 that will be assigned to the slot. */
12874 if (!VOID_TYPE_P (t))
12875 return RECURSE (t);
12877 /* Otherwise, the initializer sets the slot in some way. One common
12878 way is an assignment statement at the end of the initializer. */
12879 while (1)
12881 if (TREE_CODE (t) == BIND_EXPR)
12882 t = expr_last (BIND_EXPR_BODY (t));
12883 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12884 || TREE_CODE (t) == TRY_CATCH_EXPR)
12885 t = expr_last (TREE_OPERAND (t, 0));
12886 else if (TREE_CODE (t) == STATEMENT_LIST)
12887 t = expr_last (t);
12888 else
12889 break;
12891 if (TREE_CODE (t) == MODIFY_EXPR
12892 && TREE_OPERAND (t, 0) == temp)
12893 return RECURSE (TREE_OPERAND (t, 1));
12895 return false;
12898 case CALL_EXPR:
12900 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
12901 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
12903 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
12904 get_call_combined_fn (t),
12905 arg0,
12906 arg1,
12907 strict_overflow_p, depth);
12909 case COMPOUND_EXPR:
12910 case MODIFY_EXPR:
12911 return RECURSE (TREE_OPERAND (t, 1));
12913 case BIND_EXPR:
12914 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
12916 case SAVE_EXPR:
12917 return RECURSE (TREE_OPERAND (t, 0));
12919 default:
12920 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12924 #undef RECURSE
12925 #undef tree_expr_nonnegative_warnv_p
12927 /* Return true if T is known to be non-negative. If the return
12928 value is based on the assumption that signed overflow is undefined,
12929 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12930 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12932 bool
12933 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12935 enum tree_code code;
12936 if (t == error_mark_node)
12937 return false;
12939 code = TREE_CODE (t);
12940 switch (TREE_CODE_CLASS (code))
12942 case tcc_binary:
12943 case tcc_comparison:
12944 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
12945 TREE_TYPE (t),
12946 TREE_OPERAND (t, 0),
12947 TREE_OPERAND (t, 1),
12948 strict_overflow_p, depth);
12950 case tcc_unary:
12951 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
12952 TREE_TYPE (t),
12953 TREE_OPERAND (t, 0),
12954 strict_overflow_p, depth);
12956 case tcc_constant:
12957 case tcc_declaration:
12958 case tcc_reference:
12959 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
12961 default:
12962 break;
12965 switch (code)
12967 case TRUTH_AND_EXPR:
12968 case TRUTH_OR_EXPR:
12969 case TRUTH_XOR_EXPR:
12970 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
12971 TREE_TYPE (t),
12972 TREE_OPERAND (t, 0),
12973 TREE_OPERAND (t, 1),
12974 strict_overflow_p, depth);
12975 case TRUTH_NOT_EXPR:
12976 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
12977 TREE_TYPE (t),
12978 TREE_OPERAND (t, 0),
12979 strict_overflow_p, depth);
12981 case COND_EXPR:
12982 case CONSTRUCTOR:
12983 case OBJ_TYPE_REF:
12984 case ASSERT_EXPR:
12985 case ADDR_EXPR:
12986 case WITH_SIZE_EXPR:
12987 case SSA_NAME:
12988 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
12990 default:
12991 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
12995 /* Return true if `t' is known to be non-negative. Handle warnings
12996 about undefined signed overflow. */
12998 bool
12999 tree_expr_nonnegative_p (tree t)
13001 bool ret, strict_overflow_p;
13003 strict_overflow_p = false;
13004 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13005 if (strict_overflow_p)
13006 fold_overflow_warning (("assuming signed overflow does not occur when "
13007 "determining that expression is always "
13008 "non-negative"),
13009 WARN_STRICT_OVERFLOW_MISC);
13010 return ret;
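/* Minimal usage sketch (hypothetical caller, not from this file): a fold
   that wants to drop a redundant fabs could ask

     if (tree_expr_nonnegative_p (arg))
       return arg;   // fabs (arg) simplifies to arg

   with the strict-overflow warning, if any, already issued above.  */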
13014 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13015 For floating point we further ensure that T is not denormal.
13016 Similar logic is present in nonzero_address in rtlanal.h.
13018 If the return value is based on the assumption that signed overflow
13019 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13020 change *STRICT_OVERFLOW_P. */
13022 bool
13023 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13024 bool *strict_overflow_p)
13026 switch (code)
13028 case ABS_EXPR:
13029 return tree_expr_nonzero_warnv_p (op0,
13030 strict_overflow_p);
13032 case NOP_EXPR:
13034 tree inner_type = TREE_TYPE (op0);
13035 tree outer_type = type;
13037 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13038 && tree_expr_nonzero_warnv_p (op0,
13039 strict_overflow_p));
13041 break;
13043 case NON_LVALUE_EXPR:
13044 return tree_expr_nonzero_warnv_p (op0,
13045 strict_overflow_p);
13047 default:
13048 break;
13051 return false;
13054 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13055 For floating point we further ensure that T is not denormal.
13056 Similar logic is present in nonzero_address in rtlanal.h.
13058 If the return value is based on the assumption that signed overflow
13059 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13060 change *STRICT_OVERFLOW_P. */
13062 bool
13063 tree_binary_nonzero_warnv_p (enum tree_code code,
13064 tree type,
13065 tree op0,
13066 tree op1, bool *strict_overflow_p)
13068 bool sub_strict_overflow_p;
13069 switch (code)
13071 case POINTER_PLUS_EXPR:
13072 case PLUS_EXPR:
13073 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13075 /* In the presence of negative values it is hard
13076 to say anything definite. */
13077 sub_strict_overflow_p = false;
13078 if (!tree_expr_nonnegative_warnv_p (op0,
13079 &sub_strict_overflow_p)
13080 || !tree_expr_nonnegative_warnv_p (op1,
13081 &sub_strict_overflow_p))
13082 return false;
13083 /* One of the operands must be positive and the other non-negative. */
13084 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13085 overflows, on a twos-complement machine the sum of two
13086 nonnegative numbers can never be zero. */
13087 return (tree_expr_nonzero_warnv_p (op0,
13088 strict_overflow_p)
13089 || tree_expr_nonzero_warnv_p (op1,
13090 strict_overflow_p));
13092 break;
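/* Illustrative reasoning (not from this file): with 32-bit operands
   0 <= x, y <= 2^31 - 1 and at least one of them nonzero, the exact sum
   lies in [1, 2^32 - 2]; even a wrapped two's-complement result cannot
   be zero, which is why *STRICT_OVERFLOW_P need not be set here.  */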
13094 case MULT_EXPR:
13095 if (TYPE_OVERFLOW_UNDEFINED (type))
13097 if (tree_expr_nonzero_warnv_p (op0,
13098 strict_overflow_p)
13099 && tree_expr_nonzero_warnv_p (op1,
13100 strict_overflow_p))
13102 *strict_overflow_p = true;
13103 return true;
13106 break;
13108 case MIN_EXPR:
13109 sub_strict_overflow_p = false;
13110 if (tree_expr_nonzero_warnv_p (op0,
13111 &sub_strict_overflow_p)
13112 && tree_expr_nonzero_warnv_p (op1,
13113 &sub_strict_overflow_p))
13115 if (sub_strict_overflow_p)
13116 *strict_overflow_p = true;
13118 break;
13120 case MAX_EXPR:
13121 sub_strict_overflow_p = false;
13122 if (tree_expr_nonzero_warnv_p (op0,
13123 &sub_strict_overflow_p))
13125 if (sub_strict_overflow_p)
13126 *strict_overflow_p = true;
13128 /* When both operands are nonzero, then MAX must be too. */
13129 if (tree_expr_nonzero_warnv_p (op1,
13130 strict_overflow_p))
13131 return true;
13133 /* MAX where operand 0 is positive is positive. */
13134 return tree_expr_nonnegative_warnv_p (op0,
13135 strict_overflow_p);
13137 /* MAX where operand 1 is positive is positive. */
13138 else if (tree_expr_nonzero_warnv_p (op1,
13139 &sub_strict_overflow_p)
13140 && tree_expr_nonnegative_warnv_p (op1,
13141 &sub_strict_overflow_p))
13143 if (sub_strict_overflow_p)
13144 *strict_overflow_p = true;
13145 return true;
13147 break;
13149 case BIT_IOR_EXPR:
13150 return (tree_expr_nonzero_warnv_p (op1,
13151 strict_overflow_p)
13152 || tree_expr_nonzero_warnv_p (op0,
13153 strict_overflow_p));
13155 default:
13156 break;
13159 return false;
13162 /* Return true when T is an address and is known to be nonzero.
13163 For floating point we further ensure that T is not denormal.
13164 Similar logic is present in nonzero_address in rtlanal.h.
13166 If the return value is based on the assumption that signed overflow
13167 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13168 change *STRICT_OVERFLOW_P. */
13170 bool
13171 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13173 bool sub_strict_overflow_p;
13174 switch (TREE_CODE (t))
13176 case INTEGER_CST:
13177 return !integer_zerop (t);
13179 case ADDR_EXPR:
13181 tree base = TREE_OPERAND (t, 0);
13183 if (!DECL_P (base))
13184 base = get_base_address (base);
13186 if (base && TREE_CODE (base) == TARGET_EXPR)
13187 base = TARGET_EXPR_SLOT (base);
13189 if (!base)
13190 return false;
13192 /* For objects in the symbol table, check whether we know they are non-zero.
13193 Don't do anything for variables and functions before the symtab is built;
13194 it is quite possible that they will be declared weak later. */
13195 int nonzero_addr = maybe_nonzero_address (base);
13196 if (nonzero_addr >= 0)
13197 return nonzero_addr;
13199 /* Constants are never weak. */
13200 if (CONSTANT_CLASS_P (base))
13201 return true;
13203 return false;
13206 case COND_EXPR:
13207 sub_strict_overflow_p = false;
13208 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13209 &sub_strict_overflow_p)
13210 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13211 &sub_strict_overflow_p))
13213 if (sub_strict_overflow_p)
13214 *strict_overflow_p = true;
13215 return true;
13217 break;
13219 case SSA_NAME:
13220 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13221 break;
13222 return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
13224 default:
13225 break;
13227 return false;
13230 #define integer_valued_real_p(X) \
13231 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13233 #define RECURSE(X) \
13234 ((integer_valued_real_p) (X, depth + 1))
13236 /* Return true if the floating point result of (CODE OP0) has an
13237 integer value. We also allow +Inf, -Inf and NaN to be considered
13238 integer values. Return false for signaling NaN.
13240 DEPTH is the current nesting depth of the query. */
13242 bool
13243 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13245 switch (code)
13247 case FLOAT_EXPR:
13248 return true;
13250 case ABS_EXPR:
13251 return RECURSE (op0);
13253 CASE_CONVERT:
13255 tree type = TREE_TYPE (op0);
13256 if (TREE_CODE (type) == INTEGER_TYPE)
13257 return true;
13258 if (TREE_CODE (type) == REAL_TYPE)
13259 return RECURSE (op0);
13260 break;
13263 default:
13264 break;
13266 return false;
13269 /* Return true if the floating point result of (CODE OP0 OP1) has an
13270 integer value. We also allow +Inf, -Inf and NaN to be considered
13271 integer values. Return false for signaling NaN.
13273 DEPTH is the current nesting depth of the query. */
13275 bool
13276 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13278 switch (code)
13280 case PLUS_EXPR:
13281 case MINUS_EXPR:
13282 case MULT_EXPR:
13283 case MIN_EXPR:
13284 case MAX_EXPR:
13285 return RECURSE (op0) && RECURSE (op1);
13287 default:
13288 break;
13290 return false;
13293 /* Return true if the floating point result of calling FNDECL with arguments
13294 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13295 considered integer values. Return false for signaling NaN. If FN
13296 takes fewer than 2 arguments, the remaining ARGn are null.
13298 DEPTH is the current nesting depth of the query. */
13300 bool
13301 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13303 switch (fn)
13305 CASE_CFN_CEIL:
13306 CASE_CFN_FLOOR:
13307 CASE_CFN_NEARBYINT:
13308 CASE_CFN_RINT:
13309 CASE_CFN_ROUND:
13310 CASE_CFN_TRUNC:
13311 return true;
13313 CASE_CFN_FMIN:
13314 CASE_CFN_FMAX:
13315 return RECURSE (arg0) && RECURSE (arg1);
13317 default:
13318 break;
13320 return false;
13323 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13324 has an integer value. We also allow +Inf, -Inf and NaN to be
13325 considered integer values. Return false for signaling NaN.
13327 DEPTH is the current nesting depth of the query. */
13329 bool
13330 integer_valued_real_single_p (tree t, int depth)
13332 switch (TREE_CODE (t))
13334 case REAL_CST:
13335 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13337 case COND_EXPR:
13338 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13340 case SSA_NAME:
13341 /* Limit the depth of recursion to avoid quadratic behavior.
13342 This is expected to catch almost all occurrences in practice.
13343 If this code misses important cases that unbounded recursion
13344 would not, passes that need this information could be revised
13345 to provide it through dataflow propagation. */
13346 return (!name_registered_for_update_p (t)
13347 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13348 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13349 depth));
13351 default:
13352 break;
13354 return false;
13357 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13358 has an integer value. We also allow +Inf, -Inf and NaN to be
13359 considered integer values. Return false for signaling NaN.
13361 DEPTH is the current nesting depth of the query. */
13363 static bool
13364 integer_valued_real_invalid_p (tree t, int depth)
13366 switch (TREE_CODE (t))
13368 case COMPOUND_EXPR:
13369 case MODIFY_EXPR:
13370 case BIND_EXPR:
13371 return RECURSE (TREE_OPERAND (t, 1));
13373 case SAVE_EXPR:
13374 return RECURSE (TREE_OPERAND (t, 0));
13376 default:
13377 break;
13379 return false;
13382 #undef RECURSE
13383 #undef integer_valued_real_p
13385 /* Return true if the floating point expression T has an integer value.
13386 We also allow +Inf, -Inf and NaN to be considered integer values.
13387 Return false for signaling NaN.
13389 DEPTH is the current nesting depth of the query. */
13391 bool
13392 integer_valued_real_p (tree t, int depth)
13394 if (t == error_mark_node)
13395 return false;
13397 tree_code code = TREE_CODE (t);
13398 switch (TREE_CODE_CLASS (code))
13400 case tcc_binary:
13401 case tcc_comparison:
13402 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13403 TREE_OPERAND (t, 1), depth);
13405 case tcc_unary:
13406 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13408 case tcc_constant:
13409 case tcc_declaration:
13410 case tcc_reference:
13411 return integer_valued_real_single_p (t, depth);
13413 default:
13414 break;
13417 switch (code)
13419 case COND_EXPR:
13420 case SSA_NAME:
13421 return integer_valued_real_single_p (t, depth);
13423 case CALL_EXPR:
13425 tree arg0 = (call_expr_nargs (t) > 0
13426 ? CALL_EXPR_ARG (t, 0)
13427 : NULL_TREE);
13428 tree arg1 = (call_expr_nargs (t) > 1
13429 ? CALL_EXPR_ARG (t, 1)
13430 : NULL_TREE);
13431 return integer_valued_real_call_p (get_call_combined_fn (t),
13432 arg0, arg1, depth);
13435 default:
13436 return integer_valued_real_invalid_p (t, depth);
13440 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13441 attempt to fold the expression to a constant without modifying TYPE,
13442 OP0 or OP1.
13444 If the expression could be simplified to a constant, then return
13445 the constant. If the expression would not be simplified to a
13446 constant, then return NULL_TREE. */
13448 tree
13449 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13451 tree tem = fold_binary (code, type, op0, op1);
13452 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13455 /* Given the components of a unary expression CODE, TYPE and OP0,
13456 attempt to fold the expression to a constant without modifying
13457 TYPE or OP0.
13459 If the expression could be simplified to a constant, then return
13460 the constant. If the expression would not be simplified to a
13461 constant, then return NULL_TREE. */
13463 tree
13464 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13466 tree tem = fold_unary (code, type, op0);
13467 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13470 /* If EXP represents referencing an element in a constant string
13471 (either via pointer arithmetic or array indexing), return the
13472 tree representing the value accessed, otherwise return NULL. */
13474 tree
13475 fold_read_from_constant_string (tree exp)
13477 if ((TREE_CODE (exp) == INDIRECT_REF
13478 || TREE_CODE (exp) == ARRAY_REF)
13479 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13481 tree exp1 = TREE_OPERAND (exp, 0);
13482 tree index;
13483 tree string;
13484 location_t loc = EXPR_LOCATION (exp);
13486 if (TREE_CODE (exp) == INDIRECT_REF)
13487 string = string_constant (exp1, &index);
13488 else
13490 tree low_bound = array_ref_low_bound (exp);
13491 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13493 /* Optimize the special-case of a zero lower bound.
13495 We convert the low_bound to sizetype to avoid some problems
13496 with constant folding. (E.g. suppose the lower bound is 1,
13497 and its mode is QI. Without the conversion, (ARRAY
13498 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13499 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13500 if (! integer_zerop (low_bound))
13501 index = size_diffop_loc (loc, index,
13502 fold_convert_loc (loc, sizetype, low_bound));
13504 string = exp1;
13507 scalar_int_mode char_mode;
13508 if (string
13509 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13510 && TREE_CODE (string) == STRING_CST
13511 && TREE_CODE (index) == INTEGER_CST
13512 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13513 && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
13514 &char_mode)
13515 && GET_MODE_SIZE (char_mode) == 1)
13516 return build_int_cst_type (TREE_TYPE (exp),
13517 (TREE_STRING_POINTER (string)
13518 [TREE_INT_CST_LOW (index)]));
13520 return NULL;
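/* Illustrative example (not from this file): for the C expression
   "abc"[1], STRING is the STRING_CST "abc" (TREE_STRING_LENGTH == 4,
   counting the terminating NUL), INDEX is 1, and the reference folds to
   the character constant 'b'.  */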
13523 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13524 an integer constant, real, or fixed-point constant.
13526 TYPE is the type of the result. */
13528 static tree
13529 fold_negate_const (tree arg0, tree type)
13531 tree t = NULL_TREE;
13533 switch (TREE_CODE (arg0))
13535 case INTEGER_CST:
13537 bool overflow;
13538 wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
13539 t = force_fit_type (type, val, 1,
13540 (overflow && ! TYPE_UNSIGNED (type))
13541 || TREE_OVERFLOW (arg0));
13542 break;
13545 case REAL_CST:
13546 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13547 break;
13549 case FIXED_CST:
13551 FIXED_VALUE_TYPE f;
13552 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13553 &(TREE_FIXED_CST (arg0)), NULL,
13554 TYPE_SATURATING (type));
13555 t = build_fixed (type, f);
13556 /* Propagate overflow flags. */
13557 if (overflow_p | TREE_OVERFLOW (arg0))
13558 TREE_OVERFLOW (t) = 1;
13559 break;
13562 default:
13563 gcc_unreachable ();
13566 return t;
13569 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13570 an integer constant or real constant.
13572 TYPE is the type of the result. */
13574 tree
13575 fold_abs_const (tree arg0, tree type)
13577 tree t = NULL_TREE;
13579 switch (TREE_CODE (arg0))
13581 case INTEGER_CST:
13583 /* If the value is unsigned or non-negative, then the absolute value
13584 is the same as the ordinary value. */
13585 if (!wi::neg_p (wi::to_wide (arg0), TYPE_SIGN (type)))
13586 t = arg0;
13588 /* If the value is negative, then the absolute value is
13589 its negation. */
13590 else
13592 bool overflow;
13593 wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
13594 t = force_fit_type (type, val, -1,
13595 overflow | TREE_OVERFLOW (arg0));
13598 break;
13600 case REAL_CST:
13601 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13602 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13603 else
13604 t = arg0;
13605 break;
13607 default:
13608 gcc_unreachable ();
13611 return t;
13614 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13615 constant. TYPE is the type of the result. */
13617 static tree
13618 fold_not_const (const_tree arg0, tree type)
13620 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13622 return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
13625 /* Given CODE, a relational operator, the target type, TYPE and two
13626 constant operands OP0 and OP1, return the result of the
13627 relational operation. If the result is not a compile time
13628 constant, then return NULL_TREE. */
13630 static tree
13631 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13633 int result, invert;
13635 /* From here on, the only cases we handle are when the result is
13636 known to be a constant. */
13638 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13640 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13641 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13643 /* Handle the cases where either operand is a NaN. */
13644 if (real_isnan (c0) || real_isnan (c1))
13646 switch (code)
13648 case EQ_EXPR:
13649 case ORDERED_EXPR:
13650 result = 0;
13651 break;
13653 case NE_EXPR:
13654 case UNORDERED_EXPR:
13655 case UNLT_EXPR:
13656 case UNLE_EXPR:
13657 case UNGT_EXPR:
13658 case UNGE_EXPR:
13659 case UNEQ_EXPR:
13660 result = 1;
13661 break;
13663 case LT_EXPR:
13664 case LE_EXPR:
13665 case GT_EXPR:
13666 case GE_EXPR:
13667 case LTGT_EXPR:
13668 if (flag_trapping_math)
13669 return NULL_TREE;
13670 result = 0;
13671 break;
13673 default:
13674 gcc_unreachable ();
13677 return constant_boolean_node (result, type);
13680 return constant_boolean_node (real_compare (code, c0, c1), type);
13683 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13685 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13686 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13687 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13690 /* Handle equality/inequality of complex constants. */
13691 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13693 tree rcond = fold_relational_const (code, type,
13694 TREE_REALPART (op0),
13695 TREE_REALPART (op1));
13696 tree icond = fold_relational_const (code, type,
13697 TREE_IMAGPART (op0),
13698 TREE_IMAGPART (op1));
13699 if (code == EQ_EXPR)
13700 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13701 else if (code == NE_EXPR)
13702 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13703 else
13704 return NULL_TREE;
13707 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13709 if (!VECTOR_TYPE_P (type))
13711 /* Have vector comparison with scalar boolean result. */
13712 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
13713 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
13714 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
13716 tree elem0 = VECTOR_CST_ELT (op0, i);
13717 tree elem1 = VECTOR_CST_ELT (op1, i);
13718 tree tmp = fold_relational_const (code, type, elem0, elem1);
13719 if (tmp == NULL_TREE)
13720 return NULL_TREE;
13721 if (integer_zerop (tmp))
13722 return constant_boolean_node (false, type);
13724 return constant_boolean_node (true, type);
13726 unsigned count = VECTOR_CST_NELTS (op0);
13727 gcc_assert (VECTOR_CST_NELTS (op1) == count
13728 && TYPE_VECTOR_SUBPARTS (type) == count);
13730 auto_vec<tree, 32> elts (count);
13731 for (unsigned i = 0; i < count; i++)
13733 tree elem_type = TREE_TYPE (type);
13734 tree elem0 = VECTOR_CST_ELT (op0, i);
13735 tree elem1 = VECTOR_CST_ELT (op1, i);
13737 tree tem = fold_relational_const (code, elem_type,
13738 elem0, elem1);
13740 if (tem == NULL_TREE)
13741 return NULL_TREE;
13743 elts.quick_push (build_int_cst (elem_type,
13744 integer_zerop (tem) ? 0 : -1));
13747 return build_vector (type, elts);
13750 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13752 To compute GT, swap the arguments and do LT.
13753 To compute GE, do LT and invert the result.
13754 To compute LE, swap the arguments, do LT and invert the result.
13755 To compute NE, do EQ and invert the result.
13757 Therefore, the code below must handle only EQ and LT. */
13759 if (code == LE_EXPR || code == GT_EXPR)
13761 std::swap (op0, op1);
13762 code = swap_tree_comparison (code);
13765 /* Note that it is safe to invert for real values here because we
13766 have already handled the one case where it matters. */
13768 invert = 0;
13769 if (code == NE_EXPR || code == GE_EXPR)
13771 invert = 1;
13772 code = invert_tree_comparison (code, false);
13775 /* Compute a result for LT or EQ if args permit;
13776 otherwise return NULL_TREE. */
13777 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13779 if (code == EQ_EXPR)
13780 result = tree_int_cst_equal (op0, op1);
13781 else
13782 result = tree_int_cst_lt (op0, op1);
13784 else
13785 return NULL_TREE;
13787 if (invert)
13788 result ^= 1;
13789 return constant_boolean_node (result, type);
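/* Illustrative example (not from this file): folding 5 > 3 swaps the
   operands and computes 3 < 5 == 1 with no inversion; folding 5 >= 3
   computes 5 < 3 == 0 and inverts it, also yielding 1.  */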
13792 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
13793 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
13794 itself. */
13796 tree
13797 fold_build_cleanup_point_expr (tree type, tree expr)
13799 /* If the expression does not have side effects then we don't have to wrap
13800 it with a cleanup point expression. */
13801 if (!TREE_SIDE_EFFECTS (expr))
13802 return expr;
13804 /* If the expression is a return, check whether the expression inside
13805 the return, or the right-hand side of the MODIFY_EXPR inside the
13806 return, has side effects. If either has none, we don't need to wrap
13807 the expression in a cleanup point expression. Note we don't check the
13808 left-hand side of the modify because it should always be a return decl. */
13809 if (TREE_CODE (expr) == RETURN_EXPR)
13811 tree op = TREE_OPERAND (expr, 0);
13812 if (!op || !TREE_SIDE_EFFECTS (op))
13813 return expr;
13814 op = TREE_OPERAND (op, 1);
13815 if (!TREE_SIDE_EFFECTS (op))
13816 return expr;
13819 return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
13822 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13823 of an indirection through OP0, or NULL_TREE if no simplification is
13824 possible. */
13826 tree
13827 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
13829 tree sub = op0;
13830 tree subtype;
13832 STRIP_NOPS (sub);
13833 subtype = TREE_TYPE (sub);
13834 if (!POINTER_TYPE_P (subtype)
13835 || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
13836 return NULL_TREE;
13838 if (TREE_CODE (sub) == ADDR_EXPR)
13840 tree op = TREE_OPERAND (sub, 0);
13841 tree optype = TREE_TYPE (op);
13842 /* *&CONST_DECL -> to the value of the const decl. */
13843 if (TREE_CODE (op) == CONST_DECL)
13844 return DECL_INITIAL (op);
13845 /* *&p => p; make sure to handle *&"str"[cst] here. */
13846 if (type == optype)
13848 tree fop = fold_read_from_constant_string (op);
13849 if (fop)
13850 return fop;
13851 else
13852 return op;
13854 /* *(foo *)&fooarray => fooarray[0] */
13855 else if (TREE_CODE (optype) == ARRAY_TYPE
13856 && type == TREE_TYPE (optype)
13857 && (!in_gimple_form
13858 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
13860 tree type_domain = TYPE_DOMAIN (optype);
13861 tree min_val = size_zero_node;
13862 if (type_domain && TYPE_MIN_VALUE (type_domain))
13863 min_val = TYPE_MIN_VALUE (type_domain);
13864 if (in_gimple_form
13865 && TREE_CODE (min_val) != INTEGER_CST)
13866 return NULL_TREE;
13867 return build4_loc (loc, ARRAY_REF, type, op, min_val,
13868 NULL_TREE, NULL_TREE);
13870 /* *(foo *)&complexfoo => __real__ complexfoo */
13871 else if (TREE_CODE (optype) == COMPLEX_TYPE
13872 && type == TREE_TYPE (optype))
13873 return fold_build1_loc (loc, REALPART_EXPR, type, op);
13874 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13875 else if (TREE_CODE (optype) == VECTOR_TYPE
13876 && type == TREE_TYPE (optype))
13878 tree part_width = TYPE_SIZE (type);
13879 tree index = bitsize_int (0);
13880 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
13884 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
13885 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13887 tree op00 = TREE_OPERAND (sub, 0);
13888 tree op01 = TREE_OPERAND (sub, 1);
13890 STRIP_NOPS (op00);
13891 if (TREE_CODE (op00) == ADDR_EXPR)
13893 tree op00type;
13894 op00 = TREE_OPERAND (op00, 0);
13895 op00type = TREE_TYPE (op00);
13897 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
13898 if (TREE_CODE (op00type) == VECTOR_TYPE
13899 && type == TREE_TYPE (op00type))
13901 tree part_width = TYPE_SIZE (type);
13902 unsigned HOST_WIDE_INT max_offset
13903 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
13904 * TYPE_VECTOR_SUBPARTS (op00type));
13905 if (tree_int_cst_sign_bit (op01) == 0
13906 && compare_tree_int (op01, max_offset) == -1)
13908 unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
13909 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
13910 tree index = bitsize_int (indexi);
13911 return fold_build3_loc (loc,
13912 BIT_FIELD_REF, type, op00,
13913 part_width, index);
13916 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13917 else if (TREE_CODE (op00type) == COMPLEX_TYPE
13918 && type == TREE_TYPE (op00type))
13920 tree size = TYPE_SIZE_UNIT (type);
13921 if (tree_int_cst_equal (size, op01))
13922 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
13924 /* ((foo *)&fooarray)[1] => fooarray[1] */
13925 else if (TREE_CODE (op00type) == ARRAY_TYPE
13926 && type == TREE_TYPE (op00type))
13928 tree type_domain = TYPE_DOMAIN (op00type);
13929 tree min = size_zero_node;
13930 if (type_domain && TYPE_MIN_VALUE (type_domain))
13931 min = TYPE_MIN_VALUE (type_domain);
13932 offset_int off = wi::to_offset (op01);
13933 offset_int el_sz = wi::to_offset (TYPE_SIZE_UNIT (type));
13934 offset_int remainder;
13935 off = wi::divmod_trunc (off, el_sz, SIGNED, &remainder);
13936 if (remainder == 0 && TREE_CODE (min) == INTEGER_CST)
13938 off = off + wi::to_offset (min);
13939 op01 = wide_int_to_tree (sizetype, off);
13940 return build4_loc (loc, ARRAY_REF, type, op00, op01,
13941 NULL_TREE, NULL_TREE);
13947 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13948 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13949 && type == TREE_TYPE (TREE_TYPE (subtype))
13950 && (!in_gimple_form
13951 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
13953 tree type_domain;
13954 tree min_val = size_zero_node;
13955 sub = build_fold_indirect_ref_loc (loc, sub);
13956 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13957 if (type_domain && TYPE_MIN_VALUE (type_domain))
13958 min_val = TYPE_MIN_VALUE (type_domain);
13959 if (in_gimple_form
13960 && TREE_CODE (min_val) != INTEGER_CST)
13961 return NULL_TREE;
13962 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
13963 NULL_TREE);
13966 return NULL_TREE;
13969 /* Builds an expression for an indirection through T, simplifying some
13970 cases. */
13972 tree
13973 build_fold_indirect_ref_loc (location_t loc, tree t)
13975 tree type = TREE_TYPE (TREE_TYPE (t));
13976 tree sub = fold_indirect_ref_1 (loc, type, t);
13978 if (sub)
13979 return sub;
13981 return build1_loc (loc, INDIRECT_REF, type, t);
13984 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13986 tree
13987 fold_indirect_ref_loc (location_t loc, tree t)
13989 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
13991 if (sub)
13992 return sub;
13993 else
13994 return t;
13997 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13998 whose result is ignored. The type of the returned tree need not be
13999 the same as the original expression. */
14001 tree
14002 fold_ignored_result (tree t)
14004 if (!TREE_SIDE_EFFECTS (t))
14005 return integer_zero_node;
14007 for (;;)
14008 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14010 case tcc_unary:
14011 t = TREE_OPERAND (t, 0);
14012 break;
14014 case tcc_binary:
14015 case tcc_comparison:
14016 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14017 t = TREE_OPERAND (t, 0);
14018 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14019 t = TREE_OPERAND (t, 1);
14020 else
14021 return t;
14022 break;
14024 case tcc_expression:
14025 switch (TREE_CODE (t))
14027 case COMPOUND_EXPR:
14028 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14029 return t;
14030 t = TREE_OPERAND (t, 0);
14031 break;
14033 case COND_EXPR:
14034 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14035 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14036 return t;
14037 t = TREE_OPERAND (t, 0);
14038 break;
14040 default:
14041 return t;
14043 break;
14045 default:
14046 return t;
14050 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14052 tree
14053 round_up_loc (location_t loc, tree value, unsigned int divisor)
14055 tree div = NULL_TREE;
14057 if (divisor == 1)
14058 return value;
14060 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14061 have to do anything. Only do this when VALUE is not a constant,
14062 because for a constant the check is more expensive than simply
14063 doing the rounding. */
14064 if (TREE_CODE (value) != INTEGER_CST)
14066 div = build_int_cst (TREE_TYPE (value), divisor);
14068 if (multiple_of_p (TREE_TYPE (value), value, div))
14069 return value;
14072 /* If divisor is a power of two, simplify this to bit manipulation. */
14073 if (pow2_or_zerop (divisor))
14075 if (TREE_CODE (value) == INTEGER_CST)
14077 wide_int val = wi::to_wide (value);
14078 bool overflow_p;
14080 if ((val & (divisor - 1)) == 0)
14081 return value;
14083 overflow_p = TREE_OVERFLOW (value);
14084 val += divisor - 1;
14085 val &= (int) -divisor;
14086 if (val == 0)
14087 overflow_p = true;
14089 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14091 else
14093 tree t;
14095 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14096 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14097 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14098 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14101 else
14103 if (!div)
14104 div = build_int_cst (TREE_TYPE (value), divisor);
14105 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14106 value = size_binop_loc (loc, MULT_EXPR, value, div);
14109 return value;
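/* Standalone sketch (not part of GCC) of the power-of-two branch above,
   on plain host integers; with value == 13 and divisor == 8 it computes
   (13 + 7) & -8 == 16, the smallest multiple of 8 that is >= 13.  */
#if 0
static unsigned long
round_up_example (unsigned long value, unsigned long divisor)
{
  /* Assumes divisor is a nonzero power of two.  */
  return (value + divisor - 1) & -divisor;
}
#endif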
14112 /* Likewise, but round down. */
14114 tree
14115 round_down_loc (location_t loc, tree value, int divisor)
14117 tree div = NULL_TREE;
14119 gcc_assert (divisor > 0);
14120 if (divisor == 1)
14121 return value;
14123 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14124 have to do anything. Only do this when VALUE is not a constant,
14125 because for a constant the check is more expensive than simply
14126 doing the rounding. */
14127 if (TREE_CODE (value) != INTEGER_CST)
14129 div = build_int_cst (TREE_TYPE (value), divisor);
14131 if (multiple_of_p (TREE_TYPE (value), value, div))
14132 return value;
14135 /* If divisor is a power of two, simplify this to bit manipulation. */
14136 if (pow2_or_zerop (divisor))
14138 tree t;
14140 t = build_int_cst (TREE_TYPE (value), -divisor);
14141 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14143 else
14145 if (!div)
14146 div = build_int_cst (TREE_TYPE (value), divisor);
14147 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14148 value = size_binop_loc (loc, MULT_EXPR, value, div);
14151 return value;
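/* Illustrative example (not from this file): rounding 13 down to a
   multiple of 8 just masks the low bits, 13 & -8 == 8, while the
   non-power-of-two path computes (13 / 5) * 5 == 10 for divisor 5.  */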
14154 /* Return a pointer to the base of the object addressed by EXP and
14155 extract information about the offset of the access, storing it
14156 to *PBITPOS and *POFFSET. */
14158 static tree
14159 split_address_to_core_and_offset (tree exp,
14160 HOST_WIDE_INT *pbitpos, tree *poffset)
14162 tree core;
14163 machine_mode mode;
14164 int unsignedp, reversep, volatilep;
14165 HOST_WIDE_INT bitsize;
14166 location_t loc = EXPR_LOCATION (exp);
14168 if (TREE_CODE (exp) == ADDR_EXPR)
14170 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14171 poffset, &mode, &unsignedp, &reversep,
14172 &volatilep);
14173 core = build_fold_addr_expr_loc (loc, core);
14175 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
14177 core = TREE_OPERAND (exp, 0);
14178 STRIP_NOPS (core);
14179 *pbitpos = 0;
14180 *poffset = TREE_OPERAND (exp, 1);
14181 if (TREE_CODE (*poffset) == INTEGER_CST)
14183 offset_int tem = wi::sext (wi::to_offset (*poffset),
14184 TYPE_PRECISION (TREE_TYPE (*poffset)));
14185 tem <<= LOG2_BITS_PER_UNIT;
14186 if (wi::fits_shwi_p (tem))
14188 *pbitpos = tem.to_shwi ();
14189 *poffset = NULL_TREE;
14193 else
14195 core = exp;
14196 *pbitpos = 0;
14197 *poffset = NULL_TREE;
14200 return core;
14203 /* Returns true if addresses of E1 and E2 differ by a constant, false
14204 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14206 bool
14207 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14209 tree core1, core2;
14210 HOST_WIDE_INT bitpos1, bitpos2;
14211 tree toffset1, toffset2, tdiff, type;
14213 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14214 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14216 if (bitpos1 % BITS_PER_UNIT != 0
14217 || bitpos2 % BITS_PER_UNIT != 0
14218 || !operand_equal_p (core1, core2, 0))
14219 return false;
14221 if (toffset1 && toffset2)
14223 type = TREE_TYPE (toffset1);
14224 if (type != TREE_TYPE (toffset2))
14225 toffset2 = fold_convert (type, toffset2);
14227 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14228 if (!cst_and_fits_in_hwi (tdiff))
14229 return false;
14231 *diff = int_cst_value (tdiff);
14233 else if (toffset1 || toffset2)
14235 /* If only one of the offsets is non-constant, the difference cannot
14236 be a constant. */
14237 return false;
14239 else
14240 *diff = 0;
14242 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14243 return true;
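/* Illustrative example (not from this file): for int a[16], E1 == &a[10]
   and E2 == &a[4] share the core &a with bit positions 320 and 128, so
   *DIFF becomes (320 - 128) / 8 == 24 bytes.  */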
14246 /* Return OFF converted to a pointer offset type suitable as an offset
14247 for POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14248 tree
14249 convert_to_ptrofftype_loc (location_t loc, tree off)
14251 return fold_convert_loc (loc, sizetype, off);
14254 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14255 tree
14256 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14258 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14259 ptr, convert_to_ptrofftype_loc (loc, off));
14262 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14263 tree
14264 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14266 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14267 ptr, size_int (off));
14270 /* Return a char pointer for a C string if it is a string constant
14271 or the sum of a string constant and an integer constant. We only
14272 support string constants properly terminated with a '\0' character.
14273 If STRLEN is a valid pointer, the length (including the terminating
14274 character) of the returned string is stored to *STRLEN. */
14276 const char *
14277 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
14279 tree offset_node;
14281 if (strlen)
14282 *strlen = 0;
14284 src = string_constant (src, &offset_node);
14285 if (src == 0)
14286 return NULL;
14288 unsigned HOST_WIDE_INT offset = 0;
14289 if (offset_node != NULL_TREE)
14291 if (!tree_fits_uhwi_p (offset_node))
14292 return NULL;
14293 else
14294 offset = tree_to_uhwi (offset_node);
14297 unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
14298 const char *string = TREE_STRING_POINTER (src);
14300 /* Support only properly null-terminated strings. */
14301 if (string_length == 0
14302 || string[string_length - 1] != '\0'
14303 || offset >= string_length)
14304 return NULL;
14306 if (strlen)
14307 *strlen = string_length - offset;
14308 return string + offset;
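/* Illustrative example (not from this file): for SRC == "hello" + 2 the
   call returns a pointer to "llo" and stores 4 through STRLEN: three
   characters plus the terminating NUL of the 6-byte STRING_CST.  */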
14311 #if CHECKING_P
14313 namespace selftest {
14315 /* Helper functions for writing tests of folding trees. */
14317 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT. */
14319 static void
14320 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
14321 tree constant)
14323 ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
14326 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
14327 wrapping WRAPPED_EXPR. */
14329 static void
14330 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
14331 tree wrapped_expr)
14333 tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
14334 ASSERT_NE (wrapped_expr, result);
14335 ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
14336 ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
14339 /* Verify that various arithmetic binary operations are folded
14340 correctly. */
14342 static void
14343 test_arithmetic_folding ()
14345 tree type = integer_type_node;
14346 tree x = create_tmp_var_raw (type, "x");
14347 tree zero = build_zero_cst (type);
14348 tree one = build_int_cst (type, 1);
14350 /* Addition. */
14351 /* 1 <-- (0 + 1) */
14352 assert_binop_folds_to_const (zero, PLUS_EXPR, one,
14353 one);
14354 assert_binop_folds_to_const (one, PLUS_EXPR, zero,
14355 one);
14357 /* (nonlvalue)x <-- (x + 0) */
14358 assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
14359 x);
14361 /* Subtraction. */
14362 /* 0 <-- (x - x) */
14363 assert_binop_folds_to_const (x, MINUS_EXPR, x,
14364 zero);
14365 assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
14366 x);
14368 /* Multiplication. */
14369 /* 0 <-- (x * 0) */
14370 assert_binop_folds_to_const (x, MULT_EXPR, zero,
14371 zero);
14373 /* (nonlvalue)x <-- (x * 1) */
14374 assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
14375 x);
14378 /* Verify that various binary operations on vectors are folded
14379 correctly. */
14381 static void
14382 test_vector_folding ()
14384 tree inner_type = integer_type_node;
14385 tree type = build_vector_type (inner_type, 4);
14386 tree zero = build_zero_cst (type);
14387 tree one = build_one_cst (type);
14389 /* Verify equality tests that return a scalar boolean result. */
14390 tree res_type = boolean_type_node;
14391 ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
14392 ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
14393 ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
14394 ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
14397 /* Run all of the selftests within this file. */
14399 void
14400 fold_const_c_tests ()
14402 test_arithmetic_folding ();
14403 test_vector_folding ();
14406 } // namespace selftest
14408 #endif /* CHECKING_P */