/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
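
/* Editorial example (a sketch added by the editor, not part of the original
   source): how a caller might use these entry points, assuming the usual
   tree-building API from tree.h.  `some_expr' is a hypothetical operand.

     tree four = size_int (4);                        // sizetype constant 4
     tree eight = size_int (8);
     tree sz = size_binop (MULT_EXPR, four, eight);   // folds to size_int (32)
     tree e = fold (build2 (PLUS_EXPR, integer_type_node,
                            integer_zero_node, some_expr));  // folds to some_expr

   Both calls return already-simplified trees when the operands permit.  */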
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "params.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-ssanames.h"

#ifndef LOAD_EXTEND_OP
#define LOAD_EXTEND_OP(M) UNKNOWN
#endif

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
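
/* Editorial note (sketch added by the editor, not in the original source):
   the encoding above is a four-bit mask with LT = 1, EQ = 2, GT = 4 and
   UNORD = 8, so the compound codes are unions of the primitive ones and
   AND/OR of comparisons reduce to bitwise operations on the codes:

     COMPCODE_LE   == (COMPCODE_LT | COMPCODE_EQ)                    // 3
     COMPCODE_GE   == (COMPCODE_EQ | COMPCODE_GT)                    // 6
     COMPCODE_NE   == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD)   // 13

   For example, "a < b || a == b" combines as (1 | 2) == COMPCODE_LE.  */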
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (location_t, tree, tree, enum tree_code,
			tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static bool vec_cst_ctor_to_array (tree, tree *);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
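
/* Editorial example (sketch added by the editor, not in the original source):

     tree a = build_int_cst (integer_type_node, 12);
     tree b = build_int_cst (integer_type_node, 4);
     tree q = div_if_zero_remainder (a, b);   // INTEGER_CST 3
     tree c = build_int_cst (integer_type_node, 5);
     tree r = div_if_zero_remainder (a, c);   // NULL_TREE, since 12 % 5 != 0
*/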
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
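
/* Editorial usage sketch (added by the editor, not in the original source):
   callers bracket speculative folding with defer/undefer so that a
   -Wstrict-overflow warning is only emitted if the folded result is
   actually used.  `useful_p' below is a hypothetical predicate.

     fold_defer_overflow_warnings ();
     tree folded = fold_binary (PLUS_EXPR, type, op0, op1);
     if (folded && useful_p (folded))
       fold_undefer_overflow_warnings (true, stmt, 0);
     else
       fold_undefer_and_ignore_overflow_warnings ();
*/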
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
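
/* Editorial example (added by the editor, not in the original source):
   ROUND qualifies because round(-2.5) == -3 == -round(2.5), so -round(x)
   can be folded to round(-x).  RINT only qualifies when -frounding-math
   is off: with rounding toward +infinity, rint(-0.5) == -0.0 while
   -rint(0.5) == -1.0, so the identity fails under a directed mode.  */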
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}
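
/* Editorial example (added by the editor, not in the original source):
   for a 32-bit signed type, -0x7fffffff is representable but
   -(-0x80000000) is not, so:

     may_negate_without_overflow_p (<INT_MAX node>)  -> true
     may_negate_without_overflow_p (<INT_MIN node>)  -> false (only the
							sign bit is set)
*/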
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, but negating one operand makes
	 it overflow if n is a power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && ! integer_pow2p (TREE_OPERAND (t, 0)))
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && ! integer_pow2p (TREE_OPERAND (t, 1)))))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      if (negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
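
/* Editorial example (added by the editor, not in the original source):
   negate_expr performs the rewrites that negate_expr_p tested for, e.g.
   for integer operands a, b of a type with wrapping overflow:

     negate_expr (a - b)   ->  b - a        (MINUS_EXPR case)
     negate_expr (~a)      ->  a + 1        (BIT_NOT_EXPR case)
     negate_expr (c)       ->  constant -c  (INTEGER_CST case, folded)

   When no simpler form exists it falls back to wrapping T in a
   NEGATE_EXPR node.  */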
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (location_t loc, tree in, tree type, enum tree_code code,
	    tree *conp, tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p && var)
	{
	  /* Convert to TYPE before negating.  */
	  var = fold_convert_loc (loc, type, var);
	  var = negate_expr (var);
	}
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      if (var)
	{
	  /* Convert to TYPE before negating.  */
	  var = fold_convert_loc (loc, type, var);
	  var = negate_expr (var);
	}
    }

  return var;
}
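
/* Editorial example (added by the editor, not in the original source):

     tree con, lit, minus_lit;
     tree var = split_tree (loc, in, type, PLUS_EXPR,
			    &con, &lit, &minus_lit, /*negate_p=*/0);

     // IN == x + 5:  lit == 5, con == NULL, returned var == x
     // IN == x - 5:  the subtracted literal goes to *MINUS_LITP,
     //               so minus_lit == 5 and var == x

   associate_trees below recombines the pieces after the caller has folded
   the constant parts together.  */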
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
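
/* Editorial example (added by the editor, not in the original source):
   the arithmetic goes through the wide-int layer, so overflow and
   divide-by-zero are handled explicitly:

     tree a = build_int_cst (integer_type_node, 7);
     tree b = build_int_cst (integer_type_node, 9);
     tree s = int_const_binop (PLUS_EXPR, a, b);        // INTEGER_CST 16
     tree d = int_const_binop (TRUNC_DIV_EXPR, a,
			       integer_zero_node);      // NULL_TREE
*/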
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	{
	  /* Make the resulting NaN a qNaN when flag_signaling_nans
	     is off.  */
	  d1.signalling = 0;
	  t = build_real (type, d1);
	  return t;
	}
      else if (REAL_VALUE_ISNAN (d2))
	{
	  /* Make the resulting NaN a qNaN when flag_signaling_nans
	     is off.  */
	  d2.signalling = 0;
	  t = build_real (type, d2);
	  return t;
	}

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.elt (0);
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ... */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  elts[i] = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
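
/* Editorial example (added by the editor, not in the original source):
   REAL_CST folds go through GCC's software floating point
   (real_arithmetic) and are then guarded.  With -ftrapping-math a
   constant 1.0/0.0 is NOT folded, since it must raise a division-by-zero
   exception at run time, while 1.5 + 2.25 folds to the REAL_CST 3.75
   because the result is exact in the target format.  */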
/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}
/* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
   Return zero if computing the constants is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  /* Don't perform the operation, other than NEGATE and ABS, if
     flag_signaling_nans is on and the operand is a signaling NaN.  */
  if (TREE_CODE (arg0) == REAL_CST
      && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
      && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
      && code != NEGATE_EXPR
      && code != ABS_EXPR)
    return NULL_TREE;

  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      /* If the source address is 0, and the source address space
	 cannot have a valid object at 0, fold to dest type null.  */
      if (integer_zerop (arg0)
	  && !(targetm.addr_space.zero_address_valid
	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts, i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;
	nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	  }

	return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}
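
/* Editorial example (added by the editor, not in the original source);
   `complex_cst' is a hypothetical COMPLEX_CST operand:

     tree m1 = const_unop (BIT_NOT_EXPR, integer_type_node,
			   integer_zero_node);            // INTEGER_CST -1
     tree re = const_unop (REALPART_EXPR, double_type_node,
			   complex_cst);                  // constant real part
*/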
/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
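
/* Editorial example (added by the editor, not in the original source):
   when both operands are INTEGER_CSTs the fast paths return an existing
   node instead of allocating a new one, e.g.

     size_binop (PLUS_EXPR, size_int (32), size_zero_node)  // size_int (32)
     size_binop (MULT_EXPR, size_one_node, size_int (32))   // size_int (32)

   and genuine constant pairs are combined with overflow always tracked
   (overflowable == -1), even though sizetype is unsigned.  */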
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
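
/* Editorial example (added by the editor, not in the original source):
   both operands have the unsigned sizetype, but the difference comes back
   in the signed ssizetype, so a negative result is representable:

     size_diffop (size_int (4), size_int (12))   // ssizetype constant -8
*/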
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  wide_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = true;
      val = wi::zero (TYPE_PRECISION (type));
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (real_less (&r, &l))
	{
	  overflow = true;
	  val = lt;
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (real_less (&u, &r))
	    {
	      overflow = true;
	      val = ut;
	    }
	}
    }

  if (! overflow)
    val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));

  t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
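
/* Editorial example (added by the editor, not in the original source),
   using the saturating semantics described above for a 32-bit int
   target type:

     (int) 1e20   ->  INT_MAX (0x7fffffff), TREE_OVERFLOW set
     (int) -1e20  ->  INT_MIN, TREE_OVERFLOW set
     (int) NaN    ->  0, TREE_OVERFLOW set
*/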
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do this by adding 1 to temp when the fractional bits are not
     all zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type (type, temp, -1,
		      (temp.is_negative ()
		       && (TYPE_UNSIGNED (type)
			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
		      | TREE_OVERFLOW (arg1));

  return t;
}
1972 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1973 to another floating point type. */
1975 static tree
1976 fold_convert_const_real_from_real (tree type, const_tree arg1)
1978 REAL_VALUE_TYPE value;
1979 tree t;
1981 /* Don't perform the operation if flag_signaling_nans is on
1982 and the operand is a signaling NaN. */
1983 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
1984 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
1985 return NULL_TREE;
1987 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1988 t = build_real (type, value);
1990 /* If converting an infinity or NAN to a representation that doesn't
1991 have one, set the overflow bit so that we can produce some kind of
1992 error message at the appropriate point if necessary. It's not the
1993 most user-friendly message, but it's better than nothing. */
1994 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1995 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1996 TREE_OVERFLOW (t) = 1;
1997 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1998 && !MODE_HAS_NANS (TYPE_MODE (type)))
1999 TREE_OVERFLOW (t) = 1;
2000 /* Regular overflow: the conversion produced an infinity in a mode that
2001 can't represent one. */
2002 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2003 && REAL_VALUE_ISINF (value)
2004 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2005 TREE_OVERFLOW (t) = 1;
2006 else
2007 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2008 return t;
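/* For instance (illustrative): folding (float) 0.1 re-rounds the double
   constant to the nearest float value.  Infinities and NaNs convert
   freely when the target mode has them; the TREE_OVERFLOW branches
   above only fire for modes lacking infinities or NaNs, or when a
   finite value overflows to infinity in such a mode.  With
   -fsignaling-nans, a signaling-NaN operand is left unfolded so the
   runtime conversion can still raise the expected exception.  */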
2011 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2012 to a floating-point type. */
2014 static tree
2015 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2017 REAL_VALUE_TYPE value;
2018 tree t;
2020 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2021 t = build_real (type, value);
2023 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2024 return t;
2027 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2028 to another fixed-point type. */
2030 static tree
2031 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2033 FIXED_VALUE_TYPE value;
2034 tree t;
2035 bool overflow_p;
2037 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2038 TYPE_SATURATING (type));
2039 t = build_fixed (type, value);
2041 /* Propagate overflow flags. */
2042 if (overflow_p | TREE_OVERFLOW (arg1))
2043 TREE_OVERFLOW (t) = 1;
2044 return t;
2047 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2048 to a fixed-point type. */
2050 static tree
2051 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2053 FIXED_VALUE_TYPE value;
2054 tree t;
2055 bool overflow_p;
2056 double_int di;
2058 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2060 di.low = TREE_INT_CST_ELT (arg1, 0);
2061 if (TREE_INT_CST_NUNITS (arg1) == 1)
2062 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2063 else
2064 di.high = TREE_INT_CST_ELT (arg1, 1);
2066 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2067 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2068 TYPE_SATURATING (type));
2069 t = build_fixed (type, value);
2071 /* Propagate overflow flags. */
2072 if (overflow_p | TREE_OVERFLOW (arg1))
2073 TREE_OVERFLOW (t) = 1;
2074 return t;
2077 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2078 to a fixed-point type. */
2080 static tree
2081 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2083 FIXED_VALUE_TYPE value;
2084 tree t;
2085 bool overflow_p;
2087 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2088 &TREE_REAL_CST (arg1),
2089 TYPE_SATURATING (type));
2090 t = build_fixed (type, value);
2092 /* Propagate overflow flags. */
2093 if (overflow_p | TREE_OVERFLOW (arg1))
2094 TREE_OVERFLOW (t) = 1;
2095 return t;
2098 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2099 type TYPE. If no simplification can be done return NULL_TREE. */
2101 static tree
2102 fold_convert_const (enum tree_code code, tree type, tree arg1)
2104 if (TREE_TYPE (arg1) == type)
2105 return arg1;
2107 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2108 || TREE_CODE (type) == OFFSET_TYPE)
2110 if (TREE_CODE (arg1) == INTEGER_CST)
2111 return fold_convert_const_int_from_int (type, arg1);
2112 else if (TREE_CODE (arg1) == REAL_CST)
2113 return fold_convert_const_int_from_real (code, type, arg1);
2114 else if (TREE_CODE (arg1) == FIXED_CST)
2115 return fold_convert_const_int_from_fixed (type, arg1);
2117 else if (TREE_CODE (type) == REAL_TYPE)
2119 if (TREE_CODE (arg1) == INTEGER_CST)
2120 return build_real_from_int_cst (type, arg1);
2121 else if (TREE_CODE (arg1) == REAL_CST)
2122 return fold_convert_const_real_from_real (type, arg1);
2123 else if (TREE_CODE (arg1) == FIXED_CST)
2124 return fold_convert_const_real_from_fixed (type, arg1);
2126 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2128 if (TREE_CODE (arg1) == FIXED_CST)
2129 return fold_convert_const_fixed_from_fixed (type, arg1);
2130 else if (TREE_CODE (arg1) == INTEGER_CST)
2131 return fold_convert_const_fixed_from_int (type, arg1);
2132 else if (TREE_CODE (arg1) == REAL_CST)
2133 return fold_convert_const_fixed_from_real (type, arg1);
2135 else if (TREE_CODE (type) == VECTOR_TYPE)
2137 if (TREE_CODE (arg1) == VECTOR_CST
2138 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2140 int len = TYPE_VECTOR_SUBPARTS (type);
2141 tree elttype = TREE_TYPE (type);
2142 tree *v = XALLOCAVEC (tree, len);
2143 for (int i = 0; i < len; ++i)
2145 tree elt = VECTOR_CST_ELT (arg1, i);
2146 tree cvt = fold_convert_const (code, elttype, elt);
2147 if (cvt == NULL_TREE)
2148 return NULL_TREE;
2149 v[i] = cvt;
2151 return build_vector (type, v);
2154 return NULL_TREE;
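/* Usage sketch (a hypothetical caller within this file): folding
   "(unsigned char) 300" at compile time would look roughly like

     tree t = fold_convert_const (NOP_EXPR, unsigned_char_type_node,
				  build_int_cst (integer_type_node, 300));

   which dispatches to fold_convert_const_int_from_int and yields the
   INTEGER_CST 44, i.e. 300 reduced modulo 256 by force_fit_type.  */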
2157 /* Construct a vector of zero elements of vector type TYPE. */
2159 static tree
2160 build_zero_vector (tree type)
2162 tree t;
2164 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2165 return build_vector_from_val (type, t);
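/* E.g. for a "vector(4) int" TYPE this yields the VECTOR_CST
   { 0, 0, 0, 0 }: the scalar zero is converted to the element type and
   then replicated by build_vector_from_val.  */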
2168 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2170 bool
2171 fold_convertible_p (const_tree type, const_tree arg)
2173 tree orig = TREE_TYPE (arg);
2175 if (type == orig)
2176 return true;
2178 if (TREE_CODE (arg) == ERROR_MARK
2179 || TREE_CODE (type) == ERROR_MARK
2180 || TREE_CODE (orig) == ERROR_MARK)
2181 return false;
2183 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2184 return true;
2186 switch (TREE_CODE (type))
2188 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2189 case POINTER_TYPE: case REFERENCE_TYPE:
2190 case OFFSET_TYPE:
2191 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2192 || TREE_CODE (orig) == OFFSET_TYPE);
2194 case REAL_TYPE:
2195 case FIXED_POINT_TYPE:
2196 case COMPLEX_TYPE:
2197 case VECTOR_TYPE:
2198 case VOID_TYPE:
2199 return TREE_CODE (type) == TREE_CODE (orig);
2201 default:
2202 return false;
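/* For example, int -> long and long -> char * are fold-convertible
   (integral and pointer types interconvert via NOP_EXPR), while
   int -> float is not, since that conversion needs a FLOAT_EXPR.  */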
2206 /* Convert expression ARG to type TYPE. Used by the middle-end for
2207 simple conversions in preference to calling the front-end's convert. */
2209 tree
2210 fold_convert_loc (location_t loc, tree type, tree arg)
2212 tree orig = TREE_TYPE (arg);
2213 tree tem;
2215 if (type == orig)
2216 return arg;
2218 if (TREE_CODE (arg) == ERROR_MARK
2219 || TREE_CODE (type) == ERROR_MARK
2220 || TREE_CODE (orig) == ERROR_MARK)
2221 return error_mark_node;
2223 switch (TREE_CODE (type))
2225 case POINTER_TYPE:
2226 case REFERENCE_TYPE:
2227 /* Handle conversions between pointers to different address spaces. */
2228 if (POINTER_TYPE_P (orig)
2229 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2230 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2231 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2232 /* fall through */
2234 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2235 case OFFSET_TYPE:
2236 if (TREE_CODE (arg) == INTEGER_CST)
2238 tem = fold_convert_const (NOP_EXPR, type, arg);
2239 if (tem != NULL_TREE)
2240 return tem;
2242 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2243 || TREE_CODE (orig) == OFFSET_TYPE)
2244 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2245 if (TREE_CODE (orig) == COMPLEX_TYPE)
2246 return fold_convert_loc (loc, type,
2247 fold_build1_loc (loc, REALPART_EXPR,
2248 TREE_TYPE (orig), arg));
2249 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2250 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2251 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2253 case REAL_TYPE:
2254 if (TREE_CODE (arg) == INTEGER_CST)
2256 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2257 if (tem != NULL_TREE)
2258 return tem;
2260 else if (TREE_CODE (arg) == REAL_CST)
2262 tem = fold_convert_const (NOP_EXPR, type, arg);
2263 if (tem != NULL_TREE)
2264 return tem;
2266 else if (TREE_CODE (arg) == FIXED_CST)
2268 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2269 if (tem != NULL_TREE)
2270 return tem;
2273 switch (TREE_CODE (orig))
2275 case INTEGER_TYPE:
2276 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2277 case POINTER_TYPE: case REFERENCE_TYPE:
2278 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2280 case REAL_TYPE:
2281 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2283 case FIXED_POINT_TYPE:
2284 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2286 case COMPLEX_TYPE:
2287 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2288 return fold_convert_loc (loc, type, tem);
2290 default:
2291 gcc_unreachable ();
2294 case FIXED_POINT_TYPE:
2295 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2296 || TREE_CODE (arg) == REAL_CST)
2298 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2299 if (tem != NULL_TREE)
2300 goto fold_convert_exit;
2303 switch (TREE_CODE (orig))
2305 case FIXED_POINT_TYPE:
2306 case INTEGER_TYPE:
2307 case ENUMERAL_TYPE:
2308 case BOOLEAN_TYPE:
2309 case REAL_TYPE:
2310 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2312 case COMPLEX_TYPE:
2313 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2314 return fold_convert_loc (loc, type, tem);
2316 default:
2317 gcc_unreachable ();
2320 case COMPLEX_TYPE:
2321 switch (TREE_CODE (orig))
2323 case INTEGER_TYPE:
2324 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2325 case POINTER_TYPE: case REFERENCE_TYPE:
2326 case REAL_TYPE:
2327 case FIXED_POINT_TYPE:
2328 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2329 fold_convert_loc (loc, TREE_TYPE (type), arg),
2330 fold_convert_loc (loc, TREE_TYPE (type),
2331 integer_zero_node));
2332 case COMPLEX_TYPE:
2334 tree rpart, ipart;
2336 if (TREE_CODE (arg) == COMPLEX_EXPR)
2338 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2339 TREE_OPERAND (arg, 0));
2340 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2341 TREE_OPERAND (arg, 1));
2342 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2345 arg = save_expr (arg);
2346 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2347 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2348 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2349 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2350 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2353 default:
2354 gcc_unreachable ();
2357 case VECTOR_TYPE:
2358 if (integer_zerop (arg))
2359 return build_zero_vector (type);
2360 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2361 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2362 || TREE_CODE (orig) == VECTOR_TYPE);
2363 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2365 case VOID_TYPE:
2366 tem = fold_ignored_result (arg);
2367 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2369 default:
2370 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2371 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2372 gcc_unreachable ();
2374 fold_convert_exit:
2375 protected_set_expr_location_unshare (tem, loc);
2376 return tem;
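/* Two illustrative cases of the dispatch above: converting a
   _Complex double value C to double keeps only its real part,
     fold_convert_loc (loc, double_type_node, c) ==> REALPART_EXPR <c>
   and converting a double D to _Complex double pairs it with a zero
   imaginary part, yielding COMPLEX_EXPR <d, 0.0>.  */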
2379 /* Return false if expr can be assumed not to be an lvalue, true
2380 otherwise. */
2382 static bool
2383 maybe_lvalue_p (const_tree x)
2385 /* We only need to wrap lvalue tree codes. */
2386 switch (TREE_CODE (x))
2388 case VAR_DECL:
2389 case PARM_DECL:
2390 case RESULT_DECL:
2391 case LABEL_DECL:
2392 case FUNCTION_DECL:
2393 case SSA_NAME:
2395 case COMPONENT_REF:
2396 case MEM_REF:
2397 case INDIRECT_REF:
2398 case ARRAY_REF:
2399 case ARRAY_RANGE_REF:
2400 case BIT_FIELD_REF:
2401 case OBJ_TYPE_REF:
2403 case REALPART_EXPR:
2404 case IMAGPART_EXPR:
2405 case PREINCREMENT_EXPR:
2406 case PREDECREMENT_EXPR:
2407 case SAVE_EXPR:
2408 case TRY_CATCH_EXPR:
2409 case WITH_CLEANUP_EXPR:
2410 case COMPOUND_EXPR:
2411 case MODIFY_EXPR:
2412 case TARGET_EXPR:
2413 case COND_EXPR:
2414 case BIND_EXPR:
2415 break;
2417 default:
2418 /* Assume the worst for front-end tree codes. */
2419 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2420 break;
2421 return false;
2424 return true;
2427 /* Return an expr equal to X but certainly not valid as an lvalue. */
2429 tree
2430 non_lvalue_loc (location_t loc, tree x)
2432 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2433 us. */
2434 if (in_gimple_form)
2435 return x;
2437 if (! maybe_lvalue_p (x))
2438 return x;
2439 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2442 /* When pedantic, return an expr equal to X but certainly not valid as a
2443 pedantic lvalue. Otherwise, return X. */
2445 static tree
2446 pedantic_non_lvalue_loc (location_t loc, tree x)
2448 return protected_set_expr_location_unshare (x, loc);
2451 /* Given a tree comparison code, return the code that is the logical inverse.
2452 It is generally not safe to do this for floating-point comparisons, except
2453 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2454 ERROR_MARK in this case. */
2456 enum tree_code
2457 invert_tree_comparison (enum tree_code code, bool honor_nans)
2459 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2460 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2461 return ERROR_MARK;
2463 switch (code)
2465 case EQ_EXPR:
2466 return NE_EXPR;
2467 case NE_EXPR:
2468 return EQ_EXPR;
2469 case GT_EXPR:
2470 return honor_nans ? UNLE_EXPR : LE_EXPR;
2471 case GE_EXPR:
2472 return honor_nans ? UNLT_EXPR : LT_EXPR;
2473 case LT_EXPR:
2474 return honor_nans ? UNGE_EXPR : GE_EXPR;
2475 case LE_EXPR:
2476 return honor_nans ? UNGT_EXPR : GT_EXPR;
2477 case LTGT_EXPR:
2478 return UNEQ_EXPR;
2479 case UNEQ_EXPR:
2480 return LTGT_EXPR;
2481 case UNGT_EXPR:
2482 return LE_EXPR;
2483 case UNGE_EXPR:
2484 return LT_EXPR;
2485 case UNLT_EXPR:
2486 return GE_EXPR;
2487 case UNLE_EXPR:
2488 return GT_EXPR;
2489 case ORDERED_EXPR:
2490 return UNORDERED_EXPR;
2491 case UNORDERED_EXPR:
2492 return ORDERED_EXPR;
2493 default:
2494 gcc_unreachable ();
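/* For example, with trapping math disabled:
     invert_tree_comparison (LT_EXPR, /,*honor_nans=*,/false) == GE_EXPR
     invert_tree_comparison (LT_EXPR, /,*honor_nans=*,/true)  == UNGE_EXPR
   With NaNs honored and flag_trapping_math set, the function instead
   returns ERROR_MARK for LT_EXPR: rewriting !(x < y) as "x unge y"
   would lose the invalid-operation exception that "<" raises on NaN
   operands.  */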
2498 /* Similar, but return the comparison that results if the operands are
2499 swapped. This is safe for floating-point. */
2501 enum tree_code
2502 swap_tree_comparison (enum tree_code code)
2504 switch (code)
2506 case EQ_EXPR:
2507 case NE_EXPR:
2508 case ORDERED_EXPR:
2509 case UNORDERED_EXPR:
2510 case LTGT_EXPR:
2511 case UNEQ_EXPR:
2512 return code;
2513 case GT_EXPR:
2514 return LT_EXPR;
2515 case GE_EXPR:
2516 return LE_EXPR;
2517 case LT_EXPR:
2518 return GT_EXPR;
2519 case LE_EXPR:
2520 return GE_EXPR;
2521 case UNGT_EXPR:
2522 return UNLT_EXPR;
2523 case UNGE_EXPR:
2524 return UNLE_EXPR;
2525 case UNLT_EXPR:
2526 return UNGT_EXPR;
2527 case UNLE_EXPR:
2528 return UNGE_EXPR;
2529 default:
2530 gcc_unreachable ();
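/* E.g. swap_tree_comparison (LT_EXPR) == GT_EXPR, reflecting that
   "a < b" and "b > a" are the same test.  Unlike inversion, swapping is
   always safe for floating point: UNGT swaps to UNLT, and the symmetric
   codes (EQ, NE, ORDERED, UNORDERED, LTGT, UNEQ) map to themselves.  */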
2535 /* Convert a comparison tree code from an enum tree_code representation
2536 into a compcode bit-based encoding. This function is the inverse of
2537 compcode_to_comparison. */
2539 static enum comparison_code
2540 comparison_to_compcode (enum tree_code code)
2542 switch (code)
2544 case LT_EXPR:
2545 return COMPCODE_LT;
2546 case EQ_EXPR:
2547 return COMPCODE_EQ;
2548 case LE_EXPR:
2549 return COMPCODE_LE;
2550 case GT_EXPR:
2551 return COMPCODE_GT;
2552 case NE_EXPR:
2553 return COMPCODE_NE;
2554 case GE_EXPR:
2555 return COMPCODE_GE;
2556 case ORDERED_EXPR:
2557 return COMPCODE_ORD;
2558 case UNORDERED_EXPR:
2559 return COMPCODE_UNORD;
2560 case UNLT_EXPR:
2561 return COMPCODE_UNLT;
2562 case UNEQ_EXPR:
2563 return COMPCODE_UNEQ;
2564 case UNLE_EXPR:
2565 return COMPCODE_UNLE;
2566 case UNGT_EXPR:
2567 return COMPCODE_UNGT;
2568 case LTGT_EXPR:
2569 return COMPCODE_LTGT;
2570 case UNGE_EXPR:
2571 return COMPCODE_UNGE;
2572 default:
2573 gcc_unreachable ();
2577 /* Convert a compcode bit-based encoding of a comparison operator back
2578 to GCC's enum tree_code representation. This function is the
2579 inverse of comparison_to_compcode. */
2581 static enum tree_code
2582 compcode_to_comparison (enum comparison_code code)
2584 switch (code)
2586 case COMPCODE_LT:
2587 return LT_EXPR;
2588 case COMPCODE_EQ:
2589 return EQ_EXPR;
2590 case COMPCODE_LE:
2591 return LE_EXPR;
2592 case COMPCODE_GT:
2593 return GT_EXPR;
2594 case COMPCODE_NE:
2595 return NE_EXPR;
2596 case COMPCODE_GE:
2597 return GE_EXPR;
2598 case COMPCODE_ORD:
2599 return ORDERED_EXPR;
2600 case COMPCODE_UNORD:
2601 return UNORDERED_EXPR;
2602 case COMPCODE_UNLT:
2603 return UNLT_EXPR;
2604 case COMPCODE_UNEQ:
2605 return UNEQ_EXPR;
2606 case COMPCODE_UNLE:
2607 return UNLE_EXPR;
2608 case COMPCODE_UNGT:
2609 return UNGT_EXPR;
2610 case COMPCODE_LTGT:
2611 return LTGT_EXPR;
2612 case COMPCODE_UNGE:
2613 return UNGE_EXPR;
2614 default:
2615 gcc_unreachable ();
2619 /* Return a tree for the comparison which is the combination of
2620 doing the AND or OR (depending on CODE) of the two operations LCODE
2621 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2622 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2623 if this makes the transformation invalid. */
2625 tree
2626 combine_comparisons (location_t loc,
2627 enum tree_code code, enum tree_code lcode,
2628 enum tree_code rcode, tree truth_type,
2629 tree ll_arg, tree lr_arg)
2631 bool honor_nans = HONOR_NANS (ll_arg);
2632 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2633 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2634 int compcode;
2636 switch (code)
2638 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2639 compcode = lcompcode & rcompcode;
2640 break;
2642 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2643 compcode = lcompcode | rcompcode;
2644 break;
2646 default:
2647 return NULL_TREE;
2650 if (!honor_nans)
2652 /* Eliminate unordered comparisons, as well as LTGT and ORD
2653 which are not used unless the mode has NaNs. */
2654 compcode &= ~COMPCODE_UNORD;
2655 if (compcode == COMPCODE_LTGT)
2656 compcode = COMPCODE_NE;
2657 else if (compcode == COMPCODE_ORD)
2658 compcode = COMPCODE_TRUE;
2660 else if (flag_trapping_math)
2662 /* Check that the original operation and the optimized ones will trap
2663 under the same condition. */
2664 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2665 && (lcompcode != COMPCODE_EQ)
2666 && (lcompcode != COMPCODE_ORD);
2667 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2668 && (rcompcode != COMPCODE_EQ)
2669 && (rcompcode != COMPCODE_ORD);
2670 bool trap = (compcode & COMPCODE_UNORD) == 0
2671 && (compcode != COMPCODE_EQ)
2672 && (compcode != COMPCODE_ORD);
2674 /* In a short-circuited boolean expression the LHS might be
2675 such that the RHS, if evaluated, will never trap. For
2676 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2677 if neither x nor y is NaN. (This is a mixed blessing: for
2678 example, the expression above will never trap, hence
2679 optimizing it to x < y would be invalid). */
2680 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2681 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2682 rtrap = false;
2684 /* If the comparison was short-circuited, and only the RHS
2685 trapped, we may now generate a spurious trap. */
2686 if (rtrap && !ltrap
2687 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2688 return NULL_TREE;
2690 /* If we changed the conditions that cause a trap, we lose. */
2691 if ((ltrap || rtrap) != trap)
2692 return NULL_TREE;
2695 if (compcode == COMPCODE_TRUE)
2696 return constant_boolean_node (true, truth_type);
2697 else if (compcode == COMPCODE_FALSE)
2698 return constant_boolean_node (false, truth_type);
2699 else
2701 enum tree_code tcode;
2703 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2704 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
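/* Worked example (integer operands, so NaNs are not honored): for
   "a < b || a == b" the bit-based encoding gives
     COMPCODE_LT | COMPCODE_EQ == 1 | 2 == 3 == COMPCODE_LE
   so the disjunction folds to the single test "a <= b".  Conversely,
   "a < b && a > b" gives 1 & 4 == 0 == COMPCODE_FALSE, and the whole
   expression becomes constant false.  */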
2708 /* Return nonzero if two operands (typically of the same tree node)
2709 are necessarily equal. FLAGS modifies behavior as follows:
2711 If OEP_ONLY_CONST is set, only return nonzero for constants.
2712 This function tests whether the operands are indistinguishable;
2713 it does not test whether they are equal using C's == operation.
2714 The distinction is important for IEEE floating point, because
2715 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2716 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2718 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2719 even though it may hold multiple values during a function.
2720 This is because a GCC tree node guarantees that nothing else is
2721 executed between the evaluation of its "operands" (which may often
2722 be evaluated in arbitrary order). Hence if the operands themselves
2723 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2724 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2725 unset means assuming isochronic (or instantaneous) tree equivalence.
2726 Unless comparing arbitrary expression trees, such as from different
2727 statements, this flag can usually be left unset.
2729 If OEP_PURE_SAME is set, then pure functions with identical arguments
2730 are considered the same. It is used when the caller has other ways
2731 to ensure that global memory is unchanged in between.
2733 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2734 not values of expressions.
2736 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2737 any operand with side effects. This is unnecessarily conservative
2738 when we know that arg0 and arg1 are in disjoint code paths (such as
2739 the two arms of a ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2740 addresses with TREE_CONSTANT flag set so we know that &var == &var
2741 even if var is volatile. */
2743 int
2744 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2746 /* When checking, verify at the outermost operand_equal_p call that
2747 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
2748 hash value. */
2749 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2751 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2753 if (arg0 != arg1)
2755 inchash::hash hstate0 (0), hstate1 (0);
2756 inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2757 inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2758 hashval_t h0 = hstate0.end ();
2759 hashval_t h1 = hstate1.end ();
2760 gcc_assert (h0 == h1);
2762 return 1;
2764 else
2765 return 0;
2768 /* If either is ERROR_MARK, they aren't equal. */
2769 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2770 || TREE_TYPE (arg0) == error_mark_node
2771 || TREE_TYPE (arg1) == error_mark_node)
2772 return 0;
2774 /* Similar, if either does not have a type (like a released SSA name),
2775 they aren't equal. */
2776 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2777 return 0;
2779 /* We cannot consider pointers to different address space equal. */
2780 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2781 && POINTER_TYPE_P (TREE_TYPE (arg1))
2782 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2783 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2784 return 0;
2786 /* Check equality of integer constants before bailing out due to
2787 precision differences. */
2788 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2790 /* Address of INTEGER_CST is not defined; check that we did not forget
2791 to drop the OEP_ADDRESS_OF flags. */
2792 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2793 return tree_int_cst_equal (arg0, arg1);
2796 if (!(flags & OEP_ADDRESS_OF))
2798 /* If both types don't have the same signedness, then we can't consider
2799 them equal. We must check this before the STRIP_NOPS calls
2800 because they may change the signedness of the arguments. As pointers
2801 strictly don't have a signedness, require either two pointers or
2802 two non-pointers as well. */
2803 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2804 || POINTER_TYPE_P (TREE_TYPE (arg0))
2805 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2806 return 0;
2808 /* If both types don't have the same precision, then it is not safe
2809 to strip NOPs. */
2810 if (element_precision (TREE_TYPE (arg0))
2811 != element_precision (TREE_TYPE (arg1)))
2812 return 0;
2814 STRIP_NOPS (arg0);
2815 STRIP_NOPS (arg1);
2817 #if 0
2818 /* FIXME: The Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
2819 sanity check once the issue is solved. */
2820 else
2821 /* Addresses of conversions and SSA_NAMEs (and many other things)
2822 are not defined. Check that we did not forget to drop the
2823 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2824 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2825 && TREE_CODE (arg0) != SSA_NAME);
2826 #endif
2828 /* In case both args are comparisons but with different comparison
2829 code, try to swap the comparison operands of one arg to produce
2830 a match and compare that variant. */
2831 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2832 && COMPARISON_CLASS_P (arg0)
2833 && COMPARISON_CLASS_P (arg1))
2835 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2837 if (TREE_CODE (arg0) == swap_code)
2838 return operand_equal_p (TREE_OPERAND (arg0, 0),
2839 TREE_OPERAND (arg1, 1), flags)
2840 && operand_equal_p (TREE_OPERAND (arg0, 1),
2841 TREE_OPERAND (arg1, 0), flags);
2844 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2846 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2847 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2849 else if (flags & OEP_ADDRESS_OF)
2851 /* If we are interested in comparing addresses, ignore
2852 MEM_REF wrappings of the base that can appear just for
2853 TBAA reasons. */
2854 if (TREE_CODE (arg0) == MEM_REF
2855 && DECL_P (arg1)
2856 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2857 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2858 && integer_zerop (TREE_OPERAND (arg0, 1)))
2859 return 1;
2860 else if (TREE_CODE (arg1) == MEM_REF
2861 && DECL_P (arg0)
2862 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2863 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2864 && integer_zerop (TREE_OPERAND (arg1, 1)))
2865 return 1;
2866 return 0;
2868 else
2869 return 0;
2872 /* When not checking addresses, this is needed for conversions and for
2873 COMPONENT_REF. Might as well play it safe and always test this. */
2874 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2875 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2876 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2877 && !(flags & OEP_ADDRESS_OF)))
2878 return 0;
2880 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2881 We don't care about side effects in that case because the SAVE_EXPR
2882 takes care of that for us. In all other cases, two expressions are
2883 equal if they have no side effects. If we have two identical
2884 expressions with side effects that should be treated the same due
2885 to the only side effects being identical SAVE_EXPR's, that will
2886 be detected in the recursive calls below.
2887 If we are taking an invariant address of two identical objects
2888 they are necessarily equal as well. */
2889 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2890 && (TREE_CODE (arg0) == SAVE_EXPR
2891 || (flags & OEP_MATCH_SIDE_EFFECTS)
2892 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2893 return 1;
2895 /* Next handle constant cases, those for which we can return 1 even
2896 if ONLY_CONST is set. */
2897 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2898 switch (TREE_CODE (arg0))
2900 case INTEGER_CST:
2901 return tree_int_cst_equal (arg0, arg1);
2903 case FIXED_CST:
2904 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2905 TREE_FIXED_CST (arg1));
2907 case REAL_CST:
2908 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2909 return 1;
2912 if (!HONOR_SIGNED_ZEROS (arg0))
2914 /* If we do not distinguish between positive and negative zero,
2915 consider them equal. */
2916 if (real_zerop (arg0) && real_zerop (arg1))
2917 return 1;
2919 return 0;
2921 case VECTOR_CST:
2923 unsigned i;
2925 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2926 return 0;
2928 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2930 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2931 VECTOR_CST_ELT (arg1, i), flags))
2932 return 0;
2934 return 1;
2937 case COMPLEX_CST:
2938 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2939 flags)
2940 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2941 flags));
2943 case STRING_CST:
2944 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2945 && ! memcmp (TREE_STRING_POINTER (arg0),
2946 TREE_STRING_POINTER (arg1),
2947 TREE_STRING_LENGTH (arg0)));
2949 case ADDR_EXPR:
2950 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2951 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2952 flags | OEP_ADDRESS_OF
2953 | OEP_MATCH_SIDE_EFFECTS);
2954 case CONSTRUCTOR:
2955 /* In GIMPLE empty constructors are allowed in initializers of
2956 aggregates. */
2957 return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0))
2958 && !vec_safe_length (CONSTRUCTOR_ELTS (arg1)));
2959 default:
2960 break;
2963 if (flags & OEP_ONLY_CONST)
2964 return 0;
2966 /* Define macros to test an operand from arg0 and arg1 for equality and a
2967 variant that allows null and views null as being different from any
2968 non-null value. In the latter case, if either is null, then both
2969 must be; otherwise, do the normal comparison. */
2970 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2971 TREE_OPERAND (arg1, N), flags)
2973 #define OP_SAME_WITH_NULL(N) \
2974 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2975 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2977 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2979 case tcc_unary:
2980 /* Two conversions are equal only if signedness and modes match. */
2981 switch (TREE_CODE (arg0))
2983 CASE_CONVERT:
2984 case FIX_TRUNC_EXPR:
2985 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2986 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2987 return 0;
2988 break;
2989 default:
2990 break;
2993 return OP_SAME (0);
2996 case tcc_comparison:
2997 case tcc_binary:
2998 if (OP_SAME (0) && OP_SAME (1))
2999 return 1;
3001 /* For commutative ops, allow the other order. */
3002 return (commutative_tree_code (TREE_CODE (arg0))
3003 && operand_equal_p (TREE_OPERAND (arg0, 0),
3004 TREE_OPERAND (arg1, 1), flags)
3005 && operand_equal_p (TREE_OPERAND (arg0, 1),
3006 TREE_OPERAND (arg1, 0), flags));
3008 case tcc_reference:
3009 /* If either of the pointer (or reference) expressions we are
3010 dereferencing contain a side effect, these cannot be equal,
3011 but their addresses can be. */
3012 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3013 && (TREE_SIDE_EFFECTS (arg0)
3014 || TREE_SIDE_EFFECTS (arg1)))
3015 return 0;
3017 switch (TREE_CODE (arg0))
3019 case INDIRECT_REF:
3020 if (!(flags & OEP_ADDRESS_OF)
3021 && (TYPE_ALIGN (TREE_TYPE (arg0))
3022 != TYPE_ALIGN (TREE_TYPE (arg1))))
3023 return 0;
3024 flags &= ~OEP_ADDRESS_OF;
3025 return OP_SAME (0);
3027 case IMAGPART_EXPR:
3028 /* Require the same offset. */
3029 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3030 TYPE_SIZE (TREE_TYPE (arg1)),
3031 flags & ~OEP_ADDRESS_OF))
3032 return 0;
3034 /* Fallthru. */
3035 case REALPART_EXPR:
3036 case VIEW_CONVERT_EXPR:
3037 return OP_SAME (0);
3039 case TARGET_MEM_REF:
3040 case MEM_REF:
3041 if (!(flags & OEP_ADDRESS_OF))
3043 /* Require equal access sizes. */
3044 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3045 && (!TYPE_SIZE (TREE_TYPE (arg0))
3046 || !TYPE_SIZE (TREE_TYPE (arg1))
3047 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3048 TYPE_SIZE (TREE_TYPE (arg1)),
3049 flags)))
3050 return 0;
3051 /* Verify that access happens in similar types. */
3052 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3053 return 0;
3054 /* Verify that accesses are TBAA compatible. */
3055 if (!alias_ptr_types_compatible_p
3056 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3057 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3058 || (MR_DEPENDENCE_CLIQUE (arg0)
3059 != MR_DEPENDENCE_CLIQUE (arg1))
3060 || (MR_DEPENDENCE_BASE (arg0)
3061 != MR_DEPENDENCE_BASE (arg1)))
3062 return 0;
3063 /* Verify that alignment is compatible. */
3064 if (TYPE_ALIGN (TREE_TYPE (arg0))
3065 != TYPE_ALIGN (TREE_TYPE (arg1)))
3066 return 0;
3068 flags &= ~OEP_ADDRESS_OF;
3069 return (OP_SAME (0) && OP_SAME (1)
3070 /* TARGET_MEM_REFs require equal extra operands. */
3071 && (TREE_CODE (arg0) != TARGET_MEM_REF
3072 || (OP_SAME_WITH_NULL (2)
3073 && OP_SAME_WITH_NULL (3)
3074 && OP_SAME_WITH_NULL (4))));
3076 case ARRAY_REF:
3077 case ARRAY_RANGE_REF:
3078 if (!OP_SAME (0))
3079 return 0;
3080 flags &= ~OEP_ADDRESS_OF;
3081 /* First compare the array index by value if it is constant, as we
3082 may have indexes with different types but the same value here. */
3083 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3084 TREE_OPERAND (arg1, 1))
3085 || OP_SAME (1))
3086 && OP_SAME_WITH_NULL (2)
3087 && OP_SAME_WITH_NULL (3)
3088 /* Compare low bound and element size as with OEP_ADDRESS_OF
3089 we have to account for the offset of the ref. */
3090 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3091 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3092 || (operand_equal_p (array_ref_low_bound
3093 (CONST_CAST_TREE (arg0)),
3094 array_ref_low_bound
3095 (CONST_CAST_TREE (arg1)), flags)
3096 && operand_equal_p (array_ref_element_size
3097 (CONST_CAST_TREE (arg0)),
3098 array_ref_element_size
3099 (CONST_CAST_TREE (arg1)),
3100 flags))));
3102 case COMPONENT_REF:
3103 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3104 may be NULL when we're called to compare MEM_EXPRs. */
3105 if (!OP_SAME_WITH_NULL (0)
3106 || !OP_SAME (1))
3107 return 0;
3108 flags &= ~OEP_ADDRESS_OF;
3109 return OP_SAME_WITH_NULL (2);
3111 case BIT_FIELD_REF:
3112 if (!OP_SAME (0))
3113 return 0;
3114 flags &= ~OEP_ADDRESS_OF;
3115 return OP_SAME (1) && OP_SAME (2);
3117 default:
3118 return 0;
3121 case tcc_expression:
3122 switch (TREE_CODE (arg0))
3124 case ADDR_EXPR:
3125 /* Be sure we pass the right ADDRESS_OF flag. */
3126 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3127 return operand_equal_p (TREE_OPERAND (arg0, 0),
3128 TREE_OPERAND (arg1, 0),
3129 flags | OEP_ADDRESS_OF);
3131 case TRUTH_NOT_EXPR:
3132 return OP_SAME (0);
3134 case TRUTH_ANDIF_EXPR:
3135 case TRUTH_ORIF_EXPR:
3136 return OP_SAME (0) && OP_SAME (1);
3138 case FMA_EXPR:
3139 case WIDEN_MULT_PLUS_EXPR:
3140 case WIDEN_MULT_MINUS_EXPR:
3141 if (!OP_SAME (2))
3142 return 0;
3143 /* The multiplication operands are commutative. */
3144 /* FALLTHRU */
3146 case TRUTH_AND_EXPR:
3147 case TRUTH_OR_EXPR:
3148 case TRUTH_XOR_EXPR:
3149 if (OP_SAME (0) && OP_SAME (1))
3150 return 1;
3152 /* Otherwise take into account this is a commutative operation. */
3153 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3154 TREE_OPERAND (arg1, 1), flags)
3155 && operand_equal_p (TREE_OPERAND (arg0, 1),
3156 TREE_OPERAND (arg1, 0), flags));
3158 case COND_EXPR:
3159 if (! OP_SAME (1) || ! OP_SAME (2))
3160 return 0;
3161 flags &= ~OEP_ADDRESS_OF;
3162 return OP_SAME (0);
3164 case VEC_COND_EXPR:
3165 case DOT_PROD_EXPR:
3166 case BIT_INSERT_EXPR:
3167 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3169 default:
3170 return 0;
3173 case tcc_vl_exp:
3174 switch (TREE_CODE (arg0))
3176 case CALL_EXPR:
3177 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3178 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3179 /* If the two CALL_EXPRs are not both internal or both normal
3180 function calls, then they are not equal. */
3181 return 0;
3182 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3184 /* If the CALL_EXPRs call different internal functions, then they
3185 are not equal. */
3186 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3187 return 0;
3189 else
3191 /* If the CALL_EXPRs call different functions, then they are not
3192 equal. */
3193 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3194 flags))
3195 return 0;
3198 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3200 unsigned int cef = call_expr_flags (arg0);
3201 if (flags & OEP_PURE_SAME)
3202 cef &= ECF_CONST | ECF_PURE;
3203 else
3204 cef &= ECF_CONST;
3205 if (!cef)
3206 return 0;
3209 /* Now see if all the arguments are the same. */
3211 const_call_expr_arg_iterator iter0, iter1;
3212 const_tree a0, a1;
3213 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3214 a1 = first_const_call_expr_arg (arg1, &iter1);
3215 a0 && a1;
3216 a0 = next_const_call_expr_arg (&iter0),
3217 a1 = next_const_call_expr_arg (&iter1))
3218 if (! operand_equal_p (a0, a1, flags))
3219 return 0;
3221 /* If we get here and both argument lists are exhausted
3222 then the CALL_EXPRs are equal. */
3223 return ! (a0 || a1);
3225 default:
3226 return 0;
3229 case tcc_declaration:
3230 /* Consider __builtin_sqrt equal to sqrt. */
3231 return (TREE_CODE (arg0) == FUNCTION_DECL
3232 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3233 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3234 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3236 case tcc_exceptional:
3237 if (TREE_CODE (arg0) == CONSTRUCTOR)
3239 /* In GIMPLE constructors are used only to build vectors from
3240 elements. Individual elements in the constructor must be
3241 indexed in increasing order and form an initial sequence.
3243 We make no effort to compare constructors in GENERIC
3244 (see sem_variable::equals in ipa-icf, which can do so for
3245 constants). */
3246 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3247 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3248 return 0;
3250 /* Be sure that vectors constructed have the same representation.
3251 So far we have only tested that element precisions and modes match.
3252 Vectors may be BLKmode and thus also check that the number of
3253 parts match. */
3254 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3255 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3256 return 0;
3258 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3259 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3260 unsigned int len = vec_safe_length (v0);
3262 if (len != vec_safe_length (v1))
3263 return 0;
3265 for (unsigned int i = 0; i < len; i++)
3267 constructor_elt *c0 = &(*v0)[i];
3268 constructor_elt *c1 = &(*v1)[i];
3270 if (!operand_equal_p (c0->value, c1->value, flags)
3271 /* In GIMPLE the indexes can be either NULL or matching i.
3272 Double check this so we won't get false
3273 positives for GENERIC. */
3274 || (c0->index
3275 && (TREE_CODE (c0->index) != INTEGER_CST
3276 || !compare_tree_int (c0->index, i)))
3277 || (c1->index
3278 && (TREE_CODE (c1->index) != INTEGER_CST
3279 || !compare_tree_int (c1->index, i))))
3280 return 0;
3282 return 1;
3284 return 0;
3286 default:
3287 return 0;
3290 #undef OP_SAME
3291 #undef OP_SAME_WITH_NULL
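/* Illustrative calls (the operands are GENERIC trees):
     a + b  vs.  b + a  -> equal; the tcc_binary case retries with the
			   commuted operand order.
     *p     vs.  *p     -> not equal if the load is volatile, unless
			   OEP_MATCH_SIDE_EFFECTS is passed.
     f (x)  vs.  f (x)  -> equal only if F is ECF_CONST, or ECF_PURE
			   when OEP_PURE_SAME is given.  */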
3294 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3295 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3297 When in doubt, return 0. */
3299 static int
3300 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3302 int unsignedp1, unsignedpo;
3303 tree primarg0, primarg1, primother;
3304 unsigned int correct_width;
3306 if (operand_equal_p (arg0, arg1, 0))
3307 return 1;
3309 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3310 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3311 return 0;
3313 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3314 and see if the inner values are the same. This removes any
3315 signedness comparison, which doesn't matter here. */
3316 primarg0 = arg0, primarg1 = arg1;
3317 STRIP_NOPS (primarg0);
3318 STRIP_NOPS (primarg1);
3319 if (operand_equal_p (primarg0, primarg1, 0))
3320 return 1;
3322 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3323 actual comparison operand, ARG0.
3325 First throw away any conversions to wider types
3326 already present in the operands. */
3328 primarg1 = get_narrower (arg1, &unsignedp1);
3329 primother = get_narrower (other, &unsignedpo);
3331 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3332 if (unsignedp1 == unsignedpo
3333 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3334 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3336 tree type = TREE_TYPE (arg0);
3338 /* Make sure shorter operand is extended the right way
3339 to match the longer operand. */
3340 primarg1 = fold_convert (signed_or_unsigned_type_for
3341 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3343 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3344 return 1;
3347 return 0;
3350 /* See if ARG is an expression that is either a comparison or is performing
3351 arithmetic on comparisons. The comparisons must only be comparing
3352 two different values, which will be stored in *CVAL1 and *CVAL2; if
3353 they are nonzero it means that some operands have already been found.
3354 No variables may be used anywhere else in the expression except in the
3355 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3356 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3358 If this is true, return 1. Otherwise, return zero. */
3360 static int
3361 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3363 enum tree_code code = TREE_CODE (arg);
3364 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3366 /* We can handle some of the tcc_expression cases here. */
3367 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3368 tclass = tcc_unary;
3369 else if (tclass == tcc_expression
3370 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3371 || code == COMPOUND_EXPR))
3372 tclass = tcc_binary;
3374 else if (tclass == tcc_expression && code == SAVE_EXPR
3375 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3377 /* If we've already found a CVAL1 or CVAL2, this expression is
3378 too complex to handle. */
3379 if (*cval1 || *cval2)
3380 return 0;
3382 tclass = tcc_unary;
3383 *save_p = 1;
3386 switch (tclass)
3388 case tcc_unary:
3389 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3391 case tcc_binary:
3392 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3393 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3394 cval1, cval2, save_p));
3396 case tcc_constant:
3397 return 1;
3399 case tcc_expression:
3400 if (code == COND_EXPR)
3401 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3402 cval1, cval2, save_p)
3403 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3404 cval1, cval2, save_p)
3405 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3406 cval1, cval2, save_p));
3407 return 0;
3409 case tcc_comparison:
3410 /* First see if we can handle the first operand, then the second. For
3411 the second operand, we know *CVAL1 can't be zero. It must be that
3412 one side of the comparison is each of the values; test for the
3413 case where this isn't true by failing if the two operands
3414 are the same. */
3416 if (operand_equal_p (TREE_OPERAND (arg, 0),
3417 TREE_OPERAND (arg, 1), 0))
3418 return 0;
3420 if (*cval1 == 0)
3421 *cval1 = TREE_OPERAND (arg, 0);
3422 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3424 else if (*cval2 == 0)
3425 *cval2 = TREE_OPERAND (arg, 0);
3426 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3428 else
3429 return 0;
3431 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3433 else if (*cval2 == 0)
3434 *cval2 = TREE_OPERAND (arg, 1);
3435 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3437 else
3438 return 0;
3440 return 1;
3442 default:
3443 return 0;
3447 /* ARG is a tree that is known to contain just arithmetic operations and
3448 comparisons. Evaluate the operations in the tree substituting NEW0 for
3449 any occurrence of OLD0 as an operand of a comparison and likewise for
3450 NEW1 and OLD1. */
3452 static tree
3453 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3454 tree old1, tree new1)
3456 tree type = TREE_TYPE (arg);
3457 enum tree_code code = TREE_CODE (arg);
3458 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3460 /* We can handle some of the tcc_expression cases here. */
3461 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3462 tclass = tcc_unary;
3463 else if (tclass == tcc_expression
3464 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3465 tclass = tcc_binary;
3467 switch (tclass)
3469 case tcc_unary:
3470 return fold_build1_loc (loc, code, type,
3471 eval_subst (loc, TREE_OPERAND (arg, 0),
3472 old0, new0, old1, new1));
3474 case tcc_binary:
3475 return fold_build2_loc (loc, code, type,
3476 eval_subst (loc, TREE_OPERAND (arg, 0),
3477 old0, new0, old1, new1),
3478 eval_subst (loc, TREE_OPERAND (arg, 1),
3479 old0, new0, old1, new1));
3481 case tcc_expression:
3482 switch (code)
3484 case SAVE_EXPR:
3485 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3486 old1, new1);
3488 case COMPOUND_EXPR:
3489 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3490 old1, new1);
3492 case COND_EXPR:
3493 return fold_build3_loc (loc, code, type,
3494 eval_subst (loc, TREE_OPERAND (arg, 0),
3495 old0, new0, old1, new1),
3496 eval_subst (loc, TREE_OPERAND (arg, 1),
3497 old0, new0, old1, new1),
3498 eval_subst (loc, TREE_OPERAND (arg, 2),
3499 old0, new0, old1, new1));
3500 default:
3501 break;
3503 /* Fall through - ??? */
3505 case tcc_comparison:
3507 tree arg0 = TREE_OPERAND (arg, 0);
3508 tree arg1 = TREE_OPERAND (arg, 1);
3510 /* We need to check both for exact equality and tree equality. The
3511 former will be true if the operand has a side-effect. In that
3512 case, we know the operand occurred exactly once. */
3514 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3515 arg0 = new0;
3516 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3517 arg0 = new1;
3519 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3520 arg1 = new0;
3521 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3522 arg1 = new1;
3524 return fold_build2_loc (loc, code, type, arg0, arg1);
3527 default:
3528 return arg;
3532 /* Return a tree for the case when the result of an expression is RESULT
3533 converted to TYPE and OMITTED was previously an operand of the expression
3534 but is now not needed (e.g., we folded OMITTED * 0).
3536 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3537 the conversion of RESULT to TYPE. */
3539 tree
3540 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3542 tree t = fold_convert_loc (loc, type, result);
3544 /* If the resulting operand is an empty statement, just return the omitted
3545 statement cast to void. */
3546 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3547 return build1_loc (loc, NOP_EXPR, void_type_node,
3548 fold_ignored_result (omitted));
3550 if (TREE_SIDE_EFFECTS (omitted))
3551 return build2_loc (loc, COMPOUND_EXPR, type,
3552 fold_ignored_result (omitted), t);
3554 return non_lvalue_loc (loc, t);
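/* For example, when "f () * 0" is folded the multiplication goes away
   but the call's side effects must be kept, so
     omit_one_operand_loc (loc, type, integer_zero_node, the_call)
   produces COMPOUND_EXPR <f (), 0>.  If OMITTED has no side effects it
   is dropped entirely and only the converted RESULT remains.  */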
3557 /* Return a tree for the case when the result of an expression is RESULT
3558 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3559 of the expression but are now not needed.
3561 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3562 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3563 evaluated before OMITTED2. Otherwise, if neither has side effects,
3564 just do the conversion of RESULT to TYPE. */
3566 tree
3567 omit_two_operands_loc (location_t loc, tree type, tree result,
3568 tree omitted1, tree omitted2)
3570 tree t = fold_convert_loc (loc, type, result);
3572 if (TREE_SIDE_EFFECTS (omitted2))
3573 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3574 if (TREE_SIDE_EFFECTS (omitted1))
3575 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3577 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3581 /* Return a simplified tree node for the truth-negation of ARG. This
3582 never alters ARG itself. We assume that ARG is an operation that
3583 returns a truth value (0 or 1).
3585 FIXME: one would think we would fold the result, but it causes
3586 problems with the dominator optimizer. */
3588 static tree
3589 fold_truth_not_expr (location_t loc, tree arg)
3591 tree type = TREE_TYPE (arg);
3592 enum tree_code code = TREE_CODE (arg);
3593 location_t loc1, loc2;
3595 /* If this is a comparison, we can simply invert it, except for
3596 floating-point non-equality comparisons, in which case we just
3597 enclose a TRUTH_NOT_EXPR around what we have. */
3599 if (TREE_CODE_CLASS (code) == tcc_comparison)
3601 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3602 if (FLOAT_TYPE_P (op_type)
3603 && flag_trapping_math
3604 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3605 && code != NE_EXPR && code != EQ_EXPR)
3606 return NULL_TREE;
3608 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3609 if (code == ERROR_MARK)
3610 return NULL_TREE;
3612 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3613 TREE_OPERAND (arg, 1));
3614 if (TREE_NO_WARNING (arg))
3615 TREE_NO_WARNING (ret) = 1;
3616 return ret;
3619 switch (code)
3621 case INTEGER_CST:
3622 return constant_boolean_node (integer_zerop (arg), type);
3624 case TRUTH_AND_EXPR:
3625 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3626 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3627 return build2_loc (loc, TRUTH_OR_EXPR, type,
3628 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3629 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3631 case TRUTH_OR_EXPR:
3632 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3633 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3634 return build2_loc (loc, TRUTH_AND_EXPR, type,
3635 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3636 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3638 case TRUTH_XOR_EXPR:
3639 /* Here we can invert either operand. We invert the first operand
3640 unless the second operand is a TRUTH_NOT_EXPR in which case our
3641 result is the XOR of the first operand with the inside of the
3642 negation of the second operand. */
3644 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3645 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3646 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3647 else
3648 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3649 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3650 TREE_OPERAND (arg, 1));
3652 case TRUTH_ANDIF_EXPR:
3653 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3654 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3655 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3656 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3657 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3659 case TRUTH_ORIF_EXPR:
3660 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3661 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3662 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3663 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3664 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3666 case TRUTH_NOT_EXPR:
3667 return TREE_OPERAND (arg, 0);
3669 case COND_EXPR:
3671 tree arg1 = TREE_OPERAND (arg, 1);
3672 tree arg2 = TREE_OPERAND (arg, 2);
3674 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3675 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3677 /* A COND_EXPR may have a throw as one operand, which
3678 then has void type. Just leave void operands
3679 as they are. */
3680 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3681 VOID_TYPE_P (TREE_TYPE (arg1))
3682 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3683 VOID_TYPE_P (TREE_TYPE (arg2))
3684 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3687 case COMPOUND_EXPR:
3688 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3689 return build2_loc (loc, COMPOUND_EXPR, type,
3690 TREE_OPERAND (arg, 0),
3691 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3693 case NON_LVALUE_EXPR:
3694 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3695 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3697 CASE_CONVERT:
3698 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3699 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3701 /* ... fall through ... */
3703 case FLOAT_EXPR:
3704 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3705 return build1_loc (loc, TREE_CODE (arg), type,
3706 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3708 case BIT_AND_EXPR:
3709 if (!integer_onep (TREE_OPERAND (arg, 1)))
3710 return NULL_TREE;
3711 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3713 case SAVE_EXPR:
3714 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3716 case CLEANUP_POINT_EXPR:
3717 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3718 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3719 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3721 default:
3722 return NULL_TREE;
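/* Some negations the cases above produce:
     !(a && b)    ==> !a || !b       (TRUTH_AND_EXPR case)
     !(a < b)     ==> a >= b         (comparison, no NaNs honored)
     !(c ? x : y) ==> c ? !x : !y    (COND_EXPR case)
   When no safe rewrite exists, e.g. a floating-point "<" under
   -ftrapping-math, NULL_TREE is returned and the caller keeps an
   explicit TRUTH_NOT_EXPR instead.  */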
3726 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3727 assume that ARG is an operation that returns a truth value (0 or 1
3728 for scalars, 0 or -1 for vectors). Return the folded expression if
3729 folding is successful. Otherwise, return NULL_TREE. */
3731 static tree
3732 fold_invert_truthvalue (location_t loc, tree arg)
3734 tree type = TREE_TYPE (arg);
3735 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3736 ? BIT_NOT_EXPR
3737 : TRUTH_NOT_EXPR,
3738 type, arg);
3741 /* Return a simplified tree node for the truth-negation of ARG. This
3742 never alters ARG itself. We assume that ARG is an operation that
3743 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3745 tree
3746 invert_truthvalue_loc (location_t loc, tree arg)
3748 if (TREE_CODE (arg) == ERROR_MARK)
3749 return arg;
3751 tree type = TREE_TYPE (arg);
3752 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3753 ? BIT_NOT_EXPR
3754 : TRUTH_NOT_EXPR,
3755 type, arg);
3758 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3759 with code CODE. This optimization is unsafe. */
3760 static tree
3761 distribute_real_division (location_t loc, enum tree_code code, tree type,
3762 tree arg0, tree arg1)
3764 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3765 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3767 /* (A / C) +- (B / C) -> (A +- B) / C. */
3768 if (mul0 == mul1
3769 && operand_equal_p (TREE_OPERAND (arg0, 1),
3770 TREE_OPERAND (arg1, 1), 0))
3771 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3772 fold_build2_loc (loc, code, type,
3773 TREE_OPERAND (arg0, 0),
3774 TREE_OPERAND (arg1, 0)),
3775 TREE_OPERAND (arg0, 1));
3777 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3778 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3779 TREE_OPERAND (arg1, 0), 0)
3780 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3781 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3783 REAL_VALUE_TYPE r0, r1;
3784 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3785 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3786 if (!mul0)
3787 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3788 if (!mul1)
3789 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3790 real_arithmetic (&r0, code, &r0, &r1);
3791 return fold_build2_loc (loc, MULT_EXPR, type,
3792 TREE_OPERAND (arg0, 0),
3793 build_real (type, r0));
3796 return NULL_TREE;
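/* E.g. (a / c) + (b / c) becomes (a + b) / c, and (a / 2.0) + (a / 4.0)
   becomes a * 0.75 (1/2 + 1/4 evaluated at compile time).  Both
   rewrites can change rounding and exception behavior, which is why the
   comment above labels the optimization unsafe.  */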
3799 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3800 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3801 and uses reverse storage order if REVERSEP is nonzero. ORIG_INNER
3802 is the original memory reference used to preserve the alias set of
3803 the access. */
3805 static tree
3806 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3807 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3808 int unsignedp, int reversep)
3810 tree result, bftype;
3812 if (get_alias_set (inner) != get_alias_set (orig_inner))
3813 inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
3814 build_fold_addr_expr (inner),
3815 build_int_cst
3816 (reference_alias_ptr_type (orig_inner), 0));
3818 if (bitpos == 0 && !reversep)
3820 tree size = TYPE_SIZE (TREE_TYPE (inner));
3821 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3822 || POINTER_TYPE_P (TREE_TYPE (inner)))
3823 && tree_fits_shwi_p (size)
3824 && tree_to_shwi (size) == bitsize)
3825 return fold_convert_loc (loc, type, inner);
3828 bftype = type;
3829 if (TYPE_PRECISION (bftype) != bitsize
3830 || TYPE_UNSIGNED (bftype) == !unsignedp)
3831 bftype = build_nonstandard_integer_type (bitsize, 0);
3833 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3834 size_int (bitsize), bitsize_int (bitpos));
3835 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3837 if (bftype != type)
3838 result = fold_convert_loc (loc, type, result);
3840 return result;
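/* Sketch of the result (illustrative): extracting 8 bits at bit
   position 16 from a 32-bit word W builds roughly
     BIT_FIELD_REF <W, 8, 16>
   using an integer type of exactly 8 bits when TYPE's precision or
   signedness does not already match, with a conversion back to TYPE
   at the end.  */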
3843 /* Optimize a bit-field compare.
3845 There are two cases: the first is a compare against a constant and the
3846 second is a comparison of two items where the fields are at the same
3847 bit position relative to the start of a chunk (byte, halfword, word)
3848 large enough to contain it. In these cases we can avoid the shift
3849 implicit in bitfield extractions.
3851 For constants, we emit a compare of the shifted constant with the
3852 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3853 compared. For two fields at the same position, we do the ANDs with the
3854 similar mask and compare the result of the ANDs.
3856 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3857 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3858 are the left and right operands of the comparison, respectively.
3860 If the optimization described above can be done, we return the resulting
3861 tree. Otherwise we return zero. */
3863 static tree
3864 optimize_bit_field_compare (location_t loc, enum tree_code code,
3865 tree compare_type, tree lhs, tree rhs)
3867 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3868 tree type = TREE_TYPE (lhs);
3869 tree unsigned_type;
3870 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3871 machine_mode lmode, rmode, nmode;
3872 int lunsignedp, runsignedp;
3873 int lreversep, rreversep;
3874 int lvolatilep = 0, rvolatilep = 0;
3875 tree linner, rinner = NULL_TREE;
3876 tree mask;
3877 tree offset;
3879 /* Get all the information about the extractions being done. If the bit size
3880 is the same as the size of the underlying object, we aren't doing an
3881 extraction at all and so can do nothing. We also don't want to
3882 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3883 then will no longer be able to replace it. */
3884 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3885 &lunsignedp, &lreversep, &lvolatilep, false);
3886 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3887 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3888 return 0;
3890 if (const_p)
3891 rreversep = lreversep;
3892 else
3894 /* If this is not a constant, we can only do something if bit positions,
3895 sizes, signedness and storage order are the same. */
3896 rinner
3897 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3898 &runsignedp, &rreversep, &rvolatilep, false);
3900 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3901 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3902 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3903 return 0;
3906 /* See if we can find a mode to refer to this field. We should be able to,
3907 but fail if we can't. */
3908 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3909 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3910 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3911 TYPE_ALIGN (TREE_TYPE (rinner))),
3912 word_mode, false);
3913 if (nmode == VOIDmode)
3914 return 0;
3916 /* Set signed and unsigned types of the precision of this mode for the
3917 shifts below. */
3918 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3920 /* Compute the bit position and size for the new reference and our offset
3921 within it. If the new reference is the same size as the original, we
3922 won't optimize anything, so return zero. */
3923 nbitsize = GET_MODE_BITSIZE (nmode);
3924 nbitpos = lbitpos & ~ (nbitsize - 1);
3925 lbitpos -= nbitpos;
3926 if (nbitsize == lbitsize)
3927 return 0;
3929 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3930 lbitpos = nbitsize - lbitsize - lbitpos;
3932 /* Make the mask to be used against the extracted field. */
3933 mask = build_int_cst_type (unsigned_type, -1);
3934 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3935 mask = const_binop (RSHIFT_EXPR, mask,
3936 size_int (nbitsize - lbitsize - lbitpos));
3938 if (! const_p)
3939 /* If not comparing with constant, just rework the comparison
3940 and return. */
3941 return fold_build2_loc (loc, code, compare_type,
3942 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3943 make_bit_field_ref (loc, linner, lhs,
3944 unsigned_type,
3945 nbitsize, nbitpos,
3946 1, lreversep),
3947 mask),
3948 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3949 make_bit_field_ref (loc, rinner, rhs,
3950 unsigned_type,
3951 nbitsize, nbitpos,
3952 1, rreversep),
3953 mask));
3955 /* Otherwise, we are handling the constant case. See if the constant is too
3956 big for the field. Warn and return a tree for 0 (false) if so. We do
3957 this not only for its own sake, but to avoid having to test for this
3958 error case below. If we didn't, we might generate wrong code.
3960 For unsigned fields, the constant shifted right by the field length should
3961 be all zero. For signed fields, the high-order bits should agree with
3962 the sign bit. */
3964 if (lunsignedp)
3966 if (wi::lrshift (rhs, lbitsize) != 0)
3968 warning (0, "comparison is always %d due to width of bit-field",
3969 code == NE_EXPR);
3970 return constant_boolean_node (code == NE_EXPR, compare_type);
3973 else
3975 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3976 if (tem != 0 && tem != -1)
3978 warning (0, "comparison is always %d due to width of bit-field",
3979 code == NE_EXPR);
3980 return constant_boolean_node (code == NE_EXPR, compare_type);
3984 /* Single-bit compares should always be against zero. */
3985 if (lbitsize == 1 && ! integer_zerop (rhs))
3987 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3988 rhs = build_int_cst (type, 0);
3991 /* Make a new bitfield reference, shift the constant over the
3992 appropriate number of bits and mask it with the computed mask
3993 (in case this was a signed field). If we changed it, make a new one. */
3994 lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
3995 nbitsize, nbitpos, 1, lreversep);
3997 rhs = const_binop (BIT_AND_EXPR,
3998 const_binop (LSHIFT_EXPR,
3999 fold_convert_loc (loc, unsigned_type, rhs),
4000 size_int (lbitpos)),
4001 mask);
4003 lhs = build2_loc (loc, code, compare_type,
4004 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4005 return lhs;
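/* Illustrative sketch (not part of this file) of the transformation in
   hand-written C: comparing a 3-bit field against a constant without
   the shift implicit in a bit-field extraction, by masking the
   containing word and shifting the constant instead.  The helper name
   is hypothetical; assumes VAL < 8 and POS <= 29. */

#include <assert.h>
#include <stdint.h>

static int
field_eq (uint32_t word, unsigned pos, uint32_t val)
{
  uint32_t mask = (uint32_t) 7 << pos;  /* three one bits at POS */
  return (word & mask) == (val << pos); /* compare in place */
}

int
main (void)
{
  uint32_t w = 5u << 4;                 /* store 5 in bits [4..6] */
  assert (field_eq (w, 4, 5));
  assert (!field_eq (w, 4, 3));
  return 0;
}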
4008 /* Subroutine for fold_truth_andor_1: decode a field reference.
4010 If EXP is a comparison reference, we return the innermost reference.
4012 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4013 set to the starting bit number.
4015 If the innermost field can be completely contained in a mode-sized
4016 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4018 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4019 otherwise it is not changed.
4021 *PUNSIGNEDP is set to the signedness of the field.
4023 *PREVERSEP is set to the storage order of the field.
4025 *PMASK is set to the mask used. This is either contained in a
4026 BIT_AND_EXPR or derived from the width of the field.
4028 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4030 Return 0 if this is not a component reference or is one that we can't
4031 do anything with. */
4033 static tree
4034 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4035 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4036 int *punsignedp, int *preversep, int *pvolatilep,
4037 tree *pmask, tree *pand_mask)
4039 tree exp = *exp_;
4040 tree outer_type = 0;
4041 tree and_mask = 0;
4042 tree mask, inner, offset;
4043 tree unsigned_type;
4044 unsigned int precision;
4046 /* All the optimizations using this function assume integer fields.
4047 There are problems with FP fields since the type_for_size call
4048 below can fail for, e.g., XFmode. */
4049 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4050 return 0;
4052 /* We are interested in the bare arrangement of bits, so strip everything
4053 that doesn't affect the machine mode. However, record the type of the
4054 outermost expression if it may matter below. */
4055 if (CONVERT_EXPR_P (exp)
4056 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4057 outer_type = TREE_TYPE (exp);
4058 STRIP_NOPS (exp);
4060 if (TREE_CODE (exp) == BIT_AND_EXPR)
4062 and_mask = TREE_OPERAND (exp, 1);
4063 exp = TREE_OPERAND (exp, 0);
4064 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4065 if (TREE_CODE (and_mask) != INTEGER_CST)
4066 return 0;
4069 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4070 punsignedp, preversep, pvolatilep, false);
4071 if ((inner == exp && and_mask == 0)
4072 || *pbitsize < 0 || offset != 0
4073 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4074 return 0;
4076 *exp_ = exp;
4078 /* If the number of bits in the reference is the same as the bitsize of
4079 the outer type, then the outer type gives the signedness. Otherwise
4080 (in case of a small bitfield) the signedness is unchanged. */
4081 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4082 *punsignedp = TYPE_UNSIGNED (outer_type);
4084 /* Compute the mask to access the bitfield. */
4085 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4086 precision = TYPE_PRECISION (unsigned_type);
4088 mask = build_int_cst_type (unsigned_type, -1);
4090 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4091 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4093 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4094 if (and_mask != 0)
4095 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4096 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4098 *pmask = mask;
4099 *pand_mask = and_mask;
4100 return inner;
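/* Sketch (illustrative, not part of this file) of the mask computation
   above: a mask of BITSIZE low-order ones built from all-ones by a
   left shift followed by a right shift.  Unlike (1u << bitsize) - 1,
   this also works when BITSIZE equals the full precision.  The helper
   name is hypothetical; requires 1 <= BITSIZE <= 32. */

#include <assert.h>
#include <stdint.h>

static uint32_t
low_mask (unsigned bitsize)
{
  uint32_t m = (uint32_t) -1;   /* all ones */
  m <<= 32 - bitsize;
  m >>= 32 - bitsize;
  return m;
}

int
main (void)
{
  assert (low_mask (3) == 0x7u);
  assert (low_mask (32) == 0xffffffffu);
  return 0;
}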
4103 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4104 bit positions and the type of MASK is signed. */
4106 static int
4107 all_ones_mask_p (const_tree mask, unsigned int size)
4109 tree type = TREE_TYPE (mask);
4110 unsigned int precision = TYPE_PRECISION (type);
4112 /* If this function returns true when the type of the mask is
4113 UNSIGNED, then there will be errors. In particular see
4114 gcc.c-torture/execute/990326-1.c. There does not appear to be
4115 any documentation paper trail as to why this is so. But the pre
4116 wide-int worked with that restriction and it has been preserved
4117 here. */
4118 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4119 return false;
4121 return wi::mask (size, false, precision) == mask;
4124 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4125 represents the sign bit of EXP's type. If EXP represents a sign
4126 or zero extension, also test VAL against the unextended type.
4127 The return value is the (sub)expression whose sign bit is VAL,
4128 or NULL_TREE otherwise. */
4130 tree
4131 sign_bit_p (tree exp, const_tree val)
4133 int width;
4134 tree t;
4136 /* Tree EXP must have an integral type. */
4137 t = TREE_TYPE (exp);
4138 if (! INTEGRAL_TYPE_P (t))
4139 return NULL_TREE;
4141 /* Tree VAL must be an integer constant. */
4142 if (TREE_CODE (val) != INTEGER_CST
4143 || TREE_OVERFLOW (val))
4144 return NULL_TREE;
4146 width = TYPE_PRECISION (t);
4147 if (wi::only_sign_bit_p (val, width))
4148 return exp;
4150 /* Handle extension from a narrower type. */
4151 if (TREE_CODE (exp) == NOP_EXPR
4152 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4153 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4155 return NULL_TREE;
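/* Sketch (illustrative, not part of this file) of the only-sign-bit
   test used above, for values narrower than 64 bits: VAL is exactly
   the sign bit of a WIDTH-bit type when it equals 1 << (WIDTH - 1).
   The helper name is hypothetical. */

#include <assert.h>
#include <stdint.h>

static int
only_sign_bit_p (uint64_t val, unsigned width)
{
  return val == (uint64_t) 1 << (width - 1);
}

int
main (void)
{
  assert (only_sign_bit_p (0x80, 8));   /* (signed char) -128 */
  assert (!only_sign_bit_p (0x81, 8));
  return 0;
}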
4158 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4159 to be evaluated unconditionally. */
4161 static int
4162 simple_operand_p (const_tree exp)
4164 /* Strip any conversions that don't change the machine mode. */
4165 STRIP_NOPS (exp);
4167 return (CONSTANT_CLASS_P (exp)
4168 || TREE_CODE (exp) == SSA_NAME
4169 || (DECL_P (exp)
4170 && ! TREE_ADDRESSABLE (exp)
4171 && ! TREE_THIS_VOLATILE (exp)
4172 && ! DECL_NONLOCAL (exp)
4173 /* Don't regard global variables as simple. They may be
4174 allocated in ways unknown to the compiler (shared memory,
4175 #pragma weak, etc). */
4176 && ! TREE_PUBLIC (exp)
4177 && ! DECL_EXTERNAL (exp)
4178 /* Weakrefs are not safe to be read, since they can be NULL.
4179 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4180 have DECL_WEAK flag set. */
4181 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4182 /* Loading a static variable is unduly expensive, but global
4183 registers aren't expensive. */
4184 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4187 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4188 to be evaluated unconditionally.
4189 In addition to simple_operand_p, we assume that comparisons, conversions,
4190 and logic-not operations are simple, if their operands are simple, too. */
4192 static bool
4193 simple_operand_p_2 (tree exp)
4195 enum tree_code code;
4197 if (TREE_SIDE_EFFECTS (exp)
4198 || tree_could_trap_p (exp))
4199 return false;
4201 while (CONVERT_EXPR_P (exp))
4202 exp = TREE_OPERAND (exp, 0);
4204 code = TREE_CODE (exp);
4206 if (TREE_CODE_CLASS (code) == tcc_comparison)
4207 return (simple_operand_p (TREE_OPERAND (exp, 0))
4208 && simple_operand_p (TREE_OPERAND (exp, 1)));
4210 if (code == TRUTH_NOT_EXPR)
4211 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4213 return simple_operand_p (exp);
4217 /* The following functions are subroutines to fold_range_test and allow it to
4218 try to change a logical combination of comparisons into a range test.
4220 For example, both
4221 X == 2 || X == 3 || X == 4 || X == 5
4222 and
4223 X >= 2 && X <= 5
4224 are converted to
4225 (unsigned) (X - 2) <= 3
4227 We describe each set of comparisons as being either inside or outside
4228 a range, using a variable named like IN_P, and then describe the
4229 range with a lower and upper bound. If one of the bounds is omitted,
4230 it represents either the highest or lowest value of the type.
4232 In the comments below, we represent a range by two numbers in brackets
4233 preceded by a "+" to designate being inside that range, or a "-" to
4234 designate being outside that range, so the condition can be inverted by
4235 flipping the prefix. An omitted bound is represented by a "-". For
4236 example, "- [-, 10]" means being outside the range starting at the lowest
4237 possible value and ending at 10, in other words, being greater than 10.
4238 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4239 always false.
4241 We set up things so that the missing bounds are handled in a consistent
4242 manner so neither a missing bound nor "true" and "false" need to be
4243 handled using a special case. */
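/* A standalone sketch (illustrative, not part of this file) verifying
   the example above by brute force: the chained equality tests, the
   explicit range test, and the single unsigned comparison agree for
   every value tried. */

#include <assert.h>

int
main (void)
{
  for (int x = -1000; x <= 1000; x++)
    {
      int chain = x == 2 || x == 3 || x == 4 || x == 5;
      int range = x >= 2 && x <= 5;
      int test = (unsigned) (x - 2) <= 3u;
      assert (chain == range && range == test);
    }
  return 0;
}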
4245 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4246 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4247 and UPPER1_P are nonzero if the respective argument is an upper bound
4248 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4249 must be specified for a comparison. ARG1 will be converted to ARG0's
4250 type if both are specified. */
4252 static tree
4253 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4254 tree arg1, int upper1_p)
4256 tree tem;
4257 int result;
4258 int sgn0, sgn1;
4260 /* If neither arg represents infinity, do the normal operation.
4261 Else, if not a comparison, return infinity. Else handle the special
4262 comparison rules. Note that most of the cases below won't occur, but
4263 are handled for consistency. */
4265 if (arg0 != 0 && arg1 != 0)
4267 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4268 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4269 STRIP_NOPS (tem);
4270 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4273 if (TREE_CODE_CLASS (code) != tcc_comparison)
4274 return 0;
4276 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4277 for neither. In real maths, we cannot assume open ended ranges are
4278 the same. But, this is computer arithmetic, where numbers are finite.
4279 We can therefore make the transformation of any unbounded range with
4280 the value Z, Z being greater than any representable number. This permits
4281 us to treat unbounded ranges as equal. */
4282 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4283 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4284 switch (code)
4286 case EQ_EXPR:
4287 result = sgn0 == sgn1;
4288 break;
4289 case NE_EXPR:
4290 result = sgn0 != sgn1;
4291 break;
4292 case LT_EXPR:
4293 result = sgn0 < sgn1;
4294 break;
4295 case LE_EXPR:
4296 result = sgn0 <= sgn1;
4297 break;
4298 case GT_EXPR:
4299 result = sgn0 > sgn1;
4300 break;
4301 case GE_EXPR:
4302 result = sgn0 >= sgn1;
4303 break;
4304 default:
4305 gcc_unreachable ();
4308 return constant_boolean_node (result, type);
4311 /* Helper routine for make_range. Perform one step for it, return
4312 new expression if the loop should continue or NULL_TREE if it should
4313 stop. */
4315 tree
4316 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4317 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4318 bool *strict_overflow_p)
4320 tree arg0_type = TREE_TYPE (arg0);
4321 tree n_low, n_high, low = *p_low, high = *p_high;
4322 int in_p = *p_in_p, n_in_p;
4324 switch (code)
4326 case TRUTH_NOT_EXPR:
4327 /* We can only do something if the range is testing for zero. */
4328 if (low == NULL_TREE || high == NULL_TREE
4329 || ! integer_zerop (low) || ! integer_zerop (high))
4330 return NULL_TREE;
4331 *p_in_p = ! in_p;
4332 return arg0;
4334 case EQ_EXPR: case NE_EXPR:
4335 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4336 /* We can only do something if the range is testing for zero
4337 and if the second operand is an integer constant. Note that
4338 saying something is "in" the range we make is done by
4339 complementing IN_P since it will set in the initial case of
4340 being not equal to zero; "out" is leaving it alone. */
4341 if (low == NULL_TREE || high == NULL_TREE
4342 || ! integer_zerop (low) || ! integer_zerop (high)
4343 || TREE_CODE (arg1) != INTEGER_CST)
4344 return NULL_TREE;
4346 switch (code)
4348 case NE_EXPR: /* - [c, c] */
4349 low = high = arg1;
4350 break;
4351 case EQ_EXPR: /* + [c, c] */
4352 in_p = ! in_p, low = high = arg1;
4353 break;
4354 case GT_EXPR: /* - [-, c] */
4355 low = 0, high = arg1;
4356 break;
4357 case GE_EXPR: /* + [c, -] */
4358 in_p = ! in_p, low = arg1, high = 0;
4359 break;
4360 case LT_EXPR: /* - [c, -] */
4361 low = arg1, high = 0;
4362 break;
4363 case LE_EXPR: /* + [-, c] */
4364 in_p = ! in_p, low = 0, high = arg1;
4365 break;
4366 default:
4367 gcc_unreachable ();
4370 /* If this is an unsigned comparison, we also know that EXP is
4371 greater than or equal to zero. We base the range tests we make
4372 on that fact, so we record it here so we can parse existing
4373 range tests. We test arg0_type since often the return type
4374 of, e.g. EQ_EXPR, is boolean. */
4375 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4377 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4378 in_p, low, high, 1,
4379 build_int_cst (arg0_type, 0),
4380 NULL_TREE))
4381 return NULL_TREE;
4383 in_p = n_in_p, low = n_low, high = n_high;
4385 /* If the high bound is missing, but we have a nonzero low
4386 bound, reverse the range so it goes from zero to the low bound
4387 minus 1. */
4388 if (high == 0 && low && ! integer_zerop (low))
4390 in_p = ! in_p;
4391 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4392 build_int_cst (TREE_TYPE (low), 1), 0);
4393 low = build_int_cst (arg0_type, 0);
4397 *p_low = low;
4398 *p_high = high;
4399 *p_in_p = in_p;
4400 return arg0;
4402 case NEGATE_EXPR:
4403 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4404 low and high are non-NULL, then normalize will DTRT. */
4405 if (!TYPE_UNSIGNED (arg0_type)
4406 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4408 if (low == NULL_TREE)
4409 low = TYPE_MIN_VALUE (arg0_type);
4410 if (high == NULL_TREE)
4411 high = TYPE_MAX_VALUE (arg0_type);
4414 /* (-x) IN [a,b] -> x in [-b, -a] */
4415 n_low = range_binop (MINUS_EXPR, exp_type,
4416 build_int_cst (exp_type, 0),
4417 0, high, 1);
4418 n_high = range_binop (MINUS_EXPR, exp_type,
4419 build_int_cst (exp_type, 0),
4420 0, low, 0);
4421 if (n_high != 0 && TREE_OVERFLOW (n_high))
4422 return NULL_TREE;
4423 goto normalize;
4425 case BIT_NOT_EXPR:
4426 /* ~ X -> -X - 1 */
4427 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4428 build_int_cst (exp_type, 1));
4430 case PLUS_EXPR:
4431 case MINUS_EXPR:
4432 if (TREE_CODE (arg1) != INTEGER_CST)
4433 return NULL_TREE;
4435 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4436 move a constant to the other side. */
4437 if (!TYPE_UNSIGNED (arg0_type)
4438 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4439 return NULL_TREE;
4441 /* If EXP is signed, any overflow in the computation is undefined,
4442 so we don't worry about it so long as our computations on
4443 the bounds don't overflow. For unsigned, overflow is defined
4444 and this is exactly the right thing. */
4445 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4446 arg0_type, low, 0, arg1, 0);
4447 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4448 arg0_type, high, 1, arg1, 0);
4449 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4450 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4451 return NULL_TREE;
4453 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4454 *strict_overflow_p = true;
4456 normalize:
4457 /* Check for an unsigned range which has wrapped around the maximum
4458 value thus making n_high < n_low, and normalize it. */
4459 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4461 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4462 build_int_cst (TREE_TYPE (n_high), 1), 0);
4463 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4464 build_int_cst (TREE_TYPE (n_low), 1), 0);
4466 /* If the range is of the form +/- [ x+1, x ], we won't
4467 be able to normalize it. But then, it represents the
4468 whole range or the empty set, so make it
4469 +/- [ -, - ]. */
4470 if (tree_int_cst_equal (n_low, low)
4471 && tree_int_cst_equal (n_high, high))
4472 low = high = 0;
4473 else
4474 in_p = ! in_p;
4476 else
4477 low = n_low, high = n_high;
4479 *p_low = low;
4480 *p_high = high;
4481 *p_in_p = in_p;
4482 return arg0;
4484 CASE_CONVERT:
4485 case NON_LVALUE_EXPR:
4486 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4487 return NULL_TREE;
4489 if (! INTEGRAL_TYPE_P (arg0_type)
4490 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4491 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4492 return NULL_TREE;
4494 n_low = low, n_high = high;
4496 if (n_low != 0)
4497 n_low = fold_convert_loc (loc, arg0_type, n_low);
4499 if (n_high != 0)
4500 n_high = fold_convert_loc (loc, arg0_type, n_high);
4502 /* If we're converting arg0 from an unsigned type, to exp,
4503 a signed type, we will be doing the comparison as unsigned.
4504 The tests above have already verified that LOW and HIGH
4505 are both positive.
4507 So we have to ensure that we will handle large unsigned
4508 values the same way that the current signed bounds treat
4509 negative values. */
4511 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4513 tree high_positive;
4514 tree equiv_type;
4515 /* For fixed-point modes, we need to pass the saturating flag
4516 as the 2nd parameter. */
4517 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4518 equiv_type
4519 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4520 TYPE_SATURATING (arg0_type));
4521 else
4522 equiv_type
4523 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4525 /* A range without an upper bound is, naturally, unbounded.
4526 Since convert would have cropped a very large value, use
4527 the max value for the destination type. */
4528 high_positive
4529 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4530 : TYPE_MAX_VALUE (arg0_type);
4532 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4533 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4534 fold_convert_loc (loc, arg0_type,
4535 high_positive),
4536 build_int_cst (arg0_type, 1));
4538 /* If the low bound is specified, "and" the range with the
4539 range for which the original unsigned value will be
4540 positive. */
4541 if (low != 0)
4543 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4544 1, fold_convert_loc (loc, arg0_type,
4545 integer_zero_node),
4546 high_positive))
4547 return NULL_TREE;
4549 in_p = (n_in_p == in_p);
4551 else
4553 /* Otherwise, "or" the range with the range of the input
4554 that will be interpreted as negative. */
4555 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4556 1, fold_convert_loc (loc, arg0_type,
4557 integer_zero_node),
4558 high_positive))
4559 return NULL_TREE;
4561 in_p = (in_p != n_in_p);
4565 *p_low = n_low;
4566 *p_high = n_high;
4567 *p_in_p = in_p;
4568 return arg0;
4570 default:
4571 return NULL_TREE;
4575 /* Given EXP, a logical expression, set the range it is testing into
4576 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4577 actually being tested. *PLOW and *PHIGH will be made of the same
4578 type as the returned expression. If EXP is not a comparison, we
4579 will most likely not be returning a useful value and range. Set
4580 *STRICT_OVERFLOW_P to true if the return value is only valid
4581 because signed overflow is undefined; otherwise, do not change
4582 *STRICT_OVERFLOW_P. */
4584 tree
4585 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4586 bool *strict_overflow_p)
4588 enum tree_code code;
4589 tree arg0, arg1 = NULL_TREE;
4590 tree exp_type, nexp;
4591 int in_p;
4592 tree low, high;
4593 location_t loc = EXPR_LOCATION (exp);
4595 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4596 and see if we can refine the range. Some of the cases below may not
4597 happen, but it doesn't seem worth worrying about this. We "continue"
4598 the outer loop when we've changed something; otherwise we "break"
4599 the switch, which will "break" the while. */
4601 in_p = 0;
4602 low = high = build_int_cst (TREE_TYPE (exp), 0);
4604 while (1)
4606 code = TREE_CODE (exp);
4607 exp_type = TREE_TYPE (exp);
4608 arg0 = NULL_TREE;
4610 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4612 if (TREE_OPERAND_LENGTH (exp) > 0)
4613 arg0 = TREE_OPERAND (exp, 0);
4614 if (TREE_CODE_CLASS (code) == tcc_binary
4615 || TREE_CODE_CLASS (code) == tcc_comparison
4616 || (TREE_CODE_CLASS (code) == tcc_expression
4617 && TREE_OPERAND_LENGTH (exp) > 1))
4618 arg1 = TREE_OPERAND (exp, 1);
4620 if (arg0 == NULL_TREE)
4621 break;
4623 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4624 &high, &in_p, strict_overflow_p);
4625 if (nexp == NULL_TREE)
4626 break;
4627 exp = nexp;
4630 /* If EXP is a constant, we can evaluate whether this is true or false. */
4631 if (TREE_CODE (exp) == INTEGER_CST)
4633 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4634 exp, 0, low, 0))
4635 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4636 exp, 1, high, 1)));
4637 low = high = 0;
4638 exp = 0;
4641 *pin_p = in_p, *plow = low, *phigh = high;
4642 return exp;
4645 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4646 type, TYPE, return an expression to test if EXP is in (or out of, depending
4647 on IN_P) the range. Return 0 if the test couldn't be created. */
4649 tree
4650 build_range_check (location_t loc, tree type, tree exp, int in_p,
4651 tree low, tree high)
4653 tree etype = TREE_TYPE (exp), value;
4655 /* Disable this optimization for function pointer expressions
4656 on targets that require function pointer canonicalization. */
4657 if (targetm.have_canonicalize_funcptr_for_compare ()
4658 && TREE_CODE (etype) == POINTER_TYPE
4659 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4660 return NULL_TREE;
4662 if (! in_p)
4664 value = build_range_check (loc, type, exp, 1, low, high);
4665 if (value != 0)
4666 return invert_truthvalue_loc (loc, value);
4668 return 0;
4671 if (low == 0 && high == 0)
4672 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4674 if (low == 0)
4675 return fold_build2_loc (loc, LE_EXPR, type, exp,
4676 fold_convert_loc (loc, etype, high));
4678 if (high == 0)
4679 return fold_build2_loc (loc, GE_EXPR, type, exp,
4680 fold_convert_loc (loc, etype, low));
4682 if (operand_equal_p (low, high, 0))
4683 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4684 fold_convert_loc (loc, etype, low));
4686 if (integer_zerop (low))
4688 if (! TYPE_UNSIGNED (etype))
4690 etype = unsigned_type_for (etype);
4691 high = fold_convert_loc (loc, etype, high);
4692 exp = fold_convert_loc (loc, etype, exp);
4694 return build_range_check (loc, type, exp, 1, 0, high);
4697 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4698 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4700 int prec = TYPE_PRECISION (etype);
4702 if (wi::mask (prec - 1, false, prec) == high)
4704 if (TYPE_UNSIGNED (etype))
4706 tree signed_etype = signed_type_for (etype);
4707 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4708 etype
4709 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4710 else
4711 etype = signed_etype;
4712 exp = fold_convert_loc (loc, etype, exp);
4714 return fold_build2_loc (loc, GT_EXPR, type, exp,
4715 build_int_cst (etype, 0));
4719 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4720 This requires wrap-around arithmetic for the type of the expression.
4721 First make sure that arithmetic in this type is valid, then make sure
4722 that it wraps around. */
4723 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4724 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4725 TYPE_UNSIGNED (etype));
4727 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4729 tree utype, minv, maxv;
4731 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4732 for the type in question, as we rely on this here. */
4733 utype = unsigned_type_for (etype);
4734 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4735 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4736 build_int_cst (TREE_TYPE (maxv), 1), 1);
4737 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4739 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4740 minv, 1, maxv, 1)))
4741 etype = utype;
4742 else
4743 return 0;
4746 high = fold_convert_loc (loc, etype, high);
4747 low = fold_convert_loc (loc, etype, low);
4748 exp = fold_convert_loc (loc, etype, exp);
4750 value = const_binop (MINUS_EXPR, high, low);
4753 if (POINTER_TYPE_P (etype))
4755 if (value != 0 && !TREE_OVERFLOW (value))
4757 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4758 return build_range_check (loc, type,
4759 fold_build_pointer_plus_loc (loc, exp, low),
4760 1, build_int_cst (etype, 0), value);
4762 return 0;
4765 if (value != 0 && !TREE_OVERFLOW (value))
4766 return build_range_check (loc, type,
4767 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4768 1, build_int_cst (etype, 0), value);
4770 return 0;
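/* Sketch (illustrative, not part of this file) checking the
   (c >= 1) && (c <= 127) into (signed char) c > 0 transformation over
   every unsigned char value.  Assumes 8-bit chars and the usual
   two's-complement behaviour of the conversion to signed char
   (implementation-defined in ISO C, but what GCC targets do). */

#include <assert.h>

int
main (void)
{
  for (unsigned v = 0; v < 256; v++)
    {
      unsigned char c = (unsigned char) v;
      assert ((c >= 1 && c <= 127) == ((signed char) c > 0));
    }
  return 0;
}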
4773 /* Return the predecessor of VAL in its type, handling the infinite case. */
4775 static tree
4776 range_predecessor (tree val)
4778 tree type = TREE_TYPE (val);
4780 if (INTEGRAL_TYPE_P (type)
4781 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4782 return 0;
4783 else
4784 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4785 build_int_cst (TREE_TYPE (val), 1), 0);
4788 /* Return the successor of VAL in its type, handling the infinite case. */
4790 static tree
4791 range_successor (tree val)
4793 tree type = TREE_TYPE (val);
4795 if (INTEGRAL_TYPE_P (type)
4796 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4797 return 0;
4798 else
4799 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4800 build_int_cst (TREE_TYPE (val), 1), 0);
4803 /* Given two ranges, see if we can merge them into one. Return 1 if we
4804 can, 0 if we can't. Set the output range into the specified parameters. */
4806 bool
4807 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4808 tree high0, int in1_p, tree low1, tree high1)
4810 int no_overlap;
4811 int subset;
4812 int temp;
4813 tree tem;
4814 int in_p;
4815 tree low, high;
4816 int lowequal = ((low0 == 0 && low1 == 0)
4817 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4818 low0, 0, low1, 0)));
4819 int highequal = ((high0 == 0 && high1 == 0)
4820 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4821 high0, 1, high1, 1)));
4823 /* Make range 0 be the range that starts first, or ends last if they
4824 start at the same value. Swap them if it isn't. */
4825 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4826 low0, 0, low1, 0))
4827 || (lowequal
4828 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4829 high1, 1, high0, 1))))
4831 temp = in0_p, in0_p = in1_p, in1_p = temp;
4832 tem = low0, low0 = low1, low1 = tem;
4833 tem = high0, high0 = high1, high1 = tem;
4836 /* Now flag two cases, whether the ranges are disjoint or whether the
4837 second range is totally subsumed in the first. Note that the tests
4838 below are simplified by the ones above. */
4839 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4840 high0, 1, low1, 0));
4841 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4842 high1, 1, high0, 1));
4844 /* We now have four cases, depending on whether we are including or
4845 excluding the two ranges. */
4846 if (in0_p && in1_p)
4848 /* If they don't overlap, the result is false. If the second range
4849 is a subset it is the result. Otherwise, the range is from the start
4850 of the second to the end of the first. */
4851 if (no_overlap)
4852 in_p = 0, low = high = 0;
4853 else if (subset)
4854 in_p = 1, low = low1, high = high1;
4855 else
4856 in_p = 1, low = low1, high = high0;
4859 else if (in0_p && ! in1_p)
4861 /* If they don't overlap, the result is the first range. If they are
4862 equal, the result is false. If the second range is a subset of the
4863 first, and the ranges begin at the same place, we go from just after
4864 the end of the second range to the end of the first. If the second
4865 range is not a subset of the first, or if it is a subset and both
4866 ranges end at the same place, the range starts at the start of the
4867 first range and ends just before the second range.
4868 Otherwise, we can't describe this as a single range. */
4869 if (no_overlap)
4870 in_p = 1, low = low0, high = high0;
4871 else if (lowequal && highequal)
4872 in_p = 0, low = high = 0;
4873 else if (subset && lowequal)
4875 low = range_successor (high1);
4876 high = high0;
4877 in_p = 1;
4878 if (low == 0)
4880 /* We are in the weird situation where high0 > high1 but
4881 high1 has no successor. Punt. */
4882 return 0;
4885 else if (! subset || highequal)
4887 low = low0;
4888 high = range_predecessor (low1);
4889 in_p = 1;
4890 if (high == 0)
4892 /* low0 < low1 but low1 has no predecessor. Punt. */
4893 return 0;
4896 else
4897 return 0;
4900 else if (! in0_p && in1_p)
4902 /* If they don't overlap, the result is the second range. If the second
4903 is a subset of the first, the result is false. Otherwise,
4904 the range starts just after the first range and ends at the
4905 end of the second. */
4906 if (no_overlap)
4907 in_p = 1, low = low1, high = high1;
4908 else if (subset || highequal)
4909 in_p = 0, low = high = 0;
4910 else
4912 low = range_successor (high0);
4913 high = high1;
4914 in_p = 1;
4915 if (low == 0)
4917 /* high1 > high0 but high0 has no successor. Punt. */
4918 return 0;
4923 else
4925 /* The case where we are excluding both ranges. Here the complex case
4926 is if they don't overlap. In that case, the only time we have a
4927 range is if they are adjacent. If the second is a subset of the
4928 first, the result is the first. Otherwise, the range to exclude
4929 starts at the beginning of the first range and ends at the end of the
4930 second. */
4931 if (no_overlap)
4933 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4934 range_successor (high0),
4935 1, low1, 0)))
4936 in_p = 0, low = low0, high = high1;
4937 else
4939 /* Canonicalize - [min, x] into - [-, x]. */
4940 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4941 switch (TREE_CODE (TREE_TYPE (low0)))
4943 case ENUMERAL_TYPE:
4944 if (TYPE_PRECISION (TREE_TYPE (low0))
4945 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4946 break;
4947 /* FALLTHROUGH */
4948 case INTEGER_TYPE:
4949 if (tree_int_cst_equal (low0,
4950 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4951 low0 = 0;
4952 break;
4953 case POINTER_TYPE:
4954 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4955 && integer_zerop (low0))
4956 low0 = 0;
4957 break;
4958 default:
4959 break;
4962 /* Canonicalize - [x, max] into - [x, -]. */
4963 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4964 switch (TREE_CODE (TREE_TYPE (high1)))
4966 case ENUMERAL_TYPE:
4967 if (TYPE_PRECISION (TREE_TYPE (high1))
4968 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4969 break;
4970 /* FALLTHROUGH */
4971 case INTEGER_TYPE:
4972 if (tree_int_cst_equal (high1,
4973 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4974 high1 = 0;
4975 break;
4976 case POINTER_TYPE:
4977 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4978 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4979 high1, 1,
4980 build_int_cst (TREE_TYPE (high1), 1),
4981 1)))
4982 high1 = 0;
4983 break;
4984 default:
4985 break;
4988 /* The ranges might be also adjacent between the maximum and
4989 minimum values of the given type. For
4990 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4991 return + [x + 1, y - 1]. */
4992 if (low0 == 0 && high1 == 0)
4994 low = range_successor (high0);
4995 high = range_predecessor (low1);
4996 if (low == 0 || high == 0)
4997 return 0;
4999 in_p = 1;
5001 else
5002 return 0;
5005 else if (subset)
5006 in_p = 0, low = low0, high = high0;
5007 else
5008 in_p = 0, low = low0, high = high1;
5011 *pin_p = in_p, *plow = low, *phigh = high;
5012 return 1;
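/* A brute-force sketch (illustrative, not part of this file) of two of
   the merges above: ANDing two "in" ranges intersects them, and ORing
   two adjacent "in" ranges concatenates them.  The helper name is
   hypothetical. */

#include <assert.h>

static int
in_range (int x, int lo, int hi)
{
  return x >= lo && x <= hi;
}

int
main (void)
{
  for (int x = -50; x <= 50; x++)
    {
      /* + [2, 10] AND + [5, 20] is + [5, 10]. */
      assert ((in_range (x, 2, 10) && in_range (x, 5, 20))
              == in_range (x, 5, 10));
      /* + [0, 4] OR + [5, 9] is + [0, 9] (adjacent ranges). */
      assert ((in_range (x, 0, 4) || in_range (x, 5, 9))
              == in_range (x, 0, 9));
    }
  return 0;
}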
5016 /* Subroutine of fold, looking inside expressions of the form
5017 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5018 of the COND_EXPR. This function is being used also to optimize
5019 A op B ? C : A, by reversing the comparison first.
5021 Return a folded expression whose code is not a COND_EXPR
5022 anymore, or NULL_TREE if no folding opportunity is found. */
5024 static tree
5025 fold_cond_expr_with_comparison (location_t loc, tree type,
5026 tree arg0, tree arg1, tree arg2)
5028 enum tree_code comp_code = TREE_CODE (arg0);
5029 tree arg00 = TREE_OPERAND (arg0, 0);
5030 tree arg01 = TREE_OPERAND (arg0, 1);
5031 tree arg1_type = TREE_TYPE (arg1);
5032 tree tem;
5034 STRIP_NOPS (arg1);
5035 STRIP_NOPS (arg2);
5037 /* If we have A op 0 ? A : -A, consider applying the following
5038 transformations:
5040 A == 0? A : -A same as -A
5041 A != 0? A : -A same as A
5042 A >= 0? A : -A same as abs (A)
5043 A > 0? A : -A same as abs (A)
5044 A <= 0? A : -A same as -abs (A)
5045 A < 0? A : -A same as -abs (A)
5047 None of these transformations work for modes with signed
5048 zeros. If A is +/-0, the first two transformations will
5049 change the sign of the result (from +0 to -0, or vice
5050 versa). The last four will fix the sign of the result,
5051 even though the original expressions could be positive or
5052 negative, depending on the sign of A.
5054 Note that all these transformations are correct if A is
5055 NaN, since the two alternatives (A and -A) are also NaNs. */
5056 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5057 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5058 ? real_zerop (arg01)
5059 : integer_zerop (arg01))
5060 && ((TREE_CODE (arg2) == NEGATE_EXPR
5061 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5062 /* In the case that A is of the form X-Y, '-A' (arg2) may
5063 have already been folded to Y-X, check for that. */
5064 || (TREE_CODE (arg1) == MINUS_EXPR
5065 && TREE_CODE (arg2) == MINUS_EXPR
5066 && operand_equal_p (TREE_OPERAND (arg1, 0),
5067 TREE_OPERAND (arg2, 1), 0)
5068 && operand_equal_p (TREE_OPERAND (arg1, 1),
5069 TREE_OPERAND (arg2, 0), 0))))
5070 switch (comp_code)
5072 case EQ_EXPR:
5073 case UNEQ_EXPR:
5074 tem = fold_convert_loc (loc, arg1_type, arg1);
5075 return pedantic_non_lvalue_loc (loc,
5076 fold_convert_loc (loc, type,
5077 negate_expr (tem)));
5078 case NE_EXPR:
5079 case LTGT_EXPR:
5080 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5081 case UNGE_EXPR:
5082 case UNGT_EXPR:
5083 if (flag_trapping_math)
5084 break;
5085 /* Fall through. */
5086 case GE_EXPR:
5087 case GT_EXPR:
5088 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5089 break;
5090 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5091 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5092 case UNLE_EXPR:
5093 case UNLT_EXPR:
5094 if (flag_trapping_math)
5095 break;
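/* Fall through. */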
5096 case LE_EXPR:
5097 case LT_EXPR:
5098 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5099 break;
5100 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5101 return negate_expr (fold_convert_loc (loc, type, tem));
5102 default:
5103 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5104 break;
5107 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5108 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5109 both transformations are correct when A is NaN: A != 0
5110 is then true, and A == 0 is false. */
5112 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5113 && integer_zerop (arg01) && integer_zerop (arg2))
5115 if (comp_code == NE_EXPR)
5116 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5117 else if (comp_code == EQ_EXPR)
5118 return build_zero_cst (type);
5121 /* Try some transformations of A op B ? A : B.
5123 A == B? A : B same as B
5124 A != B? A : B same as A
5125 A >= B? A : B same as max (A, B)
5126 A > B? A : B same as max (B, A)
5127 A <= B? A : B same as min (A, B)
5128 A < B? A : B same as min (B, A)
5130 As above, these transformations don't work in the presence
5131 of signed zeros. For example, if A and B are zeros of
5132 opposite sign, the first two transformations will change
5133 the sign of the result. In the last four, the original
5134 expressions give different results for (A=+0, B=-0) and
5135 (A=-0, B=+0), but the transformed expressions do not.
5137 The first two transformations are correct if either A or B
5138 is a NaN. In the first transformation, the condition will
5139 be false, and B will indeed be chosen. In the case of the
5140 second transformation, the condition A != B will be true,
5141 and A will be chosen.
5143 The conversions to max() and min() are not correct if B is
5144 a number and A is not. The conditions in the original
5145 expressions will be false, so all four give B. The min()
5146 and max() versions would give a NaN instead. */
5147 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5148 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5149 /* Avoid these transformations if the COND_EXPR may be used
5150 as an lvalue in the C++ front-end. PR c++/19199. */
5151 && (in_gimple_form
5152 || VECTOR_TYPE_P (type)
5153 || (! lang_GNU_CXX ()
5154 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5155 || ! maybe_lvalue_p (arg1)
5156 || ! maybe_lvalue_p (arg2)))
5158 tree comp_op0 = arg00;
5159 tree comp_op1 = arg01;
5160 tree comp_type = TREE_TYPE (comp_op0);
5162 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5163 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5165 comp_type = type;
5166 comp_op0 = arg1;
5167 comp_op1 = arg2;
5170 switch (comp_code)
5172 case EQ_EXPR:
5173 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5174 case NE_EXPR:
5175 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5176 case LE_EXPR:
5177 case LT_EXPR:
5178 case UNLE_EXPR:
5179 case UNLT_EXPR:
5180 /* In C++ a ?: expression can be an lvalue, so put the
5181 operand which will be used if they are equal first
5182 so that we can convert this back to the
5183 corresponding COND_EXPR. */
5184 if (!HONOR_NANS (arg1))
5186 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5187 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5188 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5189 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5190 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5191 comp_op1, comp_op0);
5192 return pedantic_non_lvalue_loc (loc,
5193 fold_convert_loc (loc, type, tem));
5195 break;
5196 case GE_EXPR:
5197 case GT_EXPR:
5198 case UNGE_EXPR:
5199 case UNGT_EXPR:
5200 if (!HONOR_NANS (arg1))
5202 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5203 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5204 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5205 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5206 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5207 comp_op1, comp_op0);
5208 return pedantic_non_lvalue_loc (loc,
5209 fold_convert_loc (loc, type, tem));
5211 break;
5212 case UNEQ_EXPR:
5213 if (!HONOR_NANS (arg1))
5214 return pedantic_non_lvalue_loc (loc,
5215 fold_convert_loc (loc, type, arg2));
5216 break;
5217 case LTGT_EXPR:
5218 if (!HONOR_NANS (arg1))
5219 return pedantic_non_lvalue_loc (loc,
5220 fold_convert_loc (loc, type, arg1));
5221 break;
5222 default:
5223 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5224 break;
5228 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5229 we might still be able to simplify this. For example,
5230 if C1 is one less or one more than C2, this might have started
5231 out as a MIN or MAX and been transformed by this function.
5232 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5234 if (INTEGRAL_TYPE_P (type)
5235 && TREE_CODE (arg01) == INTEGER_CST
5236 && TREE_CODE (arg2) == INTEGER_CST)
5237 switch (comp_code)
5239 case EQ_EXPR:
5240 if (TREE_CODE (arg1) == INTEGER_CST)
5241 break;
5242 /* We can replace A with C1 in this case. */
5243 arg1 = fold_convert_loc (loc, type, arg01);
5244 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5246 case LT_EXPR:
5247 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5248 MIN_EXPR, to preserve the signedness of the comparison. */
5249 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5250 OEP_ONLY_CONST)
5251 && operand_equal_p (arg01,
5252 const_binop (PLUS_EXPR, arg2,
5253 build_int_cst (type, 1)),
5254 OEP_ONLY_CONST))
5256 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5257 fold_convert_loc (loc, TREE_TYPE (arg00),
5258 arg2));
5259 return pedantic_non_lvalue_loc (loc,
5260 fold_convert_loc (loc, type, tem));
5262 break;
5264 case LE_EXPR:
5265 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5266 as above. */
5267 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5268 OEP_ONLY_CONST)
5269 && operand_equal_p (arg01,
5270 const_binop (MINUS_EXPR, arg2,
5271 build_int_cst (type, 1)),
5272 OEP_ONLY_CONST))
5274 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5275 fold_convert_loc (loc, TREE_TYPE (arg00),
5276 arg2));
5277 return pedantic_non_lvalue_loc (loc,
5278 fold_convert_loc (loc, type, tem));
5280 break;
5282 case GT_EXPR:
5283 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5284 MAX_EXPR, to preserve the signedness of the comparison. */
5285 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5286 OEP_ONLY_CONST)
5287 && operand_equal_p (arg01,
5288 const_binop (MINUS_EXPR, arg2,
5289 build_int_cst (type, 1)),
5290 OEP_ONLY_CONST))
5292 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5293 fold_convert_loc (loc, TREE_TYPE (arg00),
5294 arg2));
5295 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5297 break;
5299 case GE_EXPR:
5300 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5301 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5302 OEP_ONLY_CONST)
5303 && operand_equal_p (arg01,
5304 const_binop (PLUS_EXPR, arg2,
5305 build_int_cst (type, 1)),
5306 OEP_ONLY_CONST))
5308 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5309 fold_convert_loc (loc, TREE_TYPE (arg00),
5310 arg2));
5311 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5313 break;
5314 case NE_EXPR:
5315 break;
5316 default:
5317 gcc_unreachable ();
5320 return NULL_TREE;
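/* Sketch (illustrative, not part of this file) of the signed-zero
   hazard documented above: for A = +0.0, "A == 0 ? A : -A" yields
   +0.0, but the would-be simplification "-A" yields -0.0.  Assumes
   IEEE 754 doubles and no -ffast-math. */

#include <assert.h>
#include <math.h>

int
main (void)
{
  volatile double a = 0.0;              /* +0.0 */
  double original = a == 0 ? a : -a;    /* selects A: +0.0 */
  double folded = -a;                   /* the unsound fold: -0.0 */
  assert ((signbit (original) != 0) != (signbit (folded) != 0));
  return 0;
}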
5325 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5326 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5327 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5328 false) >= 2)
5329 #endif
5331 /* EXP is some logical combination of boolean tests. See if we can
5332 merge it into some range test. Return the new tree if so. */
5334 static tree
5335 fold_range_test (location_t loc, enum tree_code code, tree type,
5336 tree op0, tree op1)
5338 int or_op = (code == TRUTH_ORIF_EXPR
5339 || code == TRUTH_OR_EXPR);
5340 int in0_p, in1_p, in_p;
5341 tree low0, low1, low, high0, high1, high;
5342 bool strict_overflow_p = false;
5343 tree tem, lhs, rhs;
5344 const char * const warnmsg = G_("assuming signed overflow does not occur "
5345 "when simplifying range test");
5347 if (!INTEGRAL_TYPE_P (type))
5348 return 0;
5350 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5351 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5353 /* If this is an OR operation, invert both sides; we will invert
5354 again at the end. */
5355 if (or_op)
5356 in0_p = ! in0_p, in1_p = ! in1_p;
5358 /* If both expressions are the same, if we can merge the ranges, and we
5359 can build the range test, return it or it inverted. If one of the
5360 ranges is always true or always false, consider it to be the same
5361 expression as the other. */
5362 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5363 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5364 in1_p, low1, high1)
5365 && 0 != (tem = (build_range_check (loc, type,
5366 lhs != 0 ? lhs
5367 : rhs != 0 ? rhs : integer_zero_node,
5368 in_p, low, high))))
5370 if (strict_overflow_p)
5371 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5372 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5375 /* On machines where the branch cost is expensive, if this is a
5376 short-circuited branch and the underlying object on both sides
5377 is the same, make a non-short-circuit operation. */
5378 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5379 && lhs != 0 && rhs != 0
5380 && (code == TRUTH_ANDIF_EXPR
5381 || code == TRUTH_ORIF_EXPR)
5382 && operand_equal_p (lhs, rhs, 0))
5384 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5385 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5386 which cases we can't do this. */
5387 if (simple_operand_p (lhs))
5388 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5389 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5390 type, op0, op1);
5392 else if (!lang_hooks.decls.global_bindings_p ()
5393 && !CONTAINS_PLACEHOLDER_P (lhs))
5395 tree common = save_expr (lhs);
5397 if (0 != (lhs = build_range_check (loc, type, common,
5398 or_op ? ! in0_p : in0_p,
5399 low0, high0))
5400 && (0 != (rhs = build_range_check (loc, type, common,
5401 or_op ? ! in1_p : in1_p,
5402 low1, high1))))
5404 if (strict_overflow_p)
5405 fold_overflow_warning (warnmsg,
5406 WARN_STRICT_OVERFLOW_COMPARISON);
5407 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5408 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5409 type, lhs, rhs);
5414 return 0;
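/* Sketch (illustrative, not part of this file) of the non-short-circuit
   rewrite: when both operands are simple and side-effect free, the
   branching && form and the branch-free & form always agree, so the
   cheaper one can be chosen based on branch cost. */

#include <assert.h>

int
main (void)
{
  for (int x = -20; x <= 20; x++)
    assert ((x > 2 && x < 10) == ((x > 2) & (x < 10)));
  return 0;
}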
5417 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5418 bit value. Arrange things so the extra bits will be set to zero if and
5419 only if C is signed-extended to its full width. If MASK is nonzero,
5420 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5422 static tree
5423 unextend (tree c, int p, int unsignedp, tree mask)
5425 tree type = TREE_TYPE (c);
5426 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5427 tree temp;
5429 if (p == modesize || unsignedp)
5430 return c;
5432 /* We work by getting just the sign bit into the low-order bit, then
5433 into the high-order bit, then sign-extend. We then XOR that value
5434 with C. */
5435 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5437 /* We must use a signed type in order to get an arithmetic right shift.
5438 However, we must also avoid introducing accidental overflows, so that
5439 a subsequent call to integer_zerop will work. Hence we must
5440 do the type conversion here. At this point, the constant is either
5441 zero or one, and the conversion to a signed type can never overflow.
5442 We could get an overflow if this conversion is done anywhere else. */
5443 if (TYPE_UNSIGNED (type))
5444 temp = fold_convert (signed_type_for (type), temp);
5446 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5447 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5448 if (mask != 0)
5449 temp = const_binop (BIT_AND_EXPR, temp,
5450 fold_convert (TREE_TYPE (c), mask));
5451 /* If necessary, convert the type back to match the type of C. */
5452 if (TYPE_UNSIGNED (type))
5453 temp = fold_convert (type, temp);
5455 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
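/* An analogue of unextend (illustrative, not part of this file) for a
   P-bit field in a 32-bit word: XOR C with its field sign bit
   replicated above the field.  The result has all zeros above the
   field if and only if C was the sign extension of its low P bits.
   The helper name is hypothetical; requires 1 <= P <= 31. */

#include <assert.h>
#include <stdint.h>

static uint32_t
unextend32 (uint32_t c, unsigned p)
{
  uint32_t sign = (c >> (p - 1)) & 1u;   /* sign bit of the field */
  uint32_t high = sign ? ~0u << p : 0u;  /* replicated above the field */
  return c ^ high;
}

int
main (void)
{
  assert (unextend32 (0xffffff85u, 8) == 0x85u);  /* sign-extended */
  assert (unextend32 (0x00000085u, 8) != 0x85u);  /* not extended */
  assert (unextend32 (0x00000045u, 8) == 0x45u);  /* nonnegative field */
  return 0;
}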
5458 /* For an expression that has the form
5459 (A && B) || ~B
5460 or
5461 (A || B) && ~B,
5462 we can drop one of the inner expressions and simplify to
5463 A || ~B
5464 or
5465 A && ~B
5466 LOC is the location of the resulting expression. OP is the inner
5467 logical operation (the left-hand side in the examples above), while CMPOP
5468 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5469 removing a condition that guards another, as in
5470 (A != NULL && A->...) || A == NULL
5471 which we must not transform. If RHS_ONLY is true, only eliminate the
5472 right-most operand of the inner logical operation. */
5474 static tree
5475 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5476 bool rhs_only)
5478 tree type = TREE_TYPE (cmpop);
5479 enum tree_code code = TREE_CODE (cmpop);
5480 enum tree_code truthop_code = TREE_CODE (op);
5481 tree lhs = TREE_OPERAND (op, 0);
5482 tree rhs = TREE_OPERAND (op, 1);
5483 tree orig_lhs = lhs, orig_rhs = rhs;
5484 enum tree_code rhs_code = TREE_CODE (rhs);
5485 enum tree_code lhs_code = TREE_CODE (lhs);
5486 enum tree_code inv_code;
5488 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5489 return NULL_TREE;
5491 if (TREE_CODE_CLASS (code) != tcc_comparison)
5492 return NULL_TREE;
5494 if (rhs_code == truthop_code)
5496 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5497 if (newrhs != NULL_TREE)
5499 rhs = newrhs;
5500 rhs_code = TREE_CODE (rhs);
5503 if (lhs_code == truthop_code && !rhs_only)
5505 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5506 if (newlhs != NULL_TREE)
5508 lhs = newlhs;
5509 lhs_code = TREE_CODE (lhs);
5513 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5514 if (inv_code == rhs_code
5515 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5516 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5517 return lhs;
5518 if (!rhs_only && inv_code == lhs_code
5519 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5520 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5521 return rhs;
5522 if (rhs != orig_rhs || lhs != orig_lhs)
5523 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5524 lhs, rhs);
5525 return NULL_TREE;
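/* A truth-table sketch (illustrative, not part of this file) of the
   simplification above: dropping the inner operand that is the
   opposite of the outer comparison never changes the result. */

#include <assert.h>

int
main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      {
        assert (((a && b) || !b) == (a || !b));
        assert (((a || b) && !b) == (a && !b));
      }
  return 0;
}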
5528 /* Find ways of folding logical expressions of LHS and RHS:
5529 Try to merge two comparisons to the same innermost item.
5530 Look for range tests like "ch >= '0' && ch <= '9'".
5531 Look for combinations of simple terms on machines with expensive branches
5532 and evaluate the RHS unconditionally.
5534 For example, if we have p->a == 2 && p->b == 4 and we can make an
5535 object large enough to span both A and B, we can do this with a comparison
5536 against the object ANDed with the a mask.
5538 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5539 operations to do this with one comparison.
5541 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5542 function and the one above.
5544 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5545 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5547 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5548 two operands.
5550 We return the simplified tree or 0 if no optimization is possible. */
5552 static tree
5553 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5554 tree lhs, tree rhs)
5556 /* If this is the "or" of two comparisons, we can do something if
5557 the comparisons are NE_EXPR. If this is the "and", we can do something
5558 if the comparisons are EQ_EXPR. I.e.,
5559 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5561 WANTED_CODE is this comparison code (EQ_EXPR or NE_EXPR). For single bit fields, we can
5562 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5563 comparison for one-bit fields. */
5565 enum tree_code wanted_code;
5566 enum tree_code lcode, rcode;
5567 tree ll_arg, lr_arg, rl_arg, rr_arg;
5568 tree ll_inner, lr_inner, rl_inner, rr_inner;
5569 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5570 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5571 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5572 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5573 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5574 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5575 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5576 machine_mode lnmode, rnmode;
5577 tree ll_mask, lr_mask, rl_mask, rr_mask;
5578 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5579 tree l_const, r_const;
5580 tree lntype, rntype, result;
5581 HOST_WIDE_INT first_bit, end_bit;
5582 int volatilep;
5584 /* Start by getting the comparison codes. Fail if anything is volatile.
5585 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5586 it were surrounded with a NE_EXPR. */
5588 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5589 return 0;
5591 lcode = TREE_CODE (lhs);
5592 rcode = TREE_CODE (rhs);
5594 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5596 lhs = build2 (NE_EXPR, truth_type, lhs,
5597 build_int_cst (TREE_TYPE (lhs), 0));
5598 lcode = NE_EXPR;
5601 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5603 rhs = build2 (NE_EXPR, truth_type, rhs,
5604 build_int_cst (TREE_TYPE (rhs), 0));
5605 rcode = NE_EXPR;
5608 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5609 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5610 return 0;
5612 ll_arg = TREE_OPERAND (lhs, 0);
5613 lr_arg = TREE_OPERAND (lhs, 1);
5614 rl_arg = TREE_OPERAND (rhs, 0);
5615 rr_arg = TREE_OPERAND (rhs, 1);
5617 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5618 if (simple_operand_p (ll_arg)
5619 && simple_operand_p (lr_arg))
5621 if (operand_equal_p (ll_arg, rl_arg, 0)
5622 && operand_equal_p (lr_arg, rr_arg, 0))
5624 result = combine_comparisons (loc, code, lcode, rcode,
5625 truth_type, ll_arg, lr_arg);
5626 if (result)
5627 return result;
5629 else if (operand_equal_p (ll_arg, rr_arg, 0)
5630 && operand_equal_p (lr_arg, rl_arg, 0))
5632 result = combine_comparisons (loc, code, lcode,
5633 swap_tree_comparison (rcode),
5634 truth_type, ll_arg, lr_arg);
5635 if (result)
5636 return result;
5640 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5641 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5643 /* If the RHS can be evaluated unconditionally and its operands are
5644 simple, it wins to evaluate the RHS unconditionally on machines
5645 with expensive branches. In this case, this isn't a comparison
5646 that can be merged. */
5648 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5649 false) >= 2
5650 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5651 && simple_operand_p (rl_arg)
5652 && simple_operand_p (rr_arg))
5654 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5655 if (code == TRUTH_OR_EXPR
5656 && lcode == NE_EXPR && integer_zerop (lr_arg)
5657 && rcode == NE_EXPR && integer_zerop (rr_arg)
5658 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5659 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5660 return build2_loc (loc, NE_EXPR, truth_type,
5661 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5662 ll_arg, rl_arg),
5663 build_int_cst (TREE_TYPE (ll_arg), 0));
5665 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5666 if (code == TRUTH_AND_EXPR
5667 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5668 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5669 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5670 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5671 return build2_loc (loc, EQ_EXPR, truth_type,
5672 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5673 ll_arg, rl_arg),
5674 build_int_cst (TREE_TYPE (ll_arg), 0));
5677 /* See if the comparisons can be merged. Then get all the parameters for
5678 each side. */
5680 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5681 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5682 return 0;
5684 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5685 volatilep = 0;
5686 ll_inner = decode_field_reference (loc, &ll_arg,
5687 &ll_bitsize, &ll_bitpos, &ll_mode,
5688 &ll_unsignedp, &ll_reversep, &volatilep,
5689 &ll_mask, &ll_and_mask);
5690 lr_inner = decode_field_reference (loc, &lr_arg,
5691 &lr_bitsize, &lr_bitpos, &lr_mode,
5692 &lr_unsignedp, &lr_reversep, &volatilep,
5693 &lr_mask, &lr_and_mask);
5694 rl_inner = decode_field_reference (loc, &rl_arg,
5695 &rl_bitsize, &rl_bitpos, &rl_mode,
5696 &rl_unsignedp, &rl_reversep, &volatilep,
5697 &rl_mask, &rl_and_mask);
5698 rr_inner = decode_field_reference (loc, &rr_arg,
5699 &rr_bitsize, &rr_bitpos, &rr_mode,
5700 &rr_unsignedp, &rr_reversep, &volatilep,
5701 &rr_mask, &rr_and_mask);
5703 /* The inner operation on the lhs of each comparison must be the
5704 same if we are to be able to do anything.
5705 Then see if we have constants. If not, the same must be true for
5706 the rhs's. */
5707 if (volatilep
5708 || ll_reversep != rl_reversep
5709 || ll_inner == 0 || rl_inner == 0
5710 || ! operand_equal_p (ll_inner, rl_inner, 0))
5711 return 0;
5713 if (TREE_CODE (lr_arg) == INTEGER_CST
5714 && TREE_CODE (rr_arg) == INTEGER_CST)
5716 l_const = lr_arg, r_const = rr_arg;
5717 lr_reversep = ll_reversep;
5719 else if (lr_reversep != rr_reversep
5720 || lr_inner == 0 || rr_inner == 0
5721 || ! operand_equal_p (lr_inner, rr_inner, 0))
5722 return 0;
5723 else
5724 l_const = r_const = 0;
5726 /* If either comparison code is not correct for our logical operation,
5727 fail. However, we can convert a one-bit comparison against zero into
5728 the opposite comparison against that bit being set in the field. */
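/* Editor's note (illustration only): for a power-of-two mask such as
   C = 0x4, the value of x & 0x4 is either 0 or 0x4, so
   (x & 0x4) == 0 is equivalent to (x & 0x4) != 0x4 and vice versa;
   that is why the constant can be replaced by the mask itself when
   flipping the comparison code.  */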
5730 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5731 if (lcode != wanted_code)
5733 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5735 /* Make the left operand unsigned, since we are only interested
5736 in the value of one bit. Otherwise we are doing the wrong
5737 thing below. */
5738 ll_unsignedp = 1;
5739 l_const = ll_mask;
5741 else
5742 return 0;
5745 /* This is analogous to the code for l_const above. */
5746 if (rcode != wanted_code)
5748 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5750 rl_unsignedp = 1;
5751 r_const = rl_mask;
5753 else
5754 return 0;
5757 /* See if we can find a mode that contains both fields being compared on
5758 the left. If we can't, fail. Otherwise, update all constants and masks
5759 to be relative to a field of that size. */
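/* Editor's worked example (illustration only): with ll_bitpos = 0,
   ll_bitsize = 8, rl_bitpos = 8 and rl_bitsize = 8, we get
   first_bit = 0 and end_bit = 16, so a 16-bit mode suffices; then
   lnbitpos = 0 and the adjusted positions are xll_bitpos = 0 and
   xrl_bitpos = 8 (before any big-endian correction below).  */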
5760 first_bit = MIN (ll_bitpos, rl_bitpos);
5761 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5762 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5763 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5764 volatilep);
5765 if (lnmode == VOIDmode)
5766 return 0;
5768 lnbitsize = GET_MODE_BITSIZE (lnmode);
5769 lnbitpos = first_bit & ~ (lnbitsize - 1);
5770 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5771 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5773 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5775 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5776 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5779 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5780 size_int (xll_bitpos));
5781 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5782 size_int (xrl_bitpos));
5784 if (l_const)
5786 l_const = fold_convert_loc (loc, lntype, l_const);
5787 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5788 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5789 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5790 fold_build1_loc (loc, BIT_NOT_EXPR,
5791 lntype, ll_mask))))
5793 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5795 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5798 if (r_const)
5800 r_const = fold_convert_loc (loc, lntype, r_const);
5801 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5802 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5803 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5804 fold_build1_loc (loc, BIT_NOT_EXPR,
5805 lntype, rl_mask))))
5807 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5809 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5813 /* If the right sides are not constant, do the same for them. Also,
5814 disallow this optimization if a size or signedness mismatch occurs
5815 between the left and right sides. */
5816 if (l_const == 0)
5818 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5819 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5820 /* Make sure the two fields on the right
5821 correspond to the left without being swapped. */
5822 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5823 return 0;
5825 first_bit = MIN (lr_bitpos, rr_bitpos);
5826 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5827 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5828 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5829 volatilep);
5830 if (rnmode == VOIDmode)
5831 return 0;
5833 rnbitsize = GET_MODE_BITSIZE (rnmode);
5834 rnbitpos = first_bit & ~ (rnbitsize - 1);
5835 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5836 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5838 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5840 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5841 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5844 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5845 rntype, lr_mask),
5846 size_int (xlr_bitpos));
5847 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5848 rntype, rr_mask),
5849 size_int (xrr_bitpos));
5851 /* Make a mask that corresponds to both fields being compared.
5852 Do this for both items being compared. If the operands are the
5853 same size and the bits being compared are in the same position
5854 then we can do this by masking both and comparing the masked
5855 results. */
5856 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5857 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5858 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5860 lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
5861 lntype, lnbitsize, lnbitpos,
5862 ll_unsignedp || rl_unsignedp, ll_reversep);
5863 if (! all_ones_mask_p (ll_mask, lnbitsize))
5864 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5866 rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
5867 rntype, rnbitsize, rnbitpos,
5868 lr_unsignedp || rr_unsignedp, lr_reversep);
5869 if (! all_ones_mask_p (lr_mask, rnbitsize))
5870 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5872 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5875 /* There is still another way we can do something: If both pairs of
5876 fields being compared are adjacent, we may be able to make a wider
5877 field containing them both.
5879 Note that we still must mask the lhs/rhs expressions. Furthermore,
5880 the mask must be shifted to account for the shift done by
5881 make_bit_field_ref. */
5882 if ((ll_bitsize + ll_bitpos == rl_bitpos
5883 && lr_bitsize + lr_bitpos == rr_bitpos)
5884 || (ll_bitpos == rl_bitpos + rl_bitsize
5885 && lr_bitpos == rr_bitpos + rr_bitsize))
5887 tree type;
5889 lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
5890 ll_bitsize + rl_bitsize,
5891 MIN (ll_bitpos, rl_bitpos),
5892 ll_unsignedp, ll_reversep);
5893 rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
5894 lr_bitsize + rr_bitsize,
5895 MIN (lr_bitpos, rr_bitpos),
5896 lr_unsignedp, lr_reversep);
5898 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5899 size_int (MIN (xll_bitpos, xrl_bitpos)));
5900 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5901 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5903 /* Convert to the smaller type before masking out unwanted bits. */
5904 type = lntype;
5905 if (lntype != rntype)
5907 if (lnbitsize > rnbitsize)
5909 lhs = fold_convert_loc (loc, rntype, lhs);
5910 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5911 type = rntype;
5913 else if (lnbitsize < rnbitsize)
5915 rhs = fold_convert_loc (loc, lntype, rhs);
5916 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5917 type = lntype;
5921 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5922 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5924 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5925 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5927 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5930 return 0;
5933 /* Handle the case of comparisons with constants. If there is something in
5934 common between the masks, those bits of the constants must be the same.
5935 If not, the condition is always false. Test for this to avoid generating
5936 incorrect code below. */
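/* Editor's example (illustration only): masks 0x0c and 0x06 share
   bit 2; if l_const has that bit clear while r_const has it set, the
   two tests cannot both succeed, so the AND of equality tests is
   always false and the OR of not-equal tests is always true.  */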
5937 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5938 if (! integer_zerop (result)
5939 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5940 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5942 if (wanted_code == NE_EXPR)
5944 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5945 return constant_boolean_node (true, truth_type);
5947 else
5949 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5950 return constant_boolean_node (false, truth_type);
5954 /* Construct the expression we will return. First get the component
5955 reference we will make. Unless the mask is all ones for the width of
5956 that field, perform the mask operation. Then compare with the
5957 merged constant. */
5958 result = make_bit_field_ref (loc, ll_inner, ll_arg,
5959 lntype, lnbitsize, lnbitpos,
5960 ll_unsignedp || rl_unsignedp, ll_reversep);
5962 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5963 if (! all_ones_mask_p (ll_mask, lnbitsize))
5964 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5966 return build2_loc (loc, wanted_code, truth_type, result,
5967 const_binop (BIT_IOR_EXPR, l_const, r_const));
5970 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5971 constant. */
5973 static tree
5974 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5975 tree op0, tree op1)
5977 tree arg0 = op0;
5978 enum tree_code op_code;
5979 tree comp_const;
5980 tree minmax_const;
5981 int consts_equal, consts_lt;
5982 tree inner;
5984 STRIP_SIGN_NOPS (arg0);
5986 op_code = TREE_CODE (arg0);
5987 minmax_const = TREE_OPERAND (arg0, 1);
5988 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5989 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5990 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5991 inner = TREE_OPERAND (arg0, 0);
5993 /* If something does not permit us to optimize, return NULL_TREE. */
5994 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5995 || TREE_CODE (comp_const) != INTEGER_CST
5996 || TREE_OVERFLOW (comp_const)
5997 || TREE_CODE (minmax_const) != INTEGER_CST
5998 || TREE_OVERFLOW (minmax_const))
5999 return NULL_TREE;
6001 /* Now handle all the various comparison codes. We only handle EQ_EXPR
6002 and GT_EXPR, doing the rest with recursive calls using logical
6003 simplifications. */
6004 switch (code)
6006 case NE_EXPR: case LT_EXPR: case LE_EXPR:
6008 tree tem
6009 = optimize_minmax_comparison (loc,
6010 invert_tree_comparison (code, false),
6011 type, op0, op1);
6012 if (tem)
6013 return invert_truthvalue_loc (loc, tem);
6014 return NULL_TREE;
6017 case GE_EXPR:
6018 return
6019 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
6020 optimize_minmax_comparison
6021 (loc, EQ_EXPR, type, arg0, comp_const),
6022 optimize_minmax_comparison
6023 (loc, GT_EXPR, type, arg0, comp_const));
6025 case EQ_EXPR:
6026 if (op_code == MAX_EXPR && consts_equal)
6027 /* MAX (X, 0) == 0 -> X <= 0 */
6028 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
6030 else if (op_code == MAX_EXPR && consts_lt)
6031 /* MAX (X, 0) == 5 -> X == 5 */
6032 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6034 else if (op_code == MAX_EXPR)
6035 /* MAX (X, 0) == -1 -> false */
6036 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6038 else if (consts_equal)
6039 /* MIN (X, 0) == 0 -> X >= 0 */
6040 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
6042 else if (consts_lt)
6043 /* MIN (X, 0) == 5 -> false */
6044 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6046 else
6047 /* MIN (X, 0) == -1 -> X == -1 */
6048 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6050 case GT_EXPR:
6051 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
6052 /* MAX (X, 0) > 0 -> X > 0
6053 MAX (X, 0) > 5 -> X > 5 */
6054 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6056 else if (op_code == MAX_EXPR)
6057 /* MAX (X, 0) > -1 -> true */
6058 return omit_one_operand_loc (loc, type, integer_one_node, inner);
6060 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6061 /* MIN (X, 0) > 0 -> false
6062 MIN (X, 0) > 5 -> false */
6063 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6065 else
6066 /* MIN (X, 0) > -1 -> X > -1 */
6067 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6069 default:
6070 return NULL_TREE;
6074 /* T is an integer expression that is being multiplied by, divided by, or
6075 taken modulo a constant C (CODE says which operation and what kind of
6076 division or modulus). See if we can eliminate that operation by folding it with
6077 other operations already in T. WIDE_TYPE, if non-null, is a type that
6078 should be used for the computation if wider than our type.
6080 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6081 (X * 2) + (Y * 4). We must, however, be assured that either the original
6082 expression would not overflow or that overflow is undefined for the type
6083 in the language in question.
6085 If we return a non-null expression, it is an equivalent form of the
6086 original computation, but need not be in the original type.
6088 We set *STRICT_OVERFLOW_P to true if the return value depends on
6089 signed overflow being undefined. Otherwise we do not change
6090 *STRICT_OVERFLOW_P. */
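/* Editor's sketch (not part of the original source): the example in
   the comment corresponds to rewriting

     long f (long x, long y) { return (x * 8 + y * 16) / 4; }

   as x * 2 + y * 4.  For signed types this relies on signed overflow
   being undefined, which is exactly what gets recorded through
   *STRICT_OVERFLOW_P.  */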
6092 static tree
6093 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6094 bool *strict_overflow_p)
6096 /* To avoid exponential search depth, refuse to allow recursion past
6097 three levels. Beyond that (1) it's highly unlikely that we'll find
6098 something interesting and (2) we've probably processed it before
6099 when we built the inner expression. */
6101 static int depth;
6102 tree ret;
6104 if (depth > 3)
6105 return NULL;
6107 depth++;
6108 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6109 depth--;
6111 return ret;
6114 static tree
6115 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6116 bool *strict_overflow_p)
6118 tree type = TREE_TYPE (t);
6119 enum tree_code tcode = TREE_CODE (t);
6120 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6121 > GET_MODE_SIZE (TYPE_MODE (type)))
6122 ? wide_type : type);
6123 tree t1, t2;
6124 int same_p = tcode == code;
6125 tree op0 = NULL_TREE, op1 = NULL_TREE;
6126 bool sub_strict_overflow_p;
6128 /* Don't deal with constants of zero here; they confuse the code below. */
6129 if (integer_zerop (c))
6130 return NULL_TREE;
6132 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6133 op0 = TREE_OPERAND (t, 0);
6135 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6136 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6138 /* Note that we need not handle conditional operations here since fold
6139 already handles those cases. So just do arithmetic here. */
6140 switch (tcode)
6142 case INTEGER_CST:
6143 /* For a constant, we can always simplify if we are a multiply
6144 or (for divide and modulus) if it is a multiple of our constant. */
6145 if (code == MULT_EXPR
6146 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6148 tree tem = const_binop (code, fold_convert (ctype, t),
6149 fold_convert (ctype, c));
6150 /* If the multiplication overflowed, we lost information on it.
6151 See PR68142 and PR69845. */
6152 if (TREE_OVERFLOW (tem))
6153 return NULL_TREE;
6154 return tem;
6156 break;
6158 CASE_CONVERT: case NON_LVALUE_EXPR:
6159 /* If op0 is an expression ... */
6160 if ((COMPARISON_CLASS_P (op0)
6161 || UNARY_CLASS_P (op0)
6162 || BINARY_CLASS_P (op0)
6163 || VL_EXP_CLASS_P (op0)
6164 || EXPRESSION_CLASS_P (op0))
6165 /* ... and has wrapping overflow, and its type is smaller
6166 than ctype, then we cannot pass through as widening. */
6167 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6168 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6169 && (TYPE_PRECISION (ctype)
6170 > TYPE_PRECISION (TREE_TYPE (op0))))
6171 /* ... or this is a truncation (t is narrower than op0),
6172 then we cannot pass through this narrowing. */
6173 || (TYPE_PRECISION (type)
6174 < TYPE_PRECISION (TREE_TYPE (op0)))
6175 /* ... or signedness changes for division or modulus,
6176 then we cannot pass through this conversion. */
6177 || (code != MULT_EXPR
6178 && (TYPE_UNSIGNED (ctype)
6179 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6180 /* ... or has undefined overflow while the converted to
6181 type has not, we cannot do the operation in the inner type
6182 as that would introduce undefined overflow. */
6183 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6184 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6185 && !TYPE_OVERFLOW_UNDEFINED (type))))
6186 break;
6188 /* Pass the constant down and see if we can make a simplification. If
6189 we can, replace this expression with the inner simplification for
6190 possible later conversion to our or some other type. */
6191 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6192 && TREE_CODE (t2) == INTEGER_CST
6193 && !TREE_OVERFLOW (t2)
6194 && (0 != (t1 = extract_muldiv (op0, t2, code,
6195 code == MULT_EXPR
6196 ? ctype : NULL_TREE,
6197 strict_overflow_p))))
6198 return t1;
6199 break;
6201 case ABS_EXPR:
6202 /* If widening the type changes it from signed to unsigned, then we
6203 must avoid building ABS_EXPR itself as unsigned. */
6204 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6206 tree cstype = (*signed_type_for) (ctype);
6207 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6208 != 0)
6210 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6211 return fold_convert (ctype, t1);
6213 break;
6215 /* If the constant is negative, we cannot simplify this. */
6216 if (tree_int_cst_sgn (c) == -1)
6217 break;
6218 /* FALLTHROUGH */
6219 case NEGATE_EXPR:
6220 /* For division and modulus, type can't be unsigned, as e.g.
6221 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6222 For signed types, even with wrapping overflow, this is fine. */
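/* Editor's numeric check (illustration only): with 32-bit unsigned
   arithmetic and x = 4, -(x / 2U) is 0xfffffffe and
   -(x / 2U) / 2U is 0x7fffffff, whereas -((x / 2U) / 2U) is
   0xffffffff, confirming the counterexample above.  */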
6223 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6224 break;
6225 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6226 != 0)
6227 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6228 break;
6230 case MIN_EXPR: case MAX_EXPR:
6231 /* If widening the type changes the signedness, then we can't perform
6232 this optimization as that changes the result. */
6233 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6234 break;
6236 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6237 sub_strict_overflow_p = false;
6238 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6239 &sub_strict_overflow_p)) != 0
6240 && (t2 = extract_muldiv (op1, c, code, wide_type,
6241 &sub_strict_overflow_p)) != 0)
6243 if (tree_int_cst_sgn (c) < 0)
6244 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6245 if (sub_strict_overflow_p)
6246 *strict_overflow_p = true;
6247 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6248 fold_convert (ctype, t2));
6250 break;
6252 case LSHIFT_EXPR: case RSHIFT_EXPR:
6253 /* If the second operand is constant, this is a multiplication
6254 or floor division by a power of two, so we can treat it that
6255 way unless the multiplier or divisor overflows. Signed
6256 left-shift overflow is implementation-defined rather than
6257 undefined in C90, so do not convert signed left shift into
6258 multiplication. */
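/* Editor's note (illustration only): under these conditions x << 3 is
   treated as x * 8 (left shifts only for unsigned x, per the C90
   caveat above) and x >> 3 as x / 8 with FLOOR_DIV_EXPR semantics, so
   the mul/div machinery can be reused for constant shifts.  */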
6259 if (TREE_CODE (op1) == INTEGER_CST
6260 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6261 /* const_binop may not detect overflow correctly,
6262 so check for it explicitly here. */
6263 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6264 && 0 != (t1 = fold_convert (ctype,
6265 const_binop (LSHIFT_EXPR,
6266 size_one_node,
6267 op1)))
6268 && !TREE_OVERFLOW (t1))
6269 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6270 ? MULT_EXPR : FLOOR_DIV_EXPR,
6271 ctype,
6272 fold_convert (ctype, op0),
6273 t1),
6274 c, code, wide_type, strict_overflow_p);
6275 break;
6277 case PLUS_EXPR: case MINUS_EXPR:
6278 /* See if we can eliminate the operation on both sides. If we can, we
6279 can return a new PLUS or MINUS. If we can't, the only remaining
6280 cases where we can do anything are if the second operand is a
6281 constant. */
6282 sub_strict_overflow_p = false;
6283 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6284 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6285 if (t1 != 0 && t2 != 0
6286 && (code == MULT_EXPR
6287 /* If not multiplication, we can only do this if both operands
6288 are divisible by c. */
6289 || (multiple_of_p (ctype, op0, c)
6290 && multiple_of_p (ctype, op1, c))))
6292 if (sub_strict_overflow_p)
6293 *strict_overflow_p = true;
6294 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6295 fold_convert (ctype, t2));
6298 /* If this was a subtraction, negate OP1 and set it to be an addition.
6299 This simplifies the logic below. */
6300 if (tcode == MINUS_EXPR)
6302 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6303 /* If OP1 was not easily negatable, the constant may be OP0. */
6304 if (TREE_CODE (op0) == INTEGER_CST)
6306 std::swap (op0, op1);
6307 std::swap (t1, t2);
6311 if (TREE_CODE (op1) != INTEGER_CST)
6312 break;
6314 /* If either OP1 or C are negative, this optimization is not safe for
6315 some of the division and remainder types while for others we need
6316 to change the code. */
6317 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6319 if (code == CEIL_DIV_EXPR)
6320 code = FLOOR_DIV_EXPR;
6321 else if (code == FLOOR_DIV_EXPR)
6322 code = CEIL_DIV_EXPR;
6323 else if (code != MULT_EXPR
6324 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6325 break;
6328 /* If it's a multiply or a division/modulus operation of a multiple
6329 of our constant, do the operation and verify it doesn't overflow. */
6330 if (code == MULT_EXPR
6331 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6333 op1 = const_binop (code, fold_convert (ctype, op1),
6334 fold_convert (ctype, c));
6335 /* We allow the constant to overflow with wrapping semantics. */
6336 if (op1 == 0
6337 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6338 break;
6340 else
6341 break;
6343 /* If we have an unsigned type, we cannot widen the operation since it
6344 will change the result if the original computation overflowed. */
6345 if (TYPE_UNSIGNED (ctype) && ctype != type)
6346 break;
6348 /* If we were able to eliminate our operation from the first side,
6349 apply our operation to the second side and reform the PLUS. */
6350 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6351 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6353 /* The last case is if we are a multiply. In that case, we can
6354 apply the distributive law to commute the multiply and addition
6355 if the multiplication of the constants doesn't overflow
6356 and overflow is defined. With undefined overflow
6357 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6358 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6359 return fold_build2 (tcode, ctype,
6360 fold_build2 (code, ctype,
6361 fold_convert (ctype, op0),
6362 fold_convert (ctype, c)),
6363 op1);
6365 break;
6367 case MULT_EXPR:
6368 /* We have a special case here if we are doing something like
6369 (C * 8) % 4 since we know that's zero. */
6370 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6371 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6372 /* If the multiplication can overflow we cannot optimize this. */
6373 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6374 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6375 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6377 *strict_overflow_p = true;
6378 return omit_one_operand (type, integer_zero_node, op0);
6381 /* ... fall through ... */
6383 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6384 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6385 /* If we can extract our operation from the LHS, do so and return a
6386 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6387 do something only if the second operand is a constant. */
6388 if (same_p
6389 && (t1 = extract_muldiv (op0, c, code, wide_type,
6390 strict_overflow_p)) != 0)
6391 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6392 fold_convert (ctype, op1));
6393 else if (tcode == MULT_EXPR && code == MULT_EXPR
6394 && (t1 = extract_muldiv (op1, c, code, wide_type,
6395 strict_overflow_p)) != 0)
6396 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6397 fold_convert (ctype, t1));
6398 else if (TREE_CODE (op1) != INTEGER_CST)
6399 return 0;
6401 /* If these are the same operation types, we can associate them
6402 assuming no overflow. */
6403 if (tcode == code)
6405 bool overflow_p = false;
6406 bool overflow_mul_p;
6407 signop sign = TYPE_SIGN (ctype);
6408 unsigned prec = TYPE_PRECISION (ctype);
6409 wide_int mul = wi::mul (wi::to_wide (op1, prec),
6410 wi::to_wide (c, prec),
6411 sign, &overflow_mul_p);
6412 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6413 if (overflow_mul_p
6414 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6415 overflow_p = true;
6416 if (!overflow_p)
6417 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6418 wide_int_to_tree (ctype, mul));
6421 /* If these operations "cancel" each other, we have the main
6422 optimizations of this pass, which occur when either constant is a
6423 multiple of the other, in which case we replace this with an
6424 operation of either CODE or TCODE.
6426 If we have an unsigned type, we cannot do this since it will change
6427 the result if the original computation overflowed. */
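/* Editor's examples (illustration only), assuming undefined signed
   overflow: (x * 8) / 4 cancels to x * 2 because op1 = 8 is a
   multiple of c = 4, while (x * 2) / 8 cancels to x / 4 because
   c = 8 is a multiple of op1 = 2.  */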
6428 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6429 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6430 || (tcode == MULT_EXPR
6431 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6432 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6433 && code != MULT_EXPR)))
6435 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6437 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6438 *strict_overflow_p = true;
6439 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6440 fold_convert (ctype,
6441 const_binop (TRUNC_DIV_EXPR,
6442 op1, c)));
6444 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6446 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6447 *strict_overflow_p = true;
6448 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6449 fold_convert (ctype,
6450 const_binop (TRUNC_DIV_EXPR,
6451 c, op1)));
6454 break;
6456 default:
6457 break;
6460 return 0;
6463 /* Return a node which has the indicated constant VALUE (either 0 or
6464 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6465 and is of the indicated TYPE. */
6467 tree
6468 constant_boolean_node (bool value, tree type)
6470 if (type == integer_type_node)
6471 return value ? integer_one_node : integer_zero_node;
6472 else if (type == boolean_type_node)
6473 return value ? boolean_true_node : boolean_false_node;
6474 else if (TREE_CODE (type) == VECTOR_TYPE)
6475 return build_vector_from_val (type,
6476 build_int_cst (TREE_TYPE (type),
6477 value ? -1 : 0));
6478 else
6479 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6483 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6484 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6485 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6486 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6487 COND is the first argument to CODE; otherwise (as in the example
6488 given here), it is the second argument. TYPE is the type of the
6489 original expression. Return NULL_TREE if no simplification is
6490 possible. */
6492 static tree
6493 fold_binary_op_with_conditional_arg (location_t loc,
6494 enum tree_code code,
6495 tree type, tree op0, tree op1,
6496 tree cond, tree arg, int cond_first_p)
6498 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6499 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6500 tree test, true_value, false_value;
6501 tree lhs = NULL_TREE;
6502 tree rhs = NULL_TREE;
6503 enum tree_code cond_code = COND_EXPR;
6505 if (TREE_CODE (cond) == COND_EXPR
6506 || TREE_CODE (cond) == VEC_COND_EXPR)
6508 test = TREE_OPERAND (cond, 0);
6509 true_value = TREE_OPERAND (cond, 1);
6510 false_value = TREE_OPERAND (cond, 2);
6511 /* If this operand is an expression that throws (and so has void
6512 type), it does not make sense to try to perform a logical or
6513 arithmetic operation involving it.
6514 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6515 lhs = true_value;
6516 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6517 rhs = false_value;
6519 else if (!(TREE_CODE (type) != VECTOR_TYPE
6520 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6522 tree testtype = TREE_TYPE (cond);
6523 test = cond;
6524 true_value = constant_boolean_node (true, testtype);
6525 false_value = constant_boolean_node (false, testtype);
6527 else
6528 /* Detect the case of mixing vector and scalar types - bail out. */
6529 return NULL_TREE;
6531 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6532 cond_code = VEC_COND_EXPR;
6534 /* This transformation is only worthwhile if we don't have to wrap ARG
6535 in a SAVE_EXPR and the operation can be simplified without recursing
6536 on at least one of the branches once it's pushed inside the COND_EXPR. */
6537 if (!TREE_CONSTANT (arg)
6538 && (TREE_SIDE_EFFECTS (arg)
6539 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6540 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6541 return NULL_TREE;
6543 arg = fold_convert_loc (loc, arg_type, arg);
6544 if (lhs == 0)
6546 true_value = fold_convert_loc (loc, cond_type, true_value);
6547 if (cond_first_p)
6548 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6549 else
6550 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6552 if (rhs == 0)
6554 false_value = fold_convert_loc (loc, cond_type, false_value);
6555 if (cond_first_p)
6556 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6557 else
6558 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6561 /* Check that we have simplified at least one of the branches. */
6562 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6563 return NULL_TREE;
6565 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6569 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6571 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6572 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6573 ADDEND is the same as X.
6575 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6576 and finite. The problematic cases are when X is zero, and its mode
6577 has signed zeros. In the case of rounding towards -infinity,
6578 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6579 modes, X + 0 is not the same as X because -0 + 0 is 0. */
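/* Editor's check of the reasoning above (illustration only, using the
   C99 fesetround interface): under the default rounding mode,
   -0.0 + 0.0 evaluates to +0.0, so folding X + 0.0 to X would be
   wrong for X == -0.0; after fesetround (FE_DOWNWARD), 0.0 - 0.0
   evaluates to -0.0, so folding X - 0.0 to X would be wrong for
   X == +0.0.  */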
6581 bool
6582 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6584 if (!real_zerop (addend))
6585 return false;
6587 /* Don't allow the fold with -fsignaling-nans. */
6588 if (HONOR_SNANS (element_mode (type)))
6589 return false;
6591 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6592 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6593 return true;
6595 /* In a vector or complex, we would need to check the sign of all zeros. */
6596 if (TREE_CODE (addend) != REAL_CST)
6597 return false;
6599 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6600 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6601 negate = !negate;
6603 /* The mode has signed zeros, and we have to honor their sign.
6604 In this situation, there is only one case we can return true for.
6605 X - 0 is the same as X unless rounding towards -infinity is
6606 supported. */
6607 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6610 /* Subroutine of fold() that optimizes comparisons of a division by
6611 a nonzero integer constant against an integer constant, i.e.
6612 X/C1 op C2.
6614 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6615 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6616 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6618 The function returns the constant folded tree if a simplification
6619 can be made, and NULL_TREE otherwise. */
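/* Editor's worked example (illustration only): for unsigned X,
   X / 4 == 3 holds exactly for X in [12, 15], so the comparison
   becomes the range check 12 <= X && X <= 15 with lo = 12 and
   hi = 12 + (4 - 1); likewise X / 4 > 3 becomes X > 15.  */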
6621 static tree
6622 fold_div_compare (location_t loc,
6623 enum tree_code code, tree type, tree arg0, tree arg1)
6625 tree prod, tmp, hi, lo;
6626 tree arg00 = TREE_OPERAND (arg0, 0);
6627 tree arg01 = TREE_OPERAND (arg0, 1);
6628 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6629 bool neg_overflow = false;
6630 bool overflow;
6632 /* We have to do this the hard way to detect unsigned overflow.
6633 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6634 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6635 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6636 neg_overflow = false;
6638 if (sign == UNSIGNED)
6640 tmp = int_const_binop (MINUS_EXPR, arg01,
6641 build_int_cst (TREE_TYPE (arg01), 1));
6642 lo = prod;
6644 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6645 val = wi::add (prod, tmp, sign, &overflow);
6646 hi = force_fit_type (TREE_TYPE (arg00), val,
6647 -1, overflow | TREE_OVERFLOW (prod));
6649 else if (tree_int_cst_sgn (arg01) >= 0)
6651 tmp = int_const_binop (MINUS_EXPR, arg01,
6652 build_int_cst (TREE_TYPE (arg01), 1));
6653 switch (tree_int_cst_sgn (arg1))
6655 case -1:
6656 neg_overflow = true;
6657 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6658 hi = prod;
6659 break;
6661 case 0:
6662 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6663 hi = tmp;
6664 break;
6666 case 1:
6667 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6668 lo = prod;
6669 break;
6671 default:
6672 gcc_unreachable ();
6675 else
6677 /* A negative divisor reverses the relational operators. */
6678 code = swap_tree_comparison (code);
6680 tmp = int_const_binop (PLUS_EXPR, arg01,
6681 build_int_cst (TREE_TYPE (arg01), 1));
6682 switch (tree_int_cst_sgn (arg1))
6684 case -1:
6685 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6686 lo = prod;
6687 break;
6689 case 0:
6690 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6691 lo = tmp;
6692 break;
6694 case 1:
6695 neg_overflow = true;
6696 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6697 hi = prod;
6698 break;
6700 default:
6701 gcc_unreachable ();
6705 switch (code)
6707 case EQ_EXPR:
6708 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6709 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6710 if (TREE_OVERFLOW (hi))
6711 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6712 if (TREE_OVERFLOW (lo))
6713 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6714 return build_range_check (loc, type, arg00, 1, lo, hi);
6716 case NE_EXPR:
6717 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6718 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6719 if (TREE_OVERFLOW (hi))
6720 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6721 if (TREE_OVERFLOW (lo))
6722 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6723 return build_range_check (loc, type, arg00, 0, lo, hi);
6725 case LT_EXPR:
6726 if (TREE_OVERFLOW (lo))
6728 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6729 return omit_one_operand_loc (loc, type, tmp, arg00);
6731 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6733 case LE_EXPR:
6734 if (TREE_OVERFLOW (hi))
6736 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6737 return omit_one_operand_loc (loc, type, tmp, arg00);
6739 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6741 case GT_EXPR:
6742 if (TREE_OVERFLOW (hi))
6744 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6745 return omit_one_operand_loc (loc, type, tmp, arg00);
6747 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6749 case GE_EXPR:
6750 if (TREE_OVERFLOW (lo))
6752 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6753 return omit_one_operand_loc (loc, type, tmp, arg00);
6755 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6757 default:
6758 break;
6761 return NULL_TREE;
6765 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6766 equality/inequality test, then return a simplified form of the test
6767 using a sign test. Otherwise return NULL. TYPE is the desired
6768 result type. */
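/* Editor's example (illustration only): if C is the sign bit of A's
   type, e.g. 0x80000000 for a 32-bit A, then (A & C) != 0 folds to
   (signed) A < 0 and (A & C) == 0 folds to (signed) A >= 0.  */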
6770 static tree
6771 fold_single_bit_test_into_sign_test (location_t loc,
6772 enum tree_code code, tree arg0, tree arg1,
6773 tree result_type)
6775 /* If this is testing a single bit, we can optimize the test. */
6776 if ((code == NE_EXPR || code == EQ_EXPR)
6777 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6778 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6780 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6781 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6782 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6784 if (arg00 != NULL_TREE
6785 /* This is only a win if casting to a signed type is cheap,
6786 i.e. when arg00's type is not a partial mode. */
6787 && TYPE_PRECISION (TREE_TYPE (arg00))
6788 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6790 tree stype = signed_type_for (TREE_TYPE (arg00));
6791 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6792 result_type,
6793 fold_convert_loc (loc, stype, arg00),
6794 build_int_cst (stype, 0));
6798 return NULL_TREE;
6801 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6802 equality/inequality test, then return a simplified form of
6803 the test using shifts and logical operations. Otherwise return
6804 NULL. TYPE is the desired result type. */
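/* Editor's example (illustration only): (x & 8) != 0 becomes
   (x >> 3) & 1, and (x & 8) == 0 becomes ((x >> 3) ^ 1) & 1; the AND
   is emitted last so that it can combine with surrounding
   operations.  */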
6806 tree
6807 fold_single_bit_test (location_t loc, enum tree_code code,
6808 tree arg0, tree arg1, tree result_type)
6810 /* If this is testing a single bit, we can optimize the test. */
6811 if ((code == NE_EXPR || code == EQ_EXPR)
6812 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6813 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6815 tree inner = TREE_OPERAND (arg0, 0);
6816 tree type = TREE_TYPE (arg0);
6817 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6818 machine_mode operand_mode = TYPE_MODE (type);
6819 int ops_unsigned;
6820 tree signed_type, unsigned_type, intermediate_type;
6821 tree tem, one;
6823 /* First, see if we can fold the single bit test into a sign-bit
6824 test. */
6825 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6826 result_type);
6827 if (tem)
6828 return tem;
6830 /* Otherwise we have (A & C) != 0 where C is a single bit,
6831 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6832 Similarly for (A & C) == 0. */
6834 /* If INNER is a right shift of a constant and it plus BITNUM does
6835 not overflow, adjust BITNUM and INNER. */
6836 if (TREE_CODE (inner) == RSHIFT_EXPR
6837 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6838 && bitnum < TYPE_PRECISION (type)
6839 && wi::ltu_p (TREE_OPERAND (inner, 1),
6840 TYPE_PRECISION (type) - bitnum))
6842 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6843 inner = TREE_OPERAND (inner, 0);
6846 /* If we are going to be able to omit the AND below, we must do our
6847 operations as unsigned. If we must use the AND, we have a choice.
6848 Normally unsigned is faster, but for some machines signed is. */
6849 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6850 && !flag_syntax_only) ? 0 : 1;
6852 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6853 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6854 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6855 inner = fold_convert_loc (loc, intermediate_type, inner);
6857 if (bitnum != 0)
6858 inner = build2 (RSHIFT_EXPR, intermediate_type,
6859 inner, size_int (bitnum));
6861 one = build_int_cst (intermediate_type, 1);
6863 if (code == EQ_EXPR)
6864 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6866 /* Put the AND last so it can combine with more things. */
6867 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6869 /* Make sure to return the proper type. */
6870 inner = fold_convert_loc (loc, result_type, inner);
6872 return inner;
6874 return NULL_TREE;
6877 /* Check whether we are allowed to reorder operands arg0 and arg1,
6878 such that the evaluation of arg1 occurs before arg0. */
6880 static bool
6881 reorder_operands_p (const_tree arg0, const_tree arg1)
6883 if (! flag_evaluation_order)
6884 return true;
6885 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6886 return true;
6887 return ! TREE_SIDE_EFFECTS (arg0)
6888 && ! TREE_SIDE_EFFECTS (arg1);
6891 /* Test whether it is preferable to swap two operands, ARG0 and
6892 ARG1, for example because ARG0 is an integer constant and ARG1
6893 isn't. If REORDER is true, only recommend swapping if we can
6894 evaluate the operands in reverse order. */
6896 bool
6897 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6899 if (CONSTANT_CLASS_P (arg1))
6900 return 0;
6901 if (CONSTANT_CLASS_P (arg0))
6902 return 1;
6904 STRIP_NOPS (arg0);
6905 STRIP_NOPS (arg1);
6907 if (TREE_CONSTANT (arg1))
6908 return 0;
6909 if (TREE_CONSTANT (arg0))
6910 return 1;
6912 if (reorder && flag_evaluation_order
6913 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6914 return 0;
6916 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6917 for commutative and comparison operators. Ensuring a canonical
6918 form allows the optimizers to find additional redundancies without
6919 having to explicitly check for both orderings. */
6920 if (TREE_CODE (arg0) == SSA_NAME
6921 && TREE_CODE (arg1) == SSA_NAME
6922 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6923 return 1;
6925 /* Put SSA_NAMEs last. */
6926 if (TREE_CODE (arg1) == SSA_NAME)
6927 return 0;
6928 if (TREE_CODE (arg0) == SSA_NAME)
6929 return 1;
6931 /* Put variables last. */
6932 if (DECL_P (arg1))
6933 return 0;
6934 if (DECL_P (arg0))
6935 return 1;
6937 return 0;
6941 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6942 means A >= Y && A != MAX, but in this case we know that
6943 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6945 static tree
6946 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6948 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6950 if (TREE_CODE (bound) == LT_EXPR)
6951 a = TREE_OPERAND (bound, 0);
6952 else if (TREE_CODE (bound) == GT_EXPR)
6953 a = TREE_OPERAND (bound, 1);
6954 else
6955 return NULL_TREE;
6957 typea = TREE_TYPE (a);
6958 if (!INTEGRAL_TYPE_P (typea)
6959 && !POINTER_TYPE_P (typea))
6960 return NULL_TREE;
6962 if (TREE_CODE (ineq) == LT_EXPR)
6964 a1 = TREE_OPERAND (ineq, 1);
6965 y = TREE_OPERAND (ineq, 0);
6967 else if (TREE_CODE (ineq) == GT_EXPR)
6969 a1 = TREE_OPERAND (ineq, 0);
6970 y = TREE_OPERAND (ineq, 1);
6972 else
6973 return NULL_TREE;
6975 if (TREE_TYPE (a1) != typea)
6976 return NULL_TREE;
6978 if (POINTER_TYPE_P (typea))
6980 /* Convert the pointer types into integer before taking the difference. */
6981 tree ta = fold_convert_loc (loc, ssizetype, a);
6982 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6983 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6985 else
6986 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6988 if (!diff || !integer_onep (diff))
6989 return NULL_TREE;
6991 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6994 /* Fold a sum or difference involving at least one multiplication.
6995 Returns the folded tree or NULL if no simplification could be made. */
6997 static tree
6998 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6999 tree arg0, tree arg1)
7001 tree arg00, arg01, arg10, arg11;
7002 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7004 /* (A * C) +- (B * C) -> (A+-B) * C.
7005 (A * C) +- A -> A * (C+-1).
7006 We are most concerned about the case where C is a constant,
7007 but other combinations show up during loop reduction. Since
7008 it is not difficult, try all four possibilities. */
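/* Editor's extra example for the power-of-two case handled further
   down (illustration only): i * 4 + j * 2 has no common multiplicand,
   but 4 and 2 share the factor 2, so the sum can be rewritten as
   (i * 2 + j) * 2, a shape that often appears in multi-dimensional
   array indexing.  */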
7010 if (TREE_CODE (arg0) == MULT_EXPR)
7012 arg00 = TREE_OPERAND (arg0, 0);
7013 arg01 = TREE_OPERAND (arg0, 1);
7015 else if (TREE_CODE (arg0) == INTEGER_CST)
7017 arg00 = build_one_cst (type);
7018 arg01 = arg0;
7020 else
7022 /* We cannot generate constant 1 for fract. */
7023 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7024 return NULL_TREE;
7025 arg00 = arg0;
7026 arg01 = build_one_cst (type);
7028 if (TREE_CODE (arg1) == MULT_EXPR)
7030 arg10 = TREE_OPERAND (arg1, 0);
7031 arg11 = TREE_OPERAND (arg1, 1);
7033 else if (TREE_CODE (arg1) == INTEGER_CST)
7035 arg10 = build_one_cst (type);
7036 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7037 the purpose of this canonicalization. */
7038 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7039 && negate_expr_p (arg1)
7040 && code == PLUS_EXPR)
7042 arg11 = negate_expr (arg1);
7043 code = MINUS_EXPR;
7045 else
7046 arg11 = arg1;
7048 else
7050 /* We cannot generate constant 1 for fract. */
7051 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7052 return NULL_TREE;
7053 arg10 = arg1;
7054 arg11 = build_one_cst (type);
7056 same = NULL_TREE;
7058 if (operand_equal_p (arg01, arg11, 0))
7059 same = arg01, alt0 = arg00, alt1 = arg10;
7060 else if (operand_equal_p (arg00, arg10, 0))
7061 same = arg00, alt0 = arg01, alt1 = arg11;
7062 else if (operand_equal_p (arg00, arg11, 0))
7063 same = arg00, alt0 = arg01, alt1 = arg10;
7064 else if (operand_equal_p (arg01, arg10, 0))
7065 same = arg01, alt0 = arg00, alt1 = arg11;
7067 /* No identical multiplicands; see if we can find a common
7068 power-of-two factor in non-power-of-two multiplies. This
7069 can help in multi-dimensional array access. */
7070 else if (tree_fits_shwi_p (arg01)
7071 && tree_fits_shwi_p (arg11))
7073 HOST_WIDE_INT int01, int11, tmp;
7074 bool swap = false;
7075 tree maybe_same;
7076 int01 = tree_to_shwi (arg01);
7077 int11 = tree_to_shwi (arg11);
7079 /* Move min of absolute values to int11. */
7080 if (absu_hwi (int01) < absu_hwi (int11))
7082 tmp = int01, int01 = int11, int11 = tmp;
7083 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7084 maybe_same = arg01;
7085 swap = true;
7087 else
7088 maybe_same = arg11;
7090 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7091 /* The remainder should not be a constant, otherwise we
7092 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7093 increase the number of multiplications necessary. */
7094 && TREE_CODE (arg10) != INTEGER_CST)
7096 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7097 build_int_cst (TREE_TYPE (arg00),
7098 int01 / int11));
7099 alt1 = arg10;
7100 same = maybe_same;
7101 if (swap)
7102 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7106 if (same)
7107 return fold_build2_loc (loc, MULT_EXPR, type,
7108 fold_build2_loc (loc, code, type,
7109 fold_convert_loc (loc, type, alt0),
7110 fold_convert_loc (loc, type, alt1)),
7111 fold_convert_loc (loc, type, same));
7113 return NULL_TREE;
7116 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7117 specified by EXPR into the buffer PTR of length LEN bytes.
7118 Return the number of bytes placed in the buffer, or zero
7119 upon failure. */
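/* Editor's worked example (illustration only): encoding the 32-bit
   constant 0x01020304 yields the bytes { 0x04, 0x03, 0x02, 0x01 } for
   a little-endian target and { 0x01, 0x02, 0x03, 0x04 } for a
   big-endian one; the word arithmetic below extends this to constants
   wider than a target word.  */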
7121 static int
7122 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7124 tree type = TREE_TYPE (expr);
7125 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7126 int byte, offset, word, words;
7127 unsigned char value;
7129 if ((off == -1 && total_bytes > len)
7130 || off >= total_bytes)
7131 return 0;
7132 if (off == -1)
7133 off = 0;
7134 words = total_bytes / UNITS_PER_WORD;
7136 for (byte = 0; byte < total_bytes; byte++)
7138 int bitpos = byte * BITS_PER_UNIT;
7139 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7140 number of bytes. */
7141 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7143 if (total_bytes > UNITS_PER_WORD)
7145 word = byte / UNITS_PER_WORD;
7146 if (WORDS_BIG_ENDIAN)
7147 word = (words - 1) - word;
7148 offset = word * UNITS_PER_WORD;
7149 if (BYTES_BIG_ENDIAN)
7150 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7151 else
7152 offset += byte % UNITS_PER_WORD;
7154 else
7155 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7156 if (offset >= off
7157 && offset - off < len)
7158 ptr[offset - off] = value;
7160 return MIN (len, total_bytes - off);
7164 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7165 specified by EXPR into the buffer PTR of length LEN bytes.
7166 Return the number of bytes placed in the buffer, or zero
7167 upon failure. */
7169 static int
7170 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7172 tree type = TREE_TYPE (expr);
7173 machine_mode mode = TYPE_MODE (type);
7174 int total_bytes = GET_MODE_SIZE (mode);
7175 FIXED_VALUE_TYPE value;
7176 tree i_value, i_type;
7178 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7179 return 0;
7181 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7183 if (NULL_TREE == i_type
7184 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7185 return 0;
7187 value = TREE_FIXED_CST (expr);
7188 i_value = double_int_to_tree (i_type, value.data);
7190 return native_encode_int (i_value, ptr, len, off);
7194 /* Subroutine of native_encode_expr. Encode the REAL_CST
7195 specified by EXPR into the buffer PTR of length LEN bytes.
7196 Return the number of bytes placed in the buffer, or zero
7197 upon failure. */
7199 static int
7200 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7202 tree type = TREE_TYPE (expr);
7203 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7204 int byte, offset, word, words, bitpos;
7205 unsigned char value;
7207 /* There are always 32 bits in each long, no matter the size of
7208 the host's long. We handle floating point representations with
7209 up to 192 bits. */
7210 long tmp[6];
7212 if ((off == -1 && total_bytes > len)
7213 || off >= total_bytes)
7214 return 0;
7215 if (off == -1)
7216 off = 0;
7217 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7219 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7221 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7222 bitpos += BITS_PER_UNIT)
7224 byte = (bitpos / BITS_PER_UNIT) & 3;
7225 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7227 if (UNITS_PER_WORD < 4)
7229 word = byte / UNITS_PER_WORD;
7230 if (WORDS_BIG_ENDIAN)
7231 word = (words - 1) - word;
7232 offset = word * UNITS_PER_WORD;
7233 if (BYTES_BIG_ENDIAN)
7234 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7235 else
7236 offset += byte % UNITS_PER_WORD;
7238 else
7239 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7240 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7241 if (offset >= off
7242 && offset - off < len)
7243 ptr[offset - off] = value;
7245 return MIN (len, total_bytes - off);
7248 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7249 specified by EXPR into the buffer PTR of length LEN bytes.
7250 Return the number of bytes placed in the buffer, or zero
7251 upon failure. */
7253 static int
7254 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7256 int rsize, isize;
7257 tree part;
7259 part = TREE_REALPART (expr);
7260 rsize = native_encode_expr (part, ptr, len, off);
7261 if (off == -1
7262 && rsize == 0)
7263 return 0;
7264 part = TREE_IMAGPART (expr);
7265 if (off != -1)
7266 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7267 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7268 if (off == -1
7269 && isize != rsize)
7270 return 0;
7271 return rsize + isize;
7275 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7276 specified by EXPR into the buffer PTR of length LEN bytes.
7277 Return the number of bytes placed in the buffer, or zero
7278 upon failure. */
7280 static int
7281 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7283 unsigned i, count;
7284 int size, offset;
7285 tree itype, elem;
7287 offset = 0;
7288 count = VECTOR_CST_NELTS (expr);
7289 itype = TREE_TYPE (TREE_TYPE (expr));
7290 size = GET_MODE_SIZE (TYPE_MODE (itype));
7291 for (i = 0; i < count; i++)
7293 if (off >= size)
7295 off -= size;
7296 continue;
7298 elem = VECTOR_CST_ELT (expr, i);
7299 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7300 if ((off == -1 && res != size)
7301 || res == 0)
7302 return 0;
7303 offset += res;
7304 if (offset >= len)
7305 return offset;
7306 if (off != -1)
7307 off = 0;
7309 return offset;
7313 /* Subroutine of native_encode_expr. Encode the STRING_CST
7314 specified by EXPR into the buffer PTR of length LEN bytes.
7315 Return the number of bytes placed in the buffer, or zero
7316 upon failure. */
7318 static int
7319 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7321 tree type = TREE_TYPE (expr);
7322 HOST_WIDE_INT total_bytes;
7324 if (TREE_CODE (type) != ARRAY_TYPE
7325 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7326 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7327 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7328 return 0;
7329 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7330 if ((off == -1 && total_bytes > len)
7331 || off >= total_bytes)
7332 return 0;
7333 if (off == -1)
7334 off = 0;
7335 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7337 int written = 0;
7338 if (off < TREE_STRING_LENGTH (expr))
7340 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7341 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7343 memset (ptr + written, 0,
7344 MIN (total_bytes - written, len - written));
7346 else
7347 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7348 return MIN (total_bytes - off, len);
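   /* Worked example (editorial, not in the original source): the STRING_CST
      "ab" stored in a char[5] has total_bytes == 5 but TREE_STRING_LENGTH
      == 3 (including the terminating NUL), so the first branch above copies
      three bytes and zero-fills the remaining two.  */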
7352 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7353 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7354 buffer PTR of length LEN bytes. If OFF is not -1 then start
7355 the encoding at byte offset OFF and encode at most LEN bytes.
7356 Return the number of bytes placed in the buffer, or zero upon failure. */
7358 int
7359 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7361 /* We don't support starting at a negative offset, and -1 is special. */
7362 if (off < -1)
7363 return 0;
7365 switch (TREE_CODE (expr))
7367 case INTEGER_CST:
7368 return native_encode_int (expr, ptr, len, off);
7370 case REAL_CST:
7371 return native_encode_real (expr, ptr, len, off);
7373 case FIXED_CST:
7374 return native_encode_fixed (expr, ptr, len, off);
7376 case COMPLEX_CST:
7377 return native_encode_complex (expr, ptr, len, off);
7379 case VECTOR_CST:
7380 return native_encode_vector (expr, ptr, len, off);
7382 case STRING_CST:
7383 return native_encode_string (expr, ptr, len, off);
7385 default:
7386 return 0;
7391 /* Subroutine of native_interpret_expr. Interpret the contents of
7392 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7393 If the buffer cannot be interpreted, return NULL_TREE. */
7395 static tree
7396 native_interpret_int (tree type, const unsigned char *ptr, int len)
7398 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7400 if (total_bytes > len
7401 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7402 return NULL_TREE;
7404 wide_int result = wi::from_buffer (ptr, total_bytes);
7406 return wide_int_to_tree (type, result);
7410 /* Subroutine of native_interpret_expr. Interpret the contents of
7411 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7412 If the buffer cannot be interpreted, return NULL_TREE. */
7414 static tree
7415 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7417 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7418 double_int result;
7419 FIXED_VALUE_TYPE fixed_value;
7421 if (total_bytes > len
7422 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7423 return NULL_TREE;
7425 result = double_int::from_buffer (ptr, total_bytes);
7426 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7428 return build_fixed (type, fixed_value);
7432 /* Subroutine of native_interpret_expr. Interpret the contents of
7433 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7434 If the buffer cannot be interpreted, return NULL_TREE. */
7436 static tree
7437 native_interpret_real (tree type, const unsigned char *ptr, int len)
7439 machine_mode mode = TYPE_MODE (type);
7440 int total_bytes = GET_MODE_SIZE (mode);
7441 unsigned char value;
7442 /* There are always 32 bits in each long, no matter the size of
7443 the host's long. We handle floating point representations with
7444 up to 192 bits. */
7445 REAL_VALUE_TYPE r;
7446 long tmp[6];
7448 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7449 if (total_bytes > len || total_bytes > 24)
7450 return NULL_TREE;
7451 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7453 memset (tmp, 0, sizeof (tmp));
7454 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7455 bitpos += BITS_PER_UNIT)
7457 /* Both OFFSET and BYTE index within a long;
7458 bitpos indexes the whole float. */
7459 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7460 if (UNITS_PER_WORD < 4)
7462 int word = byte / UNITS_PER_WORD;
7463 if (WORDS_BIG_ENDIAN)
7464 word = (words - 1) - word;
7465 offset = word * UNITS_PER_WORD;
7466 if (BYTES_BIG_ENDIAN)
7467 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7468 else
7469 offset += byte % UNITS_PER_WORD;
7471 else
7473 offset = byte;
7474 if (BYTES_BIG_ENDIAN)
7476 /* Reverse bytes within each long, or within the entire float
7477 if it's smaller than a long (for HFmode). */
7478 offset = MIN (3, total_bytes - 1) - offset;
7479 gcc_assert (offset >= 0);
7482 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7484 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7487 real_from_target (&r, tmp, mode);
7488 return build_real (type, r);
7492 /* Subroutine of native_interpret_expr. Interpret the contents of
7493 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7494 If the buffer cannot be interpreted, return NULL_TREE. */
7496 static tree
7497 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7499 tree etype, rpart, ipart;
7500 int size;
7502 etype = TREE_TYPE (type);
7503 size = GET_MODE_SIZE (TYPE_MODE (etype));
7504 if (size * 2 > len)
7505 return NULL_TREE;
7506 rpart = native_interpret_expr (etype, ptr, size);
7507 if (!rpart)
7508 return NULL_TREE;
7509 ipart = native_interpret_expr (etype, ptr+size, size);
7510 if (!ipart)
7511 return NULL_TREE;
7512 return build_complex (type, rpart, ipart);
7516 /* Subroutine of native_interpret_expr. Interpret the contents of
7517 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7518 If the buffer cannot be interpreted, return NULL_TREE. */
7520 static tree
7521 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7523 tree etype, elem;
7524 int i, size, count;
7525 tree *elements;
7527 etype = TREE_TYPE (type);
7528 size = GET_MODE_SIZE (TYPE_MODE (etype));
7529 count = TYPE_VECTOR_SUBPARTS (type);
7530 if (size * count > len)
7531 return NULL_TREE;
7533 elements = XALLOCAVEC (tree, count);
7534 for (i = count - 1; i >= 0; i--)
7536 elem = native_interpret_expr (etype, ptr+(i*size), size);
7537 if (!elem)
7538 return NULL_TREE;
7539 elements[i] = elem;
7541 return build_vector (type, elements);
7545 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7546 the buffer PTR of length LEN as a constant of type TYPE. For
7547 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7548 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7549 return NULL_TREE. */
7551 tree
7552 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7554 switch (TREE_CODE (type))
7556 case INTEGER_TYPE:
7557 case ENUMERAL_TYPE:
7558 case BOOLEAN_TYPE:
7559 case POINTER_TYPE:
7560 case REFERENCE_TYPE:
7561 return native_interpret_int (type, ptr, len);
7563 case REAL_TYPE:
7564 return native_interpret_real (type, ptr, len);
7566 case FIXED_POINT_TYPE:
7567 return native_interpret_fixed (type, ptr, len);
7569 case COMPLEX_TYPE:
7570 return native_interpret_complex (type, ptr, len);
7572 case VECTOR_TYPE:
7573 return native_interpret_vector (type, ptr, len);
7575 default:
7576 return NULL_TREE;
7580 /* Returns true if we can interpret the contents of a native encoding
7581 as TYPE. */
7583 static bool
7584 can_native_interpret_type_p (tree type)
7586 switch (TREE_CODE (type))
7588 case INTEGER_TYPE:
7589 case ENUMERAL_TYPE:
7590 case BOOLEAN_TYPE:
7591 case POINTER_TYPE:
7592 case REFERENCE_TYPE:
7593 case FIXED_POINT_TYPE:
7594 case REAL_TYPE:
7595 case COMPLEX_TYPE:
7596 case VECTOR_TYPE:
7597 return true;
7598 default:
7599 return false;
7603 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7604 TYPE at compile-time. If we're unable to perform the conversion
7605 return NULL_TREE. */
7607 static tree
7608 fold_view_convert_expr (tree type, tree expr)
7610 /* We support up to 512-bit values (for V8DFmode). */
7611 unsigned char buffer[64];
7612 int len;
7614 /* Check that the host and target are sane. */
7615 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7616 return NULL_TREE;
7618 len = native_encode_expr (expr, buffer, sizeof (buffer));
7619 if (len == 0)
7620 return NULL_TREE;
7622 return native_interpret_expr (type, buffer, len);
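   /* Illustration (editorial, assuming a little-endian target with 32-bit
      int and IEEE float): VIEW_CONVERT_EXPR<int>(1.0f) folds here to the
      INTEGER_CST 0x3f800000; native_encode_expr writes the bytes
      00 00 80 3f and native_interpret_expr reads them back as an int.  */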
7625 /* Build an expression for the address of T. Folds away INDIRECT_REF
7626 to avoid confusing the gimplify process. */
7628 tree
7629 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7631 /* The size of the object is not relevant when talking about its address. */
7632 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7633 t = TREE_OPERAND (t, 0);
7635 if (TREE_CODE (t) == INDIRECT_REF)
7637 t = TREE_OPERAND (t, 0);
7639 if (TREE_TYPE (t) != ptrtype)
7640 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7642 else if (TREE_CODE (t) == MEM_REF
7643 && integer_zerop (TREE_OPERAND (t, 1)))
7644 return TREE_OPERAND (t, 0);
7645 else if (TREE_CODE (t) == MEM_REF
7646 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7647 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7648 TREE_OPERAND (t, 0),
7649 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7650 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7652 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7654 if (TREE_TYPE (t) != ptrtype)
7655 t = fold_convert_loc (loc, ptrtype, t);
7657 else
7658 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7660 return t;
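   /* Illustration (editorial): &*p folds to p (with a cast if the pointer
      types differ), and the address of MEM_REF [p, 0] folds back to p, so
      gimplification never sees an ADDR_EXPR wrapped around an
      indirection.  */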
7663 /* Build an expression for the address of T. */
7665 tree
7666 build_fold_addr_expr_loc (location_t loc, tree t)
7668 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7670 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7673 /* Fold a unary expression of code CODE and type TYPE with operand
7674 OP0. Return the folded expression if folding is successful.
7675 Otherwise, return NULL_TREE. */
7677 tree
7678 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7680 tree tem;
7681 tree arg0;
7682 enum tree_code_class kind = TREE_CODE_CLASS (code);
7684 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7685 && TREE_CODE_LENGTH (code) == 1);
7687 arg0 = op0;
7688 if (arg0)
7690 if (CONVERT_EXPR_CODE_P (code)
7691 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7693 /* Don't use STRIP_NOPS, because signedness of argument type
7694 matters. */
7695 STRIP_SIGN_NOPS (arg0);
7697 else
7699 /* Strip any conversions that don't change the mode. This
7700 is safe for every expression, except for a comparison
7701 expression because its signedness is derived from its
7702 operands.
7704 Note that this is done as an internal manipulation within
7705 the constant folder, in order to find the simplest
7706 representation of the arguments so that their form can be
7707 studied. In any case, the appropriate type conversions
7708 should be put back in the tree that will get out of the
7709 constant folder. */
7710 STRIP_NOPS (arg0);
7713 if (CONSTANT_CLASS_P (arg0))
7715 tree tem = const_unop (code, type, arg0);
7716 if (tem)
7718 if (TREE_TYPE (tem) != type)
7719 tem = fold_convert_loc (loc, type, tem);
7720 return tem;
7725 tem = generic_simplify (loc, code, type, op0);
7726 if (tem)
7727 return tem;
7729 if (TREE_CODE_CLASS (code) == tcc_unary)
7731 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7732 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7733 fold_build1_loc (loc, code, type,
7734 fold_convert_loc (loc, TREE_TYPE (op0),
7735 TREE_OPERAND (arg0, 1))));
7736 else if (TREE_CODE (arg0) == COND_EXPR)
7738 tree arg01 = TREE_OPERAND (arg0, 1);
7739 tree arg02 = TREE_OPERAND (arg0, 2);
7740 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7741 arg01 = fold_build1_loc (loc, code, type,
7742 fold_convert_loc (loc,
7743 TREE_TYPE (op0), arg01));
7744 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7745 arg02 = fold_build1_loc (loc, code, type,
7746 fold_convert_loc (loc,
7747 TREE_TYPE (op0), arg02));
7748 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7749 arg01, arg02);
7751 /* If this was a conversion, and all we did was to move it
7752 inside the COND_EXPR, bring it back out. But leave it if
7753 it is a conversion from integer to integer and the
7754 result precision is no wider than a word since such a
7755 conversion is cheap and may be optimized away by combine,
7756 while it couldn't if it were outside the COND_EXPR. Then return
7757 so we don't get into an infinite recursion loop taking the
7758 conversion out and then back in. */
7760 if ((CONVERT_EXPR_CODE_P (code)
7761 || code == NON_LVALUE_EXPR)
7762 && TREE_CODE (tem) == COND_EXPR
7763 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7764 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7765 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7766 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7767 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7768 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7769 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7770 && (INTEGRAL_TYPE_P
7771 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7772 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7773 || flag_syntax_only))
7774 tem = build1_loc (loc, code, type,
7775 build3 (COND_EXPR,
7776 TREE_TYPE (TREE_OPERAND
7777 (TREE_OPERAND (tem, 1), 0)),
7778 TREE_OPERAND (tem, 0),
7779 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7780 TREE_OPERAND (TREE_OPERAND (tem, 2),
7781 0)));
7782 return tem;
7786 switch (code)
7788 case NON_LVALUE_EXPR:
7789 if (!maybe_lvalue_p (op0))
7790 return fold_convert_loc (loc, type, op0);
7791 return NULL_TREE;
7793 CASE_CONVERT:
7794 case FLOAT_EXPR:
7795 case FIX_TRUNC_EXPR:
7796 if (COMPARISON_CLASS_P (op0))
7798 /* If we have (type) (a CMP b) and type is an integral type, return
7799 new expression involving the new type. Canonicalize
7800 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7801 non-integral type.
7802 Do not fold the result as that would not simplify further, also
7803 folding again results in recursions. */
7804 if (TREE_CODE (type) == BOOLEAN_TYPE)
7805 return build2_loc (loc, TREE_CODE (op0), type,
7806 TREE_OPERAND (op0, 0),
7807 TREE_OPERAND (op0, 1));
7808 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7809 && TREE_CODE (type) != VECTOR_TYPE)
7810 return build3_loc (loc, COND_EXPR, type, op0,
7811 constant_boolean_node (true, type),
7812 constant_boolean_node (false, type));
7815 /* Handle (T *)&A.B.C for A being of type T and B and C
7816 living at offset zero. This occurs frequently in
7817 C++ upcasting and then accessing the base. */
7818 if (TREE_CODE (op0) == ADDR_EXPR
7819 && POINTER_TYPE_P (type)
7820 && handled_component_p (TREE_OPERAND (op0, 0)))
7822 HOST_WIDE_INT bitsize, bitpos;
7823 tree offset;
7824 machine_mode mode;
7825 int unsignedp, reversep, volatilep;
7826 tree base
7827 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7828 &offset, &mode, &unsignedp, &reversep,
7829 &volatilep, false);
7830 /* If the reference was to a (constant) zero offset, we can use
7831 the address of the base if it has the same base type
7832 as the result type and the pointer type is unqualified. */
7833 if (! offset && bitpos == 0
7834 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7835 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7836 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7837 return fold_convert_loc (loc, type,
7838 build_fold_addr_expr_loc (loc, base));
7841 if (TREE_CODE (op0) == MODIFY_EXPR
7842 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7843 /* Detect assigning a bitfield. */
7844 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7845 && DECL_BIT_FIELD
7846 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7848 /* Don't leave an assignment inside a conversion
7849 unless assigning a bitfield. */
7850 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7851 /* First do the assignment, then return converted constant. */
7852 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7853 TREE_NO_WARNING (tem) = 1;
7854 TREE_USED (tem) = 1;
7855 return tem;
7858 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7859 constant (if x has a signed type, the sign bit cannot be set
7860 in c). This folds extension into the BIT_AND_EXPR.
7861 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7862 very likely don't have maximal range for their precision and this
7863 transformation effectively doesn't preserve non-maximal ranges. */
7864 if (TREE_CODE (type) == INTEGER_TYPE
7865 && TREE_CODE (op0) == BIT_AND_EXPR
7866 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7868 tree and_expr = op0;
7869 tree and0 = TREE_OPERAND (and_expr, 0);
7870 tree and1 = TREE_OPERAND (and_expr, 1);
7871 int change = 0;
7873 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7874 || (TYPE_PRECISION (type)
7875 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7876 change = 1;
7877 else if (TYPE_PRECISION (TREE_TYPE (and1))
7878 <= HOST_BITS_PER_WIDE_INT
7879 && tree_fits_uhwi_p (and1))
7881 unsigned HOST_WIDE_INT cst;
7883 cst = tree_to_uhwi (and1);
7884 cst &= HOST_WIDE_INT_M1U
7885 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7886 change = (cst == 0);
7887 if (change
7888 && !flag_syntax_only
7889 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7890 == ZERO_EXTEND))
7892 tree uns = unsigned_type_for (TREE_TYPE (and0));
7893 and0 = fold_convert_loc (loc, uns, and0);
7894 and1 = fold_convert_loc (loc, uns, and1);
7897 if (change)
7899 tem = force_fit_type (type, wi::to_widest (and1), 0,
7900 TREE_OVERFLOW (and1));
7901 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7902 fold_convert_loc (loc, type, and0), tem);
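   /* Illustration (editorial, not in the original source): with unsigned
      char X, the conversion (unsigned int) (X & 0x3f) is rewritten above as
      (unsigned int) X & 0x3f, folding the zero extension into the
      BIT_AND_EXPR.  */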
7906 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7907 cast (T1)X will fold away. We assume that this happens when X itself
7908 is a cast. */
7909 if (POINTER_TYPE_P (type)
7910 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7911 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7913 tree arg00 = TREE_OPERAND (arg0, 0);
7914 tree arg01 = TREE_OPERAND (arg0, 1);
7916 return fold_build_pointer_plus_loc
7917 (loc, fold_convert_loc (loc, type, arg00), arg01);
7920 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7921 of the same precision, and X is an integer type not narrower than
7922 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7923 if (INTEGRAL_TYPE_P (type)
7924 && TREE_CODE (op0) == BIT_NOT_EXPR
7925 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7926 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7927 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7929 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7930 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7931 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7932 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7933 fold_convert_loc (loc, type, tem));
7936 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7937 type of X and Y (integer types only). */
7938 if (INTEGRAL_TYPE_P (type)
7939 && TREE_CODE (op0) == MULT_EXPR
7940 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7941 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7943 /* Be careful not to introduce new overflows. */
7944 tree mult_type;
7945 if (TYPE_OVERFLOW_WRAPS (type))
7946 mult_type = type;
7947 else
7948 mult_type = unsigned_type_for (type);
7950 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7952 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7953 fold_convert_loc (loc, mult_type,
7954 TREE_OPERAND (op0, 0)),
7955 fold_convert_loc (loc, mult_type,
7956 TREE_OPERAND (op0, 1)));
7957 return fold_convert_loc (loc, type, tem);
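   /* Illustration (editorial, assuming 32-bit int and 16-bit short):
      (short) (x * y) with int operands becomes
      (short) ((unsigned short) x * (unsigned short) y); the unsigned
      intermediate type keeps the narrowing from introducing new signed
      overflow.  */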
7961 return NULL_TREE;
7963 case VIEW_CONVERT_EXPR:
7964 if (TREE_CODE (op0) == MEM_REF)
7966 tem = fold_build2_loc (loc, MEM_REF, type,
7967 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7968 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7969 return tem;
7972 return NULL_TREE;
7974 case NEGATE_EXPR:
7975 tem = fold_negate_expr (loc, arg0);
7976 if (tem)
7977 return fold_convert_loc (loc, type, tem);
7978 return NULL_TREE;
7980 case ABS_EXPR:
7981 /* Convert fabs((double)float) into (double)fabsf(float). */
7982 if (TREE_CODE (arg0) == NOP_EXPR
7983 && TREE_CODE (type) == REAL_TYPE)
7985 tree targ0 = strip_float_extensions (arg0);
7986 if (targ0 != arg0)
7987 return fold_convert_loc (loc, type,
7988 fold_build1_loc (loc, ABS_EXPR,
7989 TREE_TYPE (targ0),
7990 targ0));
7992 return NULL_TREE;
7994 case BIT_NOT_EXPR:
7995 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7996 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7997 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7998 fold_convert_loc (loc, type,
7999 TREE_OPERAND (arg0, 0)))))
8000 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8001 fold_convert_loc (loc, type,
8002 TREE_OPERAND (arg0, 1)));
8003 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8004 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8005 fold_convert_loc (loc, type,
8006 TREE_OPERAND (arg0, 1)))))
8007 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8008 fold_convert_loc (loc, type,
8009 TREE_OPERAND (arg0, 0)), tem);
8011 return NULL_TREE;
8013 case TRUTH_NOT_EXPR:
8014 /* Note that the operand of this must be an int
8015 and its values must be 0 or 1.
8016 ("true" is a fixed value perhaps depending on the language,
8017 but we don't handle values other than 1 correctly yet.) */
8018 tem = fold_truth_not_expr (loc, arg0);
8019 if (!tem)
8020 return NULL_TREE;
8021 return fold_convert_loc (loc, type, tem);
8023 case INDIRECT_REF:
8024 /* Fold *&X to X if X is an lvalue. */
8025 if (TREE_CODE (op0) == ADDR_EXPR)
8027 tree op00 = TREE_OPERAND (op0, 0);
8028 if ((TREE_CODE (op00) == VAR_DECL
8029 || TREE_CODE (op00) == PARM_DECL
8030 || TREE_CODE (op00) == RESULT_DECL)
8031 && !TREE_READONLY (op00))
8032 return op00;
8034 return NULL_TREE;
8036 default:
8037 return NULL_TREE;
8038 } /* switch (code) */
8042 /* If the operation was a conversion do _not_ mark a resulting constant
8043 with TREE_OVERFLOW if the original constant was not. These conversions
8044 have implementation defined behavior and retaining the TREE_OVERFLOW
8045 flag here would confuse later passes such as VRP. */
8046 tree
8047 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8048 tree type, tree op0)
8050 tree res = fold_unary_loc (loc, code, type, op0);
8051 if (res
8052 && TREE_CODE (res) == INTEGER_CST
8053 && TREE_CODE (op0) == INTEGER_CST
8054 && CONVERT_EXPR_CODE_P (code))
8055 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8057 return res;
8060 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8061 operands OP0 and OP1. LOC is the location of the resulting expression.
8062 ARG0 and ARG1 are the results of STRIP_NOPS on OP0 and OP1.
8063 Return the folded expression if folding is successful. Otherwise,
8064 return NULL_TREE. */
8065 static tree
8066 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8067 tree arg0, tree arg1, tree op0, tree op1)
8069 tree tem;
8071 /* We only do these simplifications if we are optimizing. */
8072 if (!optimize)
8073 return NULL_TREE;
8075 /* Check for things like (A || B) && (A || C). We can convert this
8076 to A || (B && C). Note that either operator can be any of the four
8077 truth and/or operations and the transformation will still be
8078 valid. Also note that we only care about order for the
8079 ANDIF and ORIF operators. If B contains side effects, this
8080 might change the truth-value of A. */
8081 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8082 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8083 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8084 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8085 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8086 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8088 tree a00 = TREE_OPERAND (arg0, 0);
8089 tree a01 = TREE_OPERAND (arg0, 1);
8090 tree a10 = TREE_OPERAND (arg1, 0);
8091 tree a11 = TREE_OPERAND (arg1, 1);
8092 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8093 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8094 && (code == TRUTH_AND_EXPR
8095 || code == TRUTH_OR_EXPR));
8097 if (operand_equal_p (a00, a10, 0))
8098 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8099 fold_build2_loc (loc, code, type, a01, a11));
8100 else if (commutative && operand_equal_p (a00, a11, 0))
8101 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8102 fold_build2_loc (loc, code, type, a01, a10));
8103 else if (commutative && operand_equal_p (a01, a10, 0))
8104 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8105 fold_build2_loc (loc, code, type, a00, a11));
8107 /* This case is tricky because we must either have commutative
8108 operators or else A10 must not have side-effects. */
8110 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8111 && operand_equal_p (a01, a11, 0))
8112 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8113 fold_build2_loc (loc, code, type, a00, a10),
8114 a01);
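   /* Illustration (editorial): (a || b) && (a || c) is rewritten above to
      a || (b && c) when optimizing, provided b has no side effects.  */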
8117 /* See if we can build a range comparison. */
8118 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8119 return tem;
8121 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8122 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8124 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8125 if (tem)
8126 return fold_build2_loc (loc, code, type, tem, arg1);
8129 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8130 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8132 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8133 if (tem)
8134 return fold_build2_loc (loc, code, type, arg0, tem);
8137 /* Check for the possibility of merging component references. If our
8138 lhs is another similar operation, try to merge its rhs with our
8139 rhs. Then try to merge our lhs and rhs. */
8140 if (TREE_CODE (arg0) == code
8141 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8142 TREE_OPERAND (arg0, 1), arg1)))
8143 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8145 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8146 return tem;
8148 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8149 && (code == TRUTH_AND_EXPR
8150 || code == TRUTH_ANDIF_EXPR
8151 || code == TRUTH_OR_EXPR
8152 || code == TRUTH_ORIF_EXPR))
8154 enum tree_code ncode, icode;
8156 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8157 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8158 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8160 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8161 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8162 We don't want to pack more than two leaves into a non-IF AND/OR
8163 expression.
8164 If the tree code of the left-hand operand isn't an AND/OR-IF code
8165 and isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8166 If the inner right-hand side of the left-hand operand has
8167 side effects, or isn't simple, then we can't add to it,
8168 as otherwise we might destroy the if-sequence. */
8169 if (TREE_CODE (arg0) == icode
8170 && simple_operand_p_2 (arg1)
8171 /* Needed for sequence points, to handle traps and
8172 side effects. */
8173 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8175 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8176 arg1);
8177 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8178 tem);
8180 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8181 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8182 else if (TREE_CODE (arg1) == icode
8183 && simple_operand_p_2 (arg0)
8184 /* Needed for sequence points, to handle traps and
8185 side effects. */
8186 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8188 tem = fold_build2_loc (loc, ncode, type,
8189 arg0, TREE_OPERAND (arg1, 0));
8190 return fold_build2_loc (loc, icode, type, tem,
8191 TREE_OPERAND (arg1, 1));
8193 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8194 into (A OR B).
8195 For sequence point consistency, we need to check for traps
8196 and side effects. */
8197 else if (code == icode && simple_operand_p_2 (arg0)
8198 && simple_operand_p_2 (arg1))
8199 return fold_build2_loc (loc, ncode, type, arg0, arg1);
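   /* Illustration (editorial): when LOGICAL_OP_NON_SHORT_CIRCUIT holds,
      TRUTH_AND_EXPR (TRUTH_ANDIF_EXPR (a, b), c) with simple, trap-free
      b and c is repacked above as
      TRUTH_ANDIF_EXPR (a, TRUTH_AND_EXPR (b, c)), so at most two leaves
      end up under the non-short-circuit operator.  */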
8202 return NULL_TREE;
8205 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8206 by changing CODE to reduce the magnitude of constants involved in
8207 ARG0 of the comparison.
8208 Returns a canonicalized comparison tree if a simplification was
8209 possible, otherwise returns NULL_TREE.
8210 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8211 valid if signed overflow is undefined. */
8213 static tree
8214 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8215 tree arg0, tree arg1,
8216 bool *strict_overflow_p)
8218 enum tree_code code0 = TREE_CODE (arg0);
8219 tree t, cst0 = NULL_TREE;
8220 int sgn0;
8222 /* Match A +- CST code arg1. We can change this only if overflow
8223 is undefined. */
8224 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8225 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8226 /* In principle pointers also have undefined overflow behavior,
8227 but that causes problems elsewhere. */
8228 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8229 && (code0 == MINUS_EXPR
8230 || code0 == PLUS_EXPR)
8231 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8232 return NULL_TREE;
8234 /* Identify the constant in arg0 and its sign. */
8235 cst0 = TREE_OPERAND (arg0, 1);
8236 sgn0 = tree_int_cst_sgn (cst0);
8238 /* Overflowed constants and zero will cause problems. */
8239 if (integer_zerop (cst0)
8240 || TREE_OVERFLOW (cst0))
8241 return NULL_TREE;
8243 /* See if we can reduce the magnitude of the constant in
8244 arg0 by changing the comparison code. */
8245 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8246 if (code == LT_EXPR
8247 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8248 code = LE_EXPR;
8249 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8250 else if (code == GT_EXPR
8251 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8252 code = GE_EXPR;
8253 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8254 else if (code == LE_EXPR
8255 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8256 code = LT_EXPR;
8257 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8258 else if (code == GE_EXPR
8259 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8260 code = GT_EXPR;
8261 else
8262 return NULL_TREE;
8263 *strict_overflow_p = true;
8265 /* Now build the constant reduced in magnitude. But not if that
8266 would produce one outside of its type's range. */
8267 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8268 && ((sgn0 == 1
8269 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8270 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8271 || (sgn0 == -1
8272 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8273 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8274 return NULL_TREE;
8276 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8277 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8278 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8279 t = fold_convert (TREE_TYPE (arg1), t);
8281 return fold_build2_loc (loc, code, type, t, arg1);
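   /* Worked example (editorial, not in the original source): with signed
      int X, where overflow is undefined, X - 5 < Y canonicalizes above to
      X - 4 <= Y; the constant moves toward zero while the comparison stays
      equivalent.  */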
8284 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8285 overflow further. Try to decrease the magnitude of constants involved
8286 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8287 and put sole constants at the second argument position.
8288 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8290 static tree
8291 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8292 tree arg0, tree arg1)
8294 tree t;
8295 bool strict_overflow_p;
8296 const char * const warnmsg = G_("assuming signed overflow does not occur "
8297 "when reducing constant in comparison");
8299 /* Try canonicalization by simplifying arg0. */
8300 strict_overflow_p = false;
8301 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8302 &strict_overflow_p);
8303 if (t)
8305 if (strict_overflow_p)
8306 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8307 return t;
8310 /* Try canonicalization by simplifying arg1 using the swapped
8311 comparison. */
8312 code = swap_tree_comparison (code);
8313 strict_overflow_p = false;
8314 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8315 &strict_overflow_p);
8316 if (t && strict_overflow_p)
8317 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8318 return t;
8321 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8322 space. This is used to avoid issuing overflow warnings for
8323 expressions like &p->x which cannot wrap. */
8325 static bool
8326 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8328 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8329 return true;
8331 if (bitpos < 0)
8332 return true;
8334 wide_int wi_offset;
8335 int precision = TYPE_PRECISION (TREE_TYPE (base));
8336 if (offset == NULL_TREE)
8337 wi_offset = wi::zero (precision);
8338 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8339 return true;
8340 else
8341 wi_offset = offset;
8343 bool overflow;
8344 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8345 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8346 if (overflow)
8347 return true;
8349 if (!wi::fits_uhwi_p (total))
8350 return true;
8352 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8353 if (size <= 0)
8354 return true;
8356 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8357 array. */
8358 if (TREE_CODE (base) == ADDR_EXPR)
8360 HOST_WIDE_INT base_size;
8362 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8363 if (base_size > 0 && size < base_size)
8364 size = base_size;
8367 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
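   /* Editorial note: the test above is conservative; any overflow while
      summing OFFSET and BITPOS / BITS_PER_UNIT, or a total exceeding the
      (possibly enlarged) object size, is reported as possible
      wraparound.  */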
8370 /* Return a positive integer when the symbol DECL is known to have
8371 a nonzero address, zero when it's known not to (e.g., it's a weak
8372 symbol), and a negative integer when the symbol is not yet in the
8373 symbol table and so whether or not its address is zero is unknown. */
8374 static int
8375 maybe_nonzero_address (tree decl)
8377 if (DECL_P (decl) && decl_in_symtab_p (decl))
8378 if (struct symtab_node *symbol = symtab_node::get_create (decl))
8379 return symbol->nonzero_address ();
8381 return -1;
8384 /* Subroutine of fold_binary. This routine performs all of the
8385 transformations that are common to the equality/inequality
8386 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8387 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8388 fold_binary should call fold_binary. Fold a comparison with
8389 tree code CODE and type TYPE with operands OP0 and OP1. Return
8390 the folded comparison or NULL_TREE. */
8392 static tree
8393 fold_comparison (location_t loc, enum tree_code code, tree type,
8394 tree op0, tree op1)
8396 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8397 tree arg0, arg1, tem;
8399 arg0 = op0;
8400 arg1 = op1;
8402 STRIP_SIGN_NOPS (arg0);
8403 STRIP_SIGN_NOPS (arg1);
8405 /* For comparisons of pointers we can decompose them to a compile-time
8406 comparison of the base objects and the offsets into the object.
8407 This requires at least one operand being an ADDR_EXPR or a
8408 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8409 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8410 && (TREE_CODE (arg0) == ADDR_EXPR
8411 || TREE_CODE (arg1) == ADDR_EXPR
8412 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8413 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8415 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8416 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8417 machine_mode mode;
8418 int volatilep, reversep, unsignedp;
8419 bool indirect_base0 = false, indirect_base1 = false;
8421 /* Get base and offset for the access. Strip ADDR_EXPR for
8422 get_inner_reference, but put it back by stripping INDIRECT_REF
8423 off the base object if possible. indirect_baseN will be true
8424 if baseN is not an address but refers to the object itself. */
8425 base0 = arg0;
8426 if (TREE_CODE (arg0) == ADDR_EXPR)
8428 base0
8429 = get_inner_reference (TREE_OPERAND (arg0, 0),
8430 &bitsize, &bitpos0, &offset0, &mode,
8431 &unsignedp, &reversep, &volatilep, false);
8432 if (TREE_CODE (base0) == INDIRECT_REF)
8433 base0 = TREE_OPERAND (base0, 0);
8434 else
8435 indirect_base0 = true;
8437 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8439 base0 = TREE_OPERAND (arg0, 0);
8440 STRIP_SIGN_NOPS (base0);
8441 if (TREE_CODE (base0) == ADDR_EXPR)
8443 base0
8444 = get_inner_reference (TREE_OPERAND (base0, 0),
8445 &bitsize, &bitpos0, &offset0, &mode,
8446 &unsignedp, &reversep, &volatilep,
8447 false);
8448 if (TREE_CODE (base0) == INDIRECT_REF)
8449 base0 = TREE_OPERAND (base0, 0);
8450 else
8451 indirect_base0 = true;
8453 if (offset0 == NULL_TREE || integer_zerop (offset0))
8454 offset0 = TREE_OPERAND (arg0, 1);
8455 else
8456 offset0 = size_binop (PLUS_EXPR, offset0,
8457 TREE_OPERAND (arg0, 1));
8458 if (TREE_CODE (offset0) == INTEGER_CST)
8460 offset_int tem = wi::sext (wi::to_offset (offset0),
8461 TYPE_PRECISION (sizetype));
8462 tem <<= LOG2_BITS_PER_UNIT;
8463 tem += bitpos0;
8464 if (wi::fits_shwi_p (tem))
8466 bitpos0 = tem.to_shwi ();
8467 offset0 = NULL_TREE;
8472 base1 = arg1;
8473 if (TREE_CODE (arg1) == ADDR_EXPR)
8475 base1
8476 = get_inner_reference (TREE_OPERAND (arg1, 0),
8477 &bitsize, &bitpos1, &offset1, &mode,
8478 &unsignedp, &reversep, &volatilep, false);
8479 if (TREE_CODE (base1) == INDIRECT_REF)
8480 base1 = TREE_OPERAND (base1, 0);
8481 else
8482 indirect_base1 = true;
8484 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8486 base1 = TREE_OPERAND (arg1, 0);
8487 STRIP_SIGN_NOPS (base1);
8488 if (TREE_CODE (base1) == ADDR_EXPR)
8490 base1
8491 = get_inner_reference (TREE_OPERAND (base1, 0),
8492 &bitsize, &bitpos1, &offset1, &mode,
8493 &unsignedp, &reversep, &volatilep,
8494 false);
8495 if (TREE_CODE (base1) == INDIRECT_REF)
8496 base1 = TREE_OPERAND (base1, 0);
8497 else
8498 indirect_base1 = true;
8500 if (offset1 == NULL_TREE || integer_zerop (offset1))
8501 offset1 = TREE_OPERAND (arg1, 1);
8502 else
8503 offset1 = size_binop (PLUS_EXPR, offset1,
8504 TREE_OPERAND (arg1, 1));
8505 if (TREE_CODE (offset1) == INTEGER_CST)
8507 offset_int tem = wi::sext (wi::to_offset (offset1),
8508 TYPE_PRECISION (sizetype));
8509 tem <<= LOG2_BITS_PER_UNIT;
8510 tem += bitpos1;
8511 if (wi::fits_shwi_p (tem))
8513 bitpos1 = tem.to_shwi ();
8514 offset1 = NULL_TREE;
8519 /* If we have equivalent bases we might be able to simplify. */
8520 if (indirect_base0 == indirect_base1
8521 && operand_equal_p (base0, base1,
8522 indirect_base0 ? OEP_ADDRESS_OF : 0))
8524 /* We can fold this expression to a constant if the non-constant
8525 offset parts are equal. */
8526 if ((offset0 == offset1
8527 || (offset0 && offset1
8528 && operand_equal_p (offset0, offset1, 0)))
8529 && (code == EQ_EXPR
8530 || code == NE_EXPR
8531 || (indirect_base0 && DECL_P (base0))
8532 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8535 if (!equality_code
8536 && bitpos0 != bitpos1
8537 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8538 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8539 fold_overflow_warning (("assuming pointer wraparound does not "
8540 "occur when comparing P +- C1 with "
8541 "P +- C2"),
8542 WARN_STRICT_OVERFLOW_CONDITIONAL);
8544 switch (code)
8546 case EQ_EXPR:
8547 return constant_boolean_node (bitpos0 == bitpos1, type);
8548 case NE_EXPR:
8549 return constant_boolean_node (bitpos0 != bitpos1, type);
8550 case LT_EXPR:
8551 return constant_boolean_node (bitpos0 < bitpos1, type);
8552 case LE_EXPR:
8553 return constant_boolean_node (bitpos0 <= bitpos1, type);
8554 case GE_EXPR:
8555 return constant_boolean_node (bitpos0 >= bitpos1, type);
8556 case GT_EXPR:
8557 return constant_boolean_node (bitpos0 > bitpos1, type);
8558 default:;
8561 /* We can simplify the comparison to a comparison of the variable
8562 offset parts if the constant offset parts are equal.
8563 Be careful to use signed sizetype here because otherwise we
8564 mess with array offsets in the wrong way. This is possible
8565 because pointer arithmetic is restricted to remain within an
8566 object and overflow on pointer differences is undefined as of
8567 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8568 else if (bitpos0 == bitpos1
8569 && (equality_code
8570 || (indirect_base0 && DECL_P (base0))
8571 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8573 /* By converting to signed sizetype we cover middle-end pointer
8574 arithmetic, which operates on unsigned pointer types of
8575 sizetype width, and ARRAY_REF offsets, which are properly sign-
8576 or zero-extended from their type in case it is narrower than
8577 sizetype. */
8578 if (offset0 == NULL_TREE)
8579 offset0 = build_int_cst (ssizetype, 0);
8580 else
8581 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8582 if (offset1 == NULL_TREE)
8583 offset1 = build_int_cst (ssizetype, 0);
8584 else
8585 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8587 if (!equality_code
8588 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8589 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8590 fold_overflow_warning (("assuming pointer wraparound does not "
8591 "occur when comparing P +- C1 with "
8592 "P +- C2"),
8593 WARN_STRICT_OVERFLOW_COMPARISON);
8595 return fold_build2_loc (loc, code, type, offset0, offset1);
8598 /* For equal offsets we can simplify to a comparison of the
8599 base addresses. */
8600 else if (bitpos0 == bitpos1
8601 && (indirect_base0
8602 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8603 && (indirect_base1
8604 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8605 && ((offset0 == offset1)
8606 || (offset0 && offset1
8607 && operand_equal_p (offset0, offset1, 0))))
8609 if (indirect_base0)
8610 base0 = build_fold_addr_expr_loc (loc, base0);
8611 if (indirect_base1)
8612 base1 = build_fold_addr_expr_loc (loc, base1);
8613 return fold_build2_loc (loc, code, type, base0, base1);
8615 /* Comparison between an ordinary (non-weak) symbol and a null
8616 pointer can be eliminated since such symbols must have a
8617 non-null address. In C, relational expressions between pointers
8618 to objects and null pointers are undefined. The results
8619 below follow the C++ rules with the additional property that
8620 every object pointer compares greater than a null pointer. */
8622 else if (DECL_P (base0)
8623 && maybe_nonzero_address (base0) > 0
8624 /* Avoid folding references to struct members at offset 0 to
8625 prevent tests like '&ptr->firstmember == 0' from getting
8626 eliminated. When ptr is null, although the -> expression
8627 is strictly speaking invalid, GCC retains it as a matter
8628 of QoI. See PR c/44555. */
8629 && (offset0 == NULL_TREE && bitpos0 != 0)
8630 /* The caller guarantees that when one of the arguments is
8631 constant (i.e., null in this case) it is second. */
8632 && integer_zerop (arg1))
8634 switch (code)
8636 case EQ_EXPR:
8637 case LE_EXPR:
8638 case LT_EXPR:
8639 return constant_boolean_node (false, type);
8640 case GE_EXPR:
8641 case GT_EXPR:
8642 case NE_EXPR:
8643 return constant_boolean_node (true, type);
8644 default:
8645 gcc_unreachable ();
8650 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8651 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8652 the resulting offset is smaller in absolute value than the
8653 original one and has the same sign. */
8654 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8655 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8656 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8657 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8658 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8659 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8660 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8661 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8663 tree const1 = TREE_OPERAND (arg0, 1);
8664 tree const2 = TREE_OPERAND (arg1, 1);
8665 tree variable1 = TREE_OPERAND (arg0, 0);
8666 tree variable2 = TREE_OPERAND (arg1, 0);
8667 tree cst;
8668 const char * const warnmsg = G_("assuming signed overflow does not "
8669 "occur when combining constants around "
8670 "a comparison");
8672 /* Put the constant on the side where it doesn't overflow and is
8673 of lower absolute value and of the same sign as before. */
8674 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8675 ? MINUS_EXPR : PLUS_EXPR,
8676 const2, const1);
8677 if (!TREE_OVERFLOW (cst)
8678 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8679 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8681 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8682 return fold_build2_loc (loc, code, type,
8683 variable1,
8684 fold_build2_loc (loc, TREE_CODE (arg1),
8685 TREE_TYPE (arg1),
8686 variable2, cst));
8689 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8690 ? MINUS_EXPR : PLUS_EXPR,
8691 const1, const2);
8692 if (!TREE_OVERFLOW (cst)
8693 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8694 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8696 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8697 return fold_build2_loc (loc, code, type,
8698 fold_build2_loc (loc, TREE_CODE (arg0),
8699 TREE_TYPE (arg0),
8700 variable1, cst),
8701 variable2);
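   /* Worked example (editorial, not in the original source): for signed X
      and Y with undefined overflow, X + 2 < Y + 5 is rewritten above as
      X < Y + 3; the combined constant 3 is smaller in absolute value than 5
      and has the same sign.  */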
8705 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8706 if (tem)
8707 return tem;
8709 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8710 constant, we can simplify it. */
8711 if (TREE_CODE (arg1) == INTEGER_CST
8712 && (TREE_CODE (arg0) == MIN_EXPR
8713 || TREE_CODE (arg0) == MAX_EXPR)
8714 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8716 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8717 if (tem)
8718 return tem;
8721 /* If we are comparing an expression that just has comparisons
8722 of two integer values, arithmetic expressions of those comparisons,
8723 and constants, we can simplify it. There are only three cases
8724 to check: the two values can either be equal, the first can be
8725 greater, or the second can be greater. Fold the expression for
8726 those three values. Since each value must be 0 or 1, we have
8727 eight possibilities, each of which corresponds to the constant 0
8728 or 1 or one of the six possible comparisons.
8730 This handles common cases like (a > b) == 0 but also handles
8731 expressions like ((x > y) - (y > x)) > 0, which supposedly
8732 occur in macroized code. */
8734 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8736 tree cval1 = 0, cval2 = 0;
8737 int save_p = 0;
8739 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8740 /* Don't handle degenerate cases here; they should already
8741 have been handled anyway. */
8742 && cval1 != 0 && cval2 != 0
8743 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8744 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8745 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8746 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8747 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8748 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8749 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8751 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8752 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8754 /* We can't just pass T to eval_subst in case cval1 or cval2
8755 was the same as ARG1. */
8757 tree high_result
8758 = fold_build2_loc (loc, code, type,
8759 eval_subst (loc, arg0, cval1, maxval,
8760 cval2, minval),
8761 arg1);
8762 tree equal_result
8763 = fold_build2_loc (loc, code, type,
8764 eval_subst (loc, arg0, cval1, maxval,
8765 cval2, maxval),
8766 arg1);
8767 tree low_result
8768 = fold_build2_loc (loc, code, type,
8769 eval_subst (loc, arg0, cval1, minval,
8770 cval2, maxval),
8771 arg1);
8773 /* All three of these results should be 0 or 1. Confirm they are.
8774 Then use those values to select the proper code to use. */
8776 if (TREE_CODE (high_result) == INTEGER_CST
8777 && TREE_CODE (equal_result) == INTEGER_CST
8778 && TREE_CODE (low_result) == INTEGER_CST)
8780 /* Make a 3-bit mask with the high-order bit being the
8781 value for `>', the next for `=', and the low for `<'. */
8782 switch ((integer_onep (high_result) * 4)
8783 + (integer_onep (equal_result) * 2)
8784 + integer_onep (low_result))
8786 case 0:
8787 /* Always false. */
8788 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8789 case 1:
8790 code = LT_EXPR;
8791 break;
8792 case 2:
8793 code = EQ_EXPR;
8794 break;
8795 case 3:
8796 code = LE_EXPR;
8797 break;
8798 case 4:
8799 code = GT_EXPR;
8800 break;
8801 case 5:
8802 code = NE_EXPR;
8803 break;
8804 case 6:
8805 code = GE_EXPR;
8806 break;
8807 case 7:
8808 /* Always true. */
8809 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8812 if (save_p)
8814 tem = save_expr (build2 (code, type, cval1, cval2));
8815 SET_EXPR_LOCATION (tem, loc);
8816 return tem;
8818 return fold_build2_loc (loc, code, type, cval1, cval2);
8823 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8824 into a single range test. */
8825 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8826 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8827 && TREE_CODE (arg1) == INTEGER_CST
8828 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8829 && !integer_zerop (TREE_OPERAND (arg0, 1))
8830 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8831 && !TREE_OVERFLOW (arg1))
8833 tem = fold_div_compare (loc, code, type, arg0, arg1);
8834 if (tem != NULL_TREE)
8835 return tem;
8838 return NULL_TREE;
8842 /* Subroutine of fold_binary. Optimize complex multiplications of the
8843 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8844 argument EXPR represents the expression "z" of type TYPE. */
8846 static tree
8847 fold_mult_zconjz (location_t loc, tree type, tree expr)
8849 tree itype = TREE_TYPE (type);
8850 tree rpart, ipart, tem;
8852 if (TREE_CODE (expr) == COMPLEX_EXPR)
8854 rpart = TREE_OPERAND (expr, 0);
8855 ipart = TREE_OPERAND (expr, 1);
8857 else if (TREE_CODE (expr) == COMPLEX_CST)
8859 rpart = TREE_REALPART (expr);
8860 ipart = TREE_IMAGPART (expr);
8862 else
8864 expr = save_expr (expr);
8865 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8866 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8869 rpart = save_expr (rpart);
8870 ipart = save_expr (ipart);
8871 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8872 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8873 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8874 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8875 build_zero_cst (itype));
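   /* Editorial note: this implements the identity
      (a + b*i) * conj (a + b*i) = (a*a + b*b) + 0*i, with the parts wrapped
      in save_expr so each is evaluated only once.  */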
8879 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8880 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8882 static bool
8883 vec_cst_ctor_to_array (tree arg, tree *elts)
8885 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8887 if (TREE_CODE (arg) == VECTOR_CST)
8889 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8890 elts[i] = VECTOR_CST_ELT (arg, i);
8892 else if (TREE_CODE (arg) == CONSTRUCTOR)
8894 constructor_elt *elt;
8896 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8897 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8898 return false;
8899 else
8900 elts[i] = elt->value;
8902 else
8903 return false;
8904 for (; i < nelts; i++)
8905 elts[i]
8906 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8907 return true;
8910 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8911 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8912 NULL_TREE otherwise. */
8914 static tree
8915 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8917 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8918 tree *elts;
8919 bool need_ctor = false;
8921 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8922 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8923 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8924 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8925 return NULL_TREE;
8927 elts = XALLOCAVEC (tree, nelts * 3);
8928 if (!vec_cst_ctor_to_array (arg0, elts)
8929 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8930 return NULL_TREE;
8932 for (i = 0; i < nelts; i++)
8934 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8935 need_ctor = true;
8936 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8939 if (need_ctor)
8941 vec<constructor_elt, va_gc> *v;
8942 vec_alloc (v, nelts);
8943 for (i = 0; i < nelts; i++)
8944 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8945 return build_constructor (type, v);
8947 else
8948 return build_vector (type, &elts[2 * nelts]);
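   /* Worked example (editorial, not in the original source): with
      four-element vectors A and B and SEL = {0, 4, 1, 5}, the result is
      {A[0], B[0], A[1], B[1]}; selector values below NELTS index into ARG0,
      the rest into ARG1.  */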
8951 /* Try to fold a pointer difference of type TYPE between two address expressions of
8952 array references AREF0 and AREF1 using location LOC. Return a
8953 simplified expression for the difference or NULL_TREE. */
8955 static tree
8956 fold_addr_of_array_ref_difference (location_t loc, tree type,
8957 tree aref0, tree aref1)
8959 tree base0 = TREE_OPERAND (aref0, 0);
8960 tree base1 = TREE_OPERAND (aref1, 0);
8961 tree base_offset = build_int_cst (type, 0);
8963 /* If the bases are array references as well, recurse. If the bases
8964 are pointer indirections compute the difference of the pointers.
8965 If the bases are equal, we are set. */
8966 if ((TREE_CODE (base0) == ARRAY_REF
8967 && TREE_CODE (base1) == ARRAY_REF
8968 && (base_offset
8969 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8970 || (INDIRECT_REF_P (base0)
8971 && INDIRECT_REF_P (base1)
8972 && (base_offset
8973 = fold_binary_loc (loc, MINUS_EXPR, type,
8974 fold_convert (type, TREE_OPERAND (base0, 0)),
8975 fold_convert (type,
8976 TREE_OPERAND (base1, 0)))))
8977 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8979 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8980 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8981 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8982 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8983 return fold_build2_loc (loc, PLUS_EXPR, type,
8984 base_offset,
8985 fold_build2_loc (loc, MULT_EXPR, type,
8986 diff, esz));
8988 return NULL_TREE;
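   /* Worked example (editorial, not in the original source): for int a[10],
      the difference of the addresses of a[i] and a[j] folds here to
      0 + (i - j) * 4, the index difference scaled by the element size.  */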
8991 /* If the real or vector real constant CST of type TYPE has an exact
8992 inverse, return it, else return NULL. */
8994 tree
8995 exact_inverse (tree type, tree cst)
8997 REAL_VALUE_TYPE r;
8998 tree unit_type, *elts;
8999 machine_mode mode;
9000 unsigned vec_nelts, i;
9002 switch (TREE_CODE (cst))
9004 case REAL_CST:
9005 r = TREE_REAL_CST (cst);
9007 if (exact_real_inverse (TYPE_MODE (type), &r))
9008 return build_real (type, r);
9010 return NULL_TREE;
9012 case VECTOR_CST:
9013 vec_nelts = VECTOR_CST_NELTS (cst);
9014 elts = XALLOCAVEC (tree, vec_nelts);
9015 unit_type = TREE_TYPE (type);
9016 mode = TYPE_MODE (unit_type);
9018 for (i = 0; i < vec_nelts; i++)
9020 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9021 if (!exact_real_inverse (mode, &r))
9022 return NULL_TREE;
9023 elts[i] = build_real (unit_type, r);
9026 return build_vector (type, elts);
9028 default:
9029 return NULL_TREE;
9033 /* Mask out the tz least significant bits of X of type TYPE where
9034 tz is the number of trailing zeroes in Y. */
9035 static wide_int
9036 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9038 int tz = wi::ctz (y);
9039 if (tz > 0)
9040 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9041 return x;
9044 /* Return true when T is an address and is known to be nonzero.
9045 For floating point we further ensure that T is not denormal.
9046 Similar logic is present in nonzero_address in rtlanal.h.
9048 If the return value is based on the assumption that signed overflow
9049 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9050 change *STRICT_OVERFLOW_P. */
9052 static bool
9053 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9055 tree type = TREE_TYPE (t);
9056 enum tree_code code;
9058 /* Doing something useful for floating point would need more work. */
9059 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9060 return false;
9062 code = TREE_CODE (t);
9063 switch (TREE_CODE_CLASS (code))
9065 case tcc_unary:
9066 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9067 strict_overflow_p);
9068 case tcc_binary:
9069 case tcc_comparison:
9070 return tree_binary_nonzero_warnv_p (code, type,
9071 TREE_OPERAND (t, 0),
9072 TREE_OPERAND (t, 1),
9073 strict_overflow_p);
9074 case tcc_constant:
9075 case tcc_declaration:
9076 case tcc_reference:
9077 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9079 default:
9080 break;
9083 switch (code)
9085 case TRUTH_NOT_EXPR:
9086 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9087 strict_overflow_p);
9089 case TRUTH_AND_EXPR:
9090 case TRUTH_OR_EXPR:
9091 case TRUTH_XOR_EXPR:
9092 return tree_binary_nonzero_warnv_p (code, type,
9093 TREE_OPERAND (t, 0),
9094 TREE_OPERAND (t, 1),
9095 strict_overflow_p);
9097 case COND_EXPR:
9098 case CONSTRUCTOR:
9099 case OBJ_TYPE_REF:
9100 case ASSERT_EXPR:
9101 case ADDR_EXPR:
9102 case WITH_SIZE_EXPR:
9103 case SSA_NAME:
9104 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9106 case COMPOUND_EXPR:
9107 case MODIFY_EXPR:
9108 case BIND_EXPR:
9109 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9110 strict_overflow_p);
9112 case SAVE_EXPR:
9113 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9114 strict_overflow_p);
9116 case CALL_EXPR:
9118 tree fndecl = get_callee_fndecl (t);
9119 if (!fndecl) return false;
9120 if (flag_delete_null_pointer_checks && !flag_check_new
9121 && DECL_IS_OPERATOR_NEW (fndecl)
9122 && !TREE_NOTHROW (fndecl))
9123 return true;
9124 if (flag_delete_null_pointer_checks
9125 && lookup_attribute ("returns_nonnull",
9126 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9127 return true;
9128 return alloca_call_p (t);
9131 default:
9132 break;
9134 return false;
9137 /* Return true when T is an address and is known to be nonzero.
9138 Handle warnings about undefined signed overflow. */
9140 static bool
9141 tree_expr_nonzero_p (tree t)
9143 bool ret, strict_overflow_p;
9145 strict_overflow_p = false;
9146 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9147 if (strict_overflow_p)
9148 fold_overflow_warning (("assuming signed overflow does not occur when "
9149 "determining that expression is always "
9150 "non-zero"),
9151 WARN_STRICT_OVERFLOW_MISC);
9152 return ret;
9155 /* Return true if T is known not to be equal to an integer W. */
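/* For illustration (not from the original source): for an SSA name T
   of integral type with recorded range [1, 5], expr_not_equal_to (T, 7)
   returns true because 7 lies outside the range, whereas
   expr_not_equal_to (T, 3) returns false since T may well be 3.  */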
9157 bool
9158 expr_not_equal_to (tree t, const wide_int &w)
9160 wide_int min, max, nz;
9161 value_range_type rtype;
9162 switch (TREE_CODE (t))
9164 case INTEGER_CST:
9165 return wi::ne_p (t, w);
9167 case SSA_NAME:
9168 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9169 return false;
9170 rtype = get_range_info (t, &min, &max);
9171 if (rtype == VR_RANGE)
9173 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9174 return true;
9175 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9176 return true;
9178 else if (rtype == VR_ANTI_RANGE
9179 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9180 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9181 return true;
9182 /* If T has some known zero bits and W has any of those bits set,
9183 then T is known not to be equal to W. */
9184 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9185 TYPE_PRECISION (TREE_TYPE (t))), 0))
9186 return true;
9187 return false;
9189 default:
9190 return false;
9194 /* Fold a binary expression of code CODE and type TYPE with operands
9195 OP0 and OP1. LOC is the location of the resulting expression.
9196 Return the folded expression if folding is successful. Otherwise,
9197 return NULL_TREE. */
9199 tree
9200 fold_binary_loc (location_t loc,
9201 enum tree_code code, tree type, tree op0, tree op1)
9203 enum tree_code_class kind = TREE_CODE_CLASS (code);
9204 tree arg0, arg1, tem;
9205 tree t1 = NULL_TREE;
9206 bool strict_overflow_p;
9207 unsigned int prec;
9209 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9210 && TREE_CODE_LENGTH (code) == 2
9211 && op0 != NULL_TREE
9212 && op1 != NULL_TREE);
9214 arg0 = op0;
9215 arg1 = op1;
9217 /* Strip any conversions that don't change the mode. This is
9218 safe for every expression, except for a comparison expression
9219 because its signedness is derived from its operands. So, in
9220 the latter case, only strip conversions that don't change the
9221 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9222 preserved.
9224 Note that this is done as an internal manipulation within the
9225 constant folder, in order to find the simplest representation
9226 of the arguments so that their form can be studied. In any
9227 case, the appropriate type conversions should be put back in
9228 the tree that will get out of the constant folder. */
9230 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9232 STRIP_SIGN_NOPS (arg0);
9233 STRIP_SIGN_NOPS (arg1);
9235 else
9237 STRIP_NOPS (arg0);
9238 STRIP_NOPS (arg1);
9241 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9242 constant but we can't do arithmetic on them. */
9243 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9245 tem = const_binop (code, type, arg0, arg1);
9246 if (tem != NULL_TREE)
9248 if (TREE_TYPE (tem) != type)
9249 tem = fold_convert_loc (loc, type, tem);
9250 return tem;
9254 /* If this is a commutative operation, and ARG0 is a constant, move it
9255 to ARG1 to reduce the number of tests below. */
9256 if (commutative_tree_code (code)
9257 && tree_swap_operands_p (arg0, arg1, true))
9258 return fold_build2_loc (loc, code, type, op1, op0);
9260 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9261 to ARG1 to reduce the number of tests below. */
9262 if (kind == tcc_comparison
9263 && tree_swap_operands_p (arg0, arg1, true))
9264 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9266 tem = generic_simplify (loc, code, type, op0, op1);
9267 if (tem)
9268 return tem;
9270 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9272 First check for cases where an arithmetic operation is applied to a
9273 compound, conditional, or comparison operation. Push the arithmetic
9274 operation inside the compound or conditional to see if any folding
9275 can then be done. Convert comparison to conditional for this purpose.
9276 This also optimizes non-constant cases that used to be done in
9277 expand_expr.
9279 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9280 one of the operands is a comparison and the other is a comparison, a
9281 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9282 code below would make the expression more complex. Change it to a
9283 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9284 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
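  /* For illustration (not from the original source):
       (x == 1) & (y == 2)   becomes  (x == 1) && (y == 2),
       (x == 1) != (y == 2)  becomes  (x == 1) ^ (y == 2), and
       (x == 1) == (y == 2)  becomes  !((x == 1) ^ (y == 2)).  */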
9286 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9287 || code == EQ_EXPR || code == NE_EXPR)
9288 && TREE_CODE (type) != VECTOR_TYPE
9289 && ((truth_value_p (TREE_CODE (arg0))
9290 && (truth_value_p (TREE_CODE (arg1))
9291 || (TREE_CODE (arg1) == BIT_AND_EXPR
9292 && integer_onep (TREE_OPERAND (arg1, 1)))))
9293 || (truth_value_p (TREE_CODE (arg1))
9294 && (truth_value_p (TREE_CODE (arg0))
9295 || (TREE_CODE (arg0) == BIT_AND_EXPR
9296 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9298 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9299 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9300 : TRUTH_XOR_EXPR,
9301 boolean_type_node,
9302 fold_convert_loc (loc, boolean_type_node, arg0),
9303 fold_convert_loc (loc, boolean_type_node, arg1));
9305 if (code == EQ_EXPR)
9306 tem = invert_truthvalue_loc (loc, tem);
9308 return fold_convert_loc (loc, type, tem);
9311 if (TREE_CODE_CLASS (code) == tcc_binary
9312 || TREE_CODE_CLASS (code) == tcc_comparison)
9314 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9316 tem = fold_build2_loc (loc, code, type,
9317 fold_convert_loc (loc, TREE_TYPE (op0),
9318 TREE_OPERAND (arg0, 1)), op1);
9319 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9320 tem);
9322 if (TREE_CODE (arg1) == COMPOUND_EXPR
9323 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9325 tem = fold_build2_loc (loc, code, type, op0,
9326 fold_convert_loc (loc, TREE_TYPE (op1),
9327 TREE_OPERAND (arg1, 1)));
9328 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9329 tem);
9332 if (TREE_CODE (arg0) == COND_EXPR
9333 || TREE_CODE (arg0) == VEC_COND_EXPR
9334 || COMPARISON_CLASS_P (arg0))
9336 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9337 arg0, arg1,
9338 /*cond_first_p=*/1);
9339 if (tem != NULL_TREE)
9340 return tem;
9343 if (TREE_CODE (arg1) == COND_EXPR
9344 || TREE_CODE (arg1) == VEC_COND_EXPR
9345 || COMPARISON_CLASS_P (arg1))
9347 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9348 arg1, arg0,
9349 /*cond_first_p=*/0);
9350 if (tem != NULL_TREE)
9351 return tem;
9355 switch (code)
9357 case MEM_REF:
9358 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9359 if (TREE_CODE (arg0) == ADDR_EXPR
9360 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9362 tree iref = TREE_OPERAND (arg0, 0);
9363 return fold_build2 (MEM_REF, type,
9364 TREE_OPERAND (iref, 0),
9365 int_const_binop (PLUS_EXPR, arg1,
9366 TREE_OPERAND (iref, 1)));
9369 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9370 if (TREE_CODE (arg0) == ADDR_EXPR
9371 && handled_component_p (TREE_OPERAND (arg0, 0)))
9373 tree base;
9374 HOST_WIDE_INT coffset;
9375 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9376 &coffset);
9377 if (!base)
9378 return NULL_TREE;
9379 return fold_build2 (MEM_REF, type,
9380 build_fold_addr_expr (base),
9381 int_const_binop (PLUS_EXPR, arg1,
9382 size_int (coffset)));
9385 return NULL_TREE;
9387 case POINTER_PLUS_EXPR:
9388 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9389 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9390 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9391 return fold_convert_loc (loc, type,
9392 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9393 fold_convert_loc (loc, sizetype,
9394 arg1),
9395 fold_convert_loc (loc, sizetype,
9396 arg0)));
9398 return NULL_TREE;
9400 case PLUS_EXPR:
9401 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9403 /* X + (X / CST) * -CST is X % CST. */
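	  /* E.g. x + (x / 4) * -4 folds to x % 4, by the definition of
	     truncating division (illustrative example, not from the
	     original source).  */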
9404 if (TREE_CODE (arg1) == MULT_EXPR
9405 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9406 && operand_equal_p (arg0,
9407 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9409 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9410 tree cst1 = TREE_OPERAND (arg1, 1);
9411 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9412 cst1, cst0);
9413 if (sum && integer_zerop (sum))
9414 return fold_convert_loc (loc, type,
9415 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9416 TREE_TYPE (arg0), arg0,
9417 cst0));
9421 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9422 one. Make sure the type is not saturating and has the signedness of
9423 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9424 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9425 if ((TREE_CODE (arg0) == MULT_EXPR
9426 || TREE_CODE (arg1) == MULT_EXPR)
9427 && !TYPE_SATURATING (type)
9428 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9429 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9430 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9432 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9433 if (tem)
9434 return tem;
9437 if (! FLOAT_TYPE_P (type))
9439 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9440 (plus (plus (mult) (mult)) (foo)) so that we can
9441 take advantage of the factoring cases below. */
9442 if (ANY_INTEGRAL_TYPE_P (type)
9443 && TYPE_OVERFLOW_WRAPS (type)
9444 && (((TREE_CODE (arg0) == PLUS_EXPR
9445 || TREE_CODE (arg0) == MINUS_EXPR)
9446 && TREE_CODE (arg1) == MULT_EXPR)
9447 || ((TREE_CODE (arg1) == PLUS_EXPR
9448 || TREE_CODE (arg1) == MINUS_EXPR)
9449 && TREE_CODE (arg0) == MULT_EXPR)))
9451 tree parg0, parg1, parg, marg;
9452 enum tree_code pcode;
9454 if (TREE_CODE (arg1) == MULT_EXPR)
9455 parg = arg0, marg = arg1;
9456 else
9457 parg = arg1, marg = arg0;
9458 pcode = TREE_CODE (parg);
9459 parg0 = TREE_OPERAND (parg, 0);
9460 parg1 = TREE_OPERAND (parg, 1);
9461 STRIP_NOPS (parg0);
9462 STRIP_NOPS (parg1);
9464 if (TREE_CODE (parg0) == MULT_EXPR
9465 && TREE_CODE (parg1) != MULT_EXPR)
9466 return fold_build2_loc (loc, pcode, type,
9467 fold_build2_loc (loc, PLUS_EXPR, type,
9468 fold_convert_loc (loc, type,
9469 parg0),
9470 fold_convert_loc (loc, type,
9471 marg)),
9472 fold_convert_loc (loc, type, parg1));
9473 if (TREE_CODE (parg0) != MULT_EXPR
9474 && TREE_CODE (parg1) == MULT_EXPR)
9475 return
9476 fold_build2_loc (loc, PLUS_EXPR, type,
9477 fold_convert_loc (loc, type, parg0),
9478 fold_build2_loc (loc, pcode, type,
9479 fold_convert_loc (loc, type, marg),
9480 fold_convert_loc (loc, type,
9481 parg1)));
9484 else
9486 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9487 to __complex__ ( x, y ). This is not the same for SNaNs or
9488 if signed zeros are involved. */
9489 if (!HONOR_SNANS (element_mode (arg0))
9490 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9491 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9493 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9494 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9495 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9496 bool arg0rz = false, arg0iz = false;
9497 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9498 || (arg0i && (arg0iz = real_zerop (arg0i))))
9500 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9501 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9502 if (arg0rz && arg1i && real_zerop (arg1i))
9504 tree rp = arg1r ? arg1r
9505 : build1 (REALPART_EXPR, rtype, arg1);
9506 tree ip = arg0i ? arg0i
9507 : build1 (IMAGPART_EXPR, rtype, arg0);
9508 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9510 else if (arg0iz && arg1r && real_zerop (arg1r))
9512 tree rp = arg0r ? arg0r
9513 : build1 (REALPART_EXPR, rtype, arg0);
9514 tree ip = arg1i ? arg1i
9515 : build1 (IMAGPART_EXPR, rtype, arg1);
9516 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9521 if (flag_unsafe_math_optimizations
9522 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9523 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9524 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9525 return tem;
9527 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9528 We associate floats only if the user has specified
9529 -fassociative-math. */
9530 if (flag_associative_math
9531 && TREE_CODE (arg1) == PLUS_EXPR
9532 && TREE_CODE (arg0) != MULT_EXPR)
9534 tree tree10 = TREE_OPERAND (arg1, 0);
9535 tree tree11 = TREE_OPERAND (arg1, 1);
9536 if (TREE_CODE (tree11) == MULT_EXPR
9537 && TREE_CODE (tree10) == MULT_EXPR)
9539 tree tree0;
9540 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9541 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9544 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9545 We associate floats only if the user has specified
9546 -fassociative-math. */
9547 if (flag_associative_math
9548 && TREE_CODE (arg0) == PLUS_EXPR
9549 && TREE_CODE (arg1) != MULT_EXPR)
9551 tree tree00 = TREE_OPERAND (arg0, 0);
9552 tree tree01 = TREE_OPERAND (arg0, 1);
9553 if (TREE_CODE (tree01) == MULT_EXPR
9554 && TREE_CODE (tree00) == MULT_EXPR)
9556 tree tree0;
9557 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9558 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9563 bit_rotate:
9564 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9565 is a rotate of A by C1 bits. */
9566 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9567 is a rotate of A by B bits. */
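      /* For illustration (not from the original source): with a
	 32-bit unsigned int A, (A << 3) + (A >> 29) satisfies
	 C1 + C2 == 32 and is folded into a left-rotate of A by 3.  */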
9569 enum tree_code code0, code1;
9570 tree rtype;
9571 code0 = TREE_CODE (arg0);
9572 code1 = TREE_CODE (arg1);
9573 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9574 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9575 && operand_equal_p (TREE_OPERAND (arg0, 0),
9576 TREE_OPERAND (arg1, 0), 0)
9577 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9578 TYPE_UNSIGNED (rtype))
9579 /* Only create rotates in complete modes. Other cases are not
9580 expanded properly. */
9581 && (element_precision (rtype)
9582 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9584 tree tree01, tree11;
9585 enum tree_code code01, code11;
9587 tree01 = TREE_OPERAND (arg0, 1);
9588 tree11 = TREE_OPERAND (arg1, 1);
9589 STRIP_NOPS (tree01);
9590 STRIP_NOPS (tree11);
9591 code01 = TREE_CODE (tree01);
9592 code11 = TREE_CODE (tree11);
9593 if (code01 == INTEGER_CST
9594 && code11 == INTEGER_CST
9595 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9596 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9598 tem = build2_loc (loc, LROTATE_EXPR,
9599 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9600 TREE_OPERAND (arg0, 0),
9601 code0 == LSHIFT_EXPR
9602 ? TREE_OPERAND (arg0, 1)
9603 : TREE_OPERAND (arg1, 1));
9604 return fold_convert_loc (loc, type, tem);
9606 else if (code11 == MINUS_EXPR)
9608 tree tree110, tree111;
9609 tree110 = TREE_OPERAND (tree11, 0);
9610 tree111 = TREE_OPERAND (tree11, 1);
9611 STRIP_NOPS (tree110);
9612 STRIP_NOPS (tree111);
9613 if (TREE_CODE (tree110) == INTEGER_CST
9614 && 0 == compare_tree_int (tree110,
9615 element_precision
9616 (TREE_TYPE (TREE_OPERAND
9617 (arg0, 0))))
9618 && operand_equal_p (tree01, tree111, 0))
9619 return
9620 fold_convert_loc (loc, type,
9621 build2 ((code0 == LSHIFT_EXPR
9622 ? LROTATE_EXPR
9623 : RROTATE_EXPR),
9624 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9625 TREE_OPERAND (arg0, 0),
9626 TREE_OPERAND (arg0, 1)));
9628 else if (code01 == MINUS_EXPR)
9630 tree tree010, tree011;
9631 tree010 = TREE_OPERAND (tree01, 0);
9632 tree011 = TREE_OPERAND (tree01, 1);
9633 STRIP_NOPS (tree010);
9634 STRIP_NOPS (tree011);
9635 if (TREE_CODE (tree010) == INTEGER_CST
9636 && 0 == compare_tree_int (tree010,
9637 element_precision
9638 (TREE_TYPE (TREE_OPERAND
9639 (arg0, 0))))
9640 && operand_equal_p (tree11, tree011, 0))
9641 return fold_convert_loc
9642 (loc, type,
9643 build2 ((code0 != LSHIFT_EXPR
9644 ? LROTATE_EXPR
9645 : RROTATE_EXPR),
9646 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9647 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9652 associate:
9653 /* In most languages, we can't associate operations on floats through
9654 parentheses. Rather than remember where the parentheses were, we
9655 don't associate floats at all, unless the user has specified
9656 -fassociative-math.
9657 And, we need to make sure type is not saturating. */
9659 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9660 && !TYPE_SATURATING (type))
9662 tree var0, con0, lit0, minus_lit0;
9663 tree var1, con1, lit1, minus_lit1;
9664 tree atype = type;
9665 bool ok = true;
9667 /* Split both trees into variables, constants, and literals. Then
9668 associate each group together, the constants with literals,
9669 then the result with variables. This increases the chances of
9670 literals being recombined later and of generating relocatable
9671 expressions for the sum of a constant and literal. */
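	/* For example (illustrative, not from the original source),
	   folding (x + 4) + (y + 3) splits into variables x and y and
	   literals 4 and 3; the literals associate to 7 first, giving
	   (x + y) + 7.  */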
9672 var0 = split_tree (loc, arg0, type, code,
9673 &con0, &lit0, &minus_lit0, 0);
9674 var1 = split_tree (loc, arg1, type, code,
9675 &con1, &lit1, &minus_lit1, code == MINUS_EXPR);
9677 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9678 if (code == MINUS_EXPR)
9679 code = PLUS_EXPR;
9681 /* With undefined overflow prefer doing association in a type
9682 which wraps on overflow, if that is one of the operand types. */
9683 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9684 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9686 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9687 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9688 atype = TREE_TYPE (arg0);
9689 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9690 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9691 atype = TREE_TYPE (arg1);
9692 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9695 /* With undefined overflow we can only associate constants with one
9696 variable, and constants whose association doesn't overflow. */
9697 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9698 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9700 if (var0 && var1)
9702 tree tmp0 = var0;
9703 tree tmp1 = var1;
9704 bool one_neg = false;
9706 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9708 tmp0 = TREE_OPERAND (tmp0, 0);
9709 one_neg = !one_neg;
9711 if (CONVERT_EXPR_P (tmp0)
9712 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9713 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9714 <= TYPE_PRECISION (atype)))
9715 tmp0 = TREE_OPERAND (tmp0, 0);
9716 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9718 tmp1 = TREE_OPERAND (tmp1, 0);
9719 one_neg = !one_neg;
9721 if (CONVERT_EXPR_P (tmp1)
9722 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9723 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9724 <= TYPE_PRECISION (atype)))
9725 tmp1 = TREE_OPERAND (tmp1, 0);
9726 /* The only case we can still associate with two variables
9727 is if they cancel out. */
9728 if (!one_neg
9729 || !operand_equal_p (tmp0, tmp1, 0))
9730 ok = false;
9734 /* Only do something if we found more than two objects. Otherwise,
9735 nothing has changed and we risk infinite recursion. */
9736 if (ok
9737 && (2 < ((var0 != 0) + (var1 != 0)
9738 + (con0 != 0) + (con1 != 0)
9739 + (lit0 != 0) + (lit1 != 0)
9740 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9742 bool any_overflows = false;
9743 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9744 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9745 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9746 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9747 var0 = associate_trees (loc, var0, var1, code, atype);
9748 con0 = associate_trees (loc, con0, con1, code, atype);
9749 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9750 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9751 code, atype);
9753 /* Preserve the MINUS_EXPR if the negative part of the literal is
9754 greater than the positive part. Otherwise, the multiplicative
9755 folding code (i.e. extract_muldiv) may be fooled when
9756 unsigned constants are subtracted, like in the following
9757 example: ((X*2 + 4) - 8U)/2. */
9758 if (minus_lit0 && lit0)
9760 if (TREE_CODE (lit0) == INTEGER_CST
9761 && TREE_CODE (minus_lit0) == INTEGER_CST
9762 && tree_int_cst_lt (lit0, minus_lit0))
9764 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9765 MINUS_EXPR, atype);
9766 lit0 = 0;
9768 else
9770 lit0 = associate_trees (loc, lit0, minus_lit0,
9771 MINUS_EXPR, atype);
9772 minus_lit0 = 0;
9776 /* Don't introduce overflows through reassociation. */
9777 if (!any_overflows
9778 && ((lit0 && TREE_OVERFLOW_P (lit0))
9779 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9780 return NULL_TREE;
9782 if (minus_lit0)
9784 if (con0 == 0)
9785 return
9786 fold_convert_loc (loc, type,
9787 associate_trees (loc, var0, minus_lit0,
9788 MINUS_EXPR, atype));
9789 else
9791 con0 = associate_trees (loc, con0, minus_lit0,
9792 MINUS_EXPR, atype);
9793 return
9794 fold_convert_loc (loc, type,
9795 associate_trees (loc, var0, con0,
9796 PLUS_EXPR, atype));
9800 con0 = associate_trees (loc, con0, lit0, code, atype);
9801 return
9802 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9803 code, atype));
9807 return NULL_TREE;
9809 case MINUS_EXPR:
9810 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9811 if (TREE_CODE (arg0) == NEGATE_EXPR
9812 && negate_expr_p (op1)
9813 && reorder_operands_p (arg0, arg1))
9814 return fold_build2_loc (loc, MINUS_EXPR, type,
9815 negate_expr (op1),
9816 fold_convert_loc (loc, type,
9817 TREE_OPERAND (arg0, 0)));
9819 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9820 __complex__ ( x, -y ). This is not the same for SNaNs or if
9821 signed zeros are involved. */
9822 if (!HONOR_SNANS (element_mode (arg0))
9823 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9824 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9826 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9827 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9828 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9829 bool arg0rz = false, arg0iz = false;
9830 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9831 || (arg0i && (arg0iz = real_zerop (arg0i))))
9833 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9834 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9835 if (arg0rz && arg1i && real_zerop (arg1i))
9837 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9838 arg1r ? arg1r
9839 : build1 (REALPART_EXPR, rtype, arg1));
9840 tree ip = arg0i ? arg0i
9841 : build1 (IMAGPART_EXPR, rtype, arg0);
9842 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9844 else if (arg0iz && arg1r && real_zerop (arg1r))
9846 tree rp = arg0r ? arg0r
9847 : build1 (REALPART_EXPR, rtype, arg0);
9848 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9849 arg1i ? arg1i
9850 : build1 (IMAGPART_EXPR, rtype, arg1));
9851 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9856 /* A - B -> A + (-B) if B is easily negatable. */
9857 if (negate_expr_p (op1)
9858 && ! TYPE_OVERFLOW_SANITIZED (type)
9859 && ((FLOAT_TYPE_P (type)
9860 /* Avoid this transformation if B is a positive REAL_CST. */
9861 && (TREE_CODE (op1) != REAL_CST
9862 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9863 || INTEGRAL_TYPE_P (type)))
9864 return fold_build2_loc (loc, PLUS_EXPR, type,
9865 fold_convert_loc (loc, type, arg0),
9866 negate_expr (op1));
9868 /* Fold &a[i] - &a[j] to i-j. */
9869 if (TREE_CODE (arg0) == ADDR_EXPR
9870 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9871 && TREE_CODE (arg1) == ADDR_EXPR
9872 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9874 tree tem = fold_addr_of_array_ref_difference (loc, type,
9875 TREE_OPERAND (arg0, 0),
9876 TREE_OPERAND (arg1, 0));
9877 if (tem)
9878 return tem;
9881 if (FLOAT_TYPE_P (type)
9882 && flag_unsafe_math_optimizations
9883 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9884 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9885 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9886 return tem;
9888 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9889 one. Make sure the type is not saturating and has the signedness of
9890 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9891 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9892 if ((TREE_CODE (arg0) == MULT_EXPR
9893 || TREE_CODE (arg1) == MULT_EXPR)
9894 && !TYPE_SATURATING (type)
9895 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9896 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9897 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9899 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9900 if (tem)
9901 return tem;
9904 goto associate;
9906 case MULT_EXPR:
9907 if (! FLOAT_TYPE_P (type))
9909 /* Transform x * -C into -x * C if x is easily negatable. */
9910 if (TREE_CODE (op1) == INTEGER_CST
9911 && tree_int_cst_sgn (op1) == -1
9912 && negate_expr_p (op0)
9913 && (tem = negate_expr (op1)) != op1
9914 && ! TREE_OVERFLOW (tem))
9915 return fold_build2_loc (loc, MULT_EXPR, type,
9916 fold_convert_loc (loc, type,
9917 negate_expr (op0)), tem);
9919 strict_overflow_p = false;
9920 if (TREE_CODE (arg1) == INTEGER_CST
9921 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9922 &strict_overflow_p)))
9924 if (strict_overflow_p)
9925 fold_overflow_warning (("assuming signed overflow does not "
9926 "occur when simplifying "
9927 "multiplication"),
9928 WARN_STRICT_OVERFLOW_MISC);
9929 return fold_convert_loc (loc, type, tem);
9932 /* Optimize z * conj(z) for integer complex numbers. */
9933 if (TREE_CODE (arg0) == CONJ_EXPR
9934 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9935 return fold_mult_zconjz (loc, type, arg1);
9936 if (TREE_CODE (arg1) == CONJ_EXPR
9937 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9938 return fold_mult_zconjz (loc, type, arg0);
9940 else
9942 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9943 This is not the same for NaNs or if signed zeros are
9944 involved. */
9945 if (!HONOR_NANS (arg0)
9946 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9947 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9948 && TREE_CODE (arg1) == COMPLEX_CST
9949 && real_zerop (TREE_REALPART (arg1)))
9951 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9952 if (real_onep (TREE_IMAGPART (arg1)))
9953 return
9954 fold_build2_loc (loc, COMPLEX_EXPR, type,
9955 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9956 rtype, arg0)),
9957 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9958 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9959 return
9960 fold_build2_loc (loc, COMPLEX_EXPR, type,
9961 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9962 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9963 rtype, arg0)));
9966 /* Optimize z * conj(z) for floating point complex numbers.
9967 Guarded by flag_unsafe_math_optimizations as non-finite
9968 imaginary components don't produce scalar results. */
9969 if (flag_unsafe_math_optimizations
9970 && TREE_CODE (arg0) == CONJ_EXPR
9971 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9972 return fold_mult_zconjz (loc, type, arg1);
9973 if (flag_unsafe_math_optimizations
9974 && TREE_CODE (arg1) == CONJ_EXPR
9975 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9976 return fold_mult_zconjz (loc, type, arg0);
9978 goto associate;
9980 case BIT_IOR_EXPR:
9981 /* Canonicalize (X & C1) | C2. */
9982 if (TREE_CODE (arg0) == BIT_AND_EXPR
9983 && TREE_CODE (arg1) == INTEGER_CST
9984 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9986 int width = TYPE_PRECISION (type), w;
9987 wide_int c1 = TREE_OPERAND (arg0, 1);
9988 wide_int c2 = arg1;
9990 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9991 if ((c1 & c2) == c1)
9992 return omit_one_operand_loc (loc, type, arg1,
9993 TREE_OPERAND (arg0, 0));
9995 wide_int msk = wi::mask (width, false,
9996 TYPE_PRECISION (TREE_TYPE (arg1)));
9998 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9999 if (msk.and_not (c1 | c2) == 0)
10000 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10001 TREE_OPERAND (arg0, 0), arg1);
10003 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10004 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10005 mode which allows further optimizations. */
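	  /* E.g. (X & 0x3C) | 0x0F becomes (X & 0x30) | 0x0F, since the
	     low bits of C1 are already provided by C2; but
	     (X & 0xF1) | 0x0F becomes (X & 0xFF) | 0x0F, because C1 | C2
	     forms a byte mask (illustrative examples, not from the
	     original source).  */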
10006 c1 &= msk;
10007 c2 &= msk;
10008 wide_int c3 = c1.and_not (c2);
10009 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10011 wide_int mask = wi::mask (w, false,
10012 TYPE_PRECISION (type));
10013 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10015 c3 = mask;
10016 break;
10020 if (c3 != c1)
10021 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10022 fold_build2_loc (loc, BIT_AND_EXPR, type,
10023 TREE_OPERAND (arg0, 0),
10024 wide_int_to_tree (type,
10025 c3)),
10026 arg1);
10029 /* See if this can be simplified into a rotate first. If that
10030 is unsuccessful continue in the association code. */
10031 goto bit_rotate;
10033 case BIT_XOR_EXPR:
10034 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10035 if (TREE_CODE (arg0) == BIT_AND_EXPR
10036 && INTEGRAL_TYPE_P (type)
10037 && integer_onep (TREE_OPERAND (arg0, 1))
10038 && integer_onep (arg1))
10039 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10040 build_zero_cst (TREE_TYPE (arg0)));
10042 /* See if this can be simplified into a rotate first. If that
10043 is unsuccessful continue in the association code. */
10044 goto bit_rotate;
10046 case BIT_AND_EXPR:
10047 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10048 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10049 && INTEGRAL_TYPE_P (type)
10050 && integer_onep (TREE_OPERAND (arg0, 1))
10051 && integer_onep (arg1))
10053 tree tem2;
10054 tem = TREE_OPERAND (arg0, 0);
10055 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10056 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10057 tem, tem2);
10058 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10059 build_zero_cst (TREE_TYPE (tem)));
10061 /* Fold ~X & 1 as (X & 1) == 0. */
10062 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10063 && INTEGRAL_TYPE_P (type)
10064 && integer_onep (arg1))
10066 tree tem2;
10067 tem = TREE_OPERAND (arg0, 0);
10068 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10069 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10070 tem, tem2);
10071 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10072 build_zero_cst (TREE_TYPE (tem)));
10074 /* Fold !X & 1 as X == 0. */
10075 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10076 && integer_onep (arg1))
10078 tem = TREE_OPERAND (arg0, 0);
10079 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10080 build_zero_cst (TREE_TYPE (tem)));
10083 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10084 multiple of 1 << CST. */
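      /* E.g. (x * 8) & -8 folds to plain x * 8, since x * 8 is already
	 a multiple of 1 << 3 and the mask only clears its three
	 always-zero low-order bits (illustrative example, not from the
	 original source).  */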
10085 if (TREE_CODE (arg1) == INTEGER_CST)
10087 wide_int cst1 = arg1;
10088 wide_int ncst1 = -cst1;
10089 if ((cst1 & ncst1) == ncst1
10090 && multiple_of_p (type, arg0,
10091 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10092 return fold_convert_loc (loc, type, arg0);
10095 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10096 bits from CST2. */
10097 if (TREE_CODE (arg1) == INTEGER_CST
10098 && TREE_CODE (arg0) == MULT_EXPR
10099 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10101 wide_int warg1 = arg1;
10102 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10104 if (masked == 0)
10105 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10106 arg0, arg1);
10107 else if (masked != warg1)
10109 /* Avoid the transform if arg1 is a mask of some
10110 mode which allows further optimizations. */
10111 int pop = wi::popcount (warg1);
10112 if (!(pop >= BITS_PER_UNIT
10113 && exact_log2 (pop) != -1
10114 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10115 return fold_build2_loc (loc, code, type, op0,
10116 wide_int_to_tree (type, masked));
10120 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10121 ((A & N) + B) & M -> (A + B) & M
10122 Similarly if (N & M) == 0,
10123 ((A | N) + B) & M -> (A + B) & M
10124 and for - instead of + (or unary - instead of +)
10125 and/or ^ instead of |.
10126 If B is constant and (B & M) == 0, fold into A & M. */
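      /* For illustration (not from the original source), with M == 7:
	   ((a & 7) + b) & 7  ->  (a + b) & 7
	   ((a | 8) + b) & 7  ->  (a + b) & 7
	   ((a ^ 8) - 4) & 7  ->  (a - 4) & 7  */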
10127 if (TREE_CODE (arg1) == INTEGER_CST)
10129 wide_int cst1 = arg1;
10130 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10131 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10132 && (TREE_CODE (arg0) == PLUS_EXPR
10133 || TREE_CODE (arg0) == MINUS_EXPR
10134 || TREE_CODE (arg0) == NEGATE_EXPR)
10135 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10136 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10138 tree pmop[2];
10139 int which = 0;
10140 wide_int cst0;
10142 /* Now we know that arg0 is (C + D) or (C - D) or
10143 -C and arg1 (M) is == (1LL << cst) - 1.
10144 Store C into PMOP[0] and D into PMOP[1]. */
10145 pmop[0] = TREE_OPERAND (arg0, 0);
10146 pmop[1] = NULL;
10147 if (TREE_CODE (arg0) != NEGATE_EXPR)
10149 pmop[1] = TREE_OPERAND (arg0, 1);
10150 which = 1;
10153 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10154 which = -1;
10156 for (; which >= 0; which--)
10157 switch (TREE_CODE (pmop[which]))
10159 case BIT_AND_EXPR:
10160 case BIT_IOR_EXPR:
10161 case BIT_XOR_EXPR:
10162 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10163 != INTEGER_CST)
10164 break;
10165 cst0 = TREE_OPERAND (pmop[which], 1);
10166 cst0 &= cst1;
10167 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10169 if (cst0 != cst1)
10170 break;
10172 else if (cst0 != 0)
10173 break;
10174 /* If C or D is of the form (A & N) where
10175 (N & M) == M, or of the form (A | N) or
10176 (A ^ N) where (N & M) == 0, replace it with A. */
10177 pmop[which] = TREE_OPERAND (pmop[which], 0);
10178 break;
10179 case INTEGER_CST:
10180 /* If C or D is a N where (N & M) == 0, it can be
10181 omitted (assumed 0). */
10182 if ((TREE_CODE (arg0) == PLUS_EXPR
10183 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10184 && (cst1 & pmop[which]) == 0)
10185 pmop[which] = NULL;
10186 break;
10187 default:
10188 break;
10191 /* Only build anything new if we optimized one or both arguments
10192 above. */
10193 if (pmop[0] != TREE_OPERAND (arg0, 0)
10194 || (TREE_CODE (arg0) != NEGATE_EXPR
10195 && pmop[1] != TREE_OPERAND (arg0, 1)))
10197 tree utype = TREE_TYPE (arg0);
10198 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10200 /* Perform the operations in a type that has defined
10201 overflow behavior. */
10202 utype = unsigned_type_for (TREE_TYPE (arg0));
10203 if (pmop[0] != NULL)
10204 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10205 if (pmop[1] != NULL)
10206 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10209 if (TREE_CODE (arg0) == NEGATE_EXPR)
10210 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10211 else if (TREE_CODE (arg0) == PLUS_EXPR)
10213 if (pmop[0] != NULL && pmop[1] != NULL)
10214 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10215 pmop[0], pmop[1]);
10216 else if (pmop[0] != NULL)
10217 tem = pmop[0];
10218 else if (pmop[1] != NULL)
10219 tem = pmop[1];
10220 else
10221 return build_int_cst (type, 0);
10223 else if (pmop[0] == NULL)
10224 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10225 else
10226 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10227 pmop[0], pmop[1]);
10228 /* TEM is now the new binary +, - or unary - replacement. */
10229 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10230 fold_convert_loc (loc, utype, arg1));
10231 return fold_convert_loc (loc, type, tem);
10236 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10237 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10238 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10240 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10242 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10243 if (mask == -1)
10244 return
10245 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10248 goto associate;
10250 case RDIV_EXPR:
10251 /* Don't touch a floating-point divide by zero unless the mode
10252 of the constant can represent infinity. */
10253 if (TREE_CODE (arg1) == REAL_CST
10254 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10255 && real_zerop (arg1))
10256 return NULL_TREE;
10258 /* (-A) / (-B) -> A / B */
10259 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10260 return fold_build2_loc (loc, RDIV_EXPR, type,
10261 TREE_OPERAND (arg0, 0),
10262 negate_expr (arg1));
10263 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10264 return fold_build2_loc (loc, RDIV_EXPR, type,
10265 negate_expr (arg0),
10266 TREE_OPERAND (arg1, 0));
10267 return NULL_TREE;
10269 case TRUNC_DIV_EXPR:
10270 /* Fall through */
10272 case FLOOR_DIV_EXPR:
10273 /* Simplify A / (B << N) where A and B are positive and B is
10274 a power of 2, to A >> (N + log2(B)). */
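      /* E.g. for unsigned A, A / (4 << N) becomes A >> (N + 2)
	 (illustrative example, not from the original source).  */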
10275 strict_overflow_p = false;
10276 if (TREE_CODE (arg1) == LSHIFT_EXPR
10277 && (TYPE_UNSIGNED (type)
10278 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10280 tree sval = TREE_OPERAND (arg1, 0);
10281 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10283 tree sh_cnt = TREE_OPERAND (arg1, 1);
10284 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10285 wi::exact_log2 (sval));
10287 if (strict_overflow_p)
10288 fold_overflow_warning (("assuming signed overflow does not "
10289 "occur when simplifying A / (B << N)"),
10290 WARN_STRICT_OVERFLOW_MISC);
10292 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10293 sh_cnt, pow2);
10294 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10295 fold_convert_loc (loc, type, arg0), sh_cnt);
10299 /* Fall through */
10301 case ROUND_DIV_EXPR:
10302 case CEIL_DIV_EXPR:
10303 case EXACT_DIV_EXPR:
10304 if (integer_zerop (arg1))
10305 return NULL_TREE;
10307 /* Convert -A / -B to A / B when the type is signed and overflow is
10308 undefined. */
10309 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10310 && TREE_CODE (arg0) == NEGATE_EXPR
10311 && negate_expr_p (op1))
10313 if (INTEGRAL_TYPE_P (type))
10314 fold_overflow_warning (("assuming signed overflow does not occur "
10315 "when distributing negation across "
10316 "division"),
10317 WARN_STRICT_OVERFLOW_MISC);
10318 return fold_build2_loc (loc, code, type,
10319 fold_convert_loc (loc, type,
10320 TREE_OPERAND (arg0, 0)),
10321 negate_expr (op1));
10323 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10324 && TREE_CODE (arg1) == NEGATE_EXPR
10325 && negate_expr_p (op0))
10327 if (INTEGRAL_TYPE_P (type))
10328 fold_overflow_warning (("assuming signed overflow does not occur "
10329 "when distributing negation across "
10330 "division"),
10331 WARN_STRICT_OVERFLOW_MISC);
10332 return fold_build2_loc (loc, code, type,
10333 negate_expr (op0),
10334 fold_convert_loc (loc, type,
10335 TREE_OPERAND (arg1, 0)));
10338 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10339 operation, EXACT_DIV_EXPR.
10341 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
9342 At one time others generated faster code; it's not clear whether they do
9343 after the last round of changes to the DIV code in expmed.c. */
10344 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10345 && multiple_of_p (type, arg0, arg1))
10346 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10347 fold_convert (type, arg0),
10348 fold_convert (type, arg1));
10350 strict_overflow_p = false;
10351 if (TREE_CODE (arg1) == INTEGER_CST
10352 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10353 &strict_overflow_p)))
10355 if (strict_overflow_p)
10356 fold_overflow_warning (("assuming signed overflow does not occur "
10357 "when simplifying division"),
10358 WARN_STRICT_OVERFLOW_MISC);
10359 return fold_convert_loc (loc, type, tem);
10362 return NULL_TREE;
10364 case CEIL_MOD_EXPR:
10365 case FLOOR_MOD_EXPR:
10366 case ROUND_MOD_EXPR:
10367 case TRUNC_MOD_EXPR:
10368 strict_overflow_p = false;
10369 if (TREE_CODE (arg1) == INTEGER_CST
10370 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10371 &strict_overflow_p)))
10373 if (strict_overflow_p)
10374 fold_overflow_warning (("assuming signed overflow does not occur "
10375 "when simplifying modulus"),
10376 WARN_STRICT_OVERFLOW_MISC);
10377 return fold_convert_loc (loc, type, tem);
10380 return NULL_TREE;
10382 case LROTATE_EXPR:
10383 case RROTATE_EXPR:
10384 case RSHIFT_EXPR:
10385 case LSHIFT_EXPR:
10386 /* Since a negative shift count is not well-defined,
10387 don't try to compute it in the compiler. */
10388 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10389 return NULL_TREE;
10391 prec = element_precision (type);
10393 /* If we have a rotate of a bit operation with the rotate count and
10394 the second operand of the bit operation both constant,
10395 permute the two operations. */
10396 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10397 && (TREE_CODE (arg0) == BIT_AND_EXPR
10398 || TREE_CODE (arg0) == BIT_IOR_EXPR
10399 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10400 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10401 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10402 fold_build2_loc (loc, code, type,
10403 TREE_OPERAND (arg0, 0), arg1),
10404 fold_build2_loc (loc, code, type,
10405 TREE_OPERAND (arg0, 1), arg1));
10407 /* Two consecutive rotates adding up to some integer
10408 multiple of the precision of the type can be ignored. */
10409 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10410 && TREE_CODE (arg0) == RROTATE_EXPR
10411 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10412 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10413 prec) == 0)
10414 return TREE_OPERAND (arg0, 0);
10416 return NULL_TREE;
10418 case MIN_EXPR:
10419 case MAX_EXPR:
10420 goto associate;
10422 case TRUTH_ANDIF_EXPR:
10423 /* Note that the operands of this must be ints
10424 and their values must be 0 or 1.
10425 ("true" is a fixed value perhaps depending on the language.) */
10426 /* If first arg is constant zero, return it. */
10427 if (integer_zerop (arg0))
10428 return fold_convert_loc (loc, type, arg0);
10429 case TRUTH_AND_EXPR:
10430 /* If either arg is constant true, drop it. */
10431 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10432 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10433 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10434 /* Preserve sequence points. */
10435 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10436 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10437 /* If second arg is constant zero, result is zero, but first arg
10438 must be evaluated. */
10439 if (integer_zerop (arg1))
10440 return omit_one_operand_loc (loc, type, arg1, arg0);
10441 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10442 case will be handled here. */
10443 if (integer_zerop (arg0))
10444 return omit_one_operand_loc (loc, type, arg0, arg1);
10446 /* !X && X is always false. */
10447 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10448 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10449 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10450 /* X && !X is always false. */
10451 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10452 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10453 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10455 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10456 means A >= Y && A != MAX, but in this case we know that
10457 A < X <= MAX. */
10459 if (!TREE_SIDE_EFFECTS (arg0)
10460 && !TREE_SIDE_EFFECTS (arg1))
10462 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10463 if (tem && !operand_equal_p (tem, arg0, 0))
10464 return fold_build2_loc (loc, code, type, tem, arg1);
10466 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10467 if (tem && !operand_equal_p (tem, arg1, 0))
10468 return fold_build2_loc (loc, code, type, arg0, tem);
10471 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10472 != NULL_TREE)
10473 return tem;
10475 return NULL_TREE;
10477 case TRUTH_ORIF_EXPR:
10478 /* Note that the operands of this must be ints
10479 and their values must be 0 or true.
10480 ("true" is a fixed value perhaps depending on the language.) */
10481 /* If first arg is constant true, return it. */
10482 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10483 return fold_convert_loc (loc, type, arg0);
10484 case TRUTH_OR_EXPR:
10485 /* If either arg is constant zero, drop it. */
10486 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10487 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10488 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10489 /* Preserve sequence points. */
10490 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10491 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10492 /* If second arg is constant true, result is true, but we must
10493 evaluate first arg. */
10494 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10495 return omit_one_operand_loc (loc, type, arg1, arg0);
10496 /* Likewise for first arg, but note this only occurs here for
10497 TRUTH_OR_EXPR. */
10498 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10499 return omit_one_operand_loc (loc, type, arg0, arg1);
10501 /* !X || X is always true. */
10502 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10503 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10504 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10505 /* X || !X is always true. */
10506 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10507 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10508 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10510 /* (X && !Y) || (!X && Y) is X ^ Y */
10511 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10512 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10514 tree a0, a1, l0, l1, n0, n1;
10516 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10517 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10519 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10520 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10522 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10523 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10525 if ((operand_equal_p (n0, a0, 0)
10526 && operand_equal_p (n1, a1, 0))
10527 || (operand_equal_p (n0, a1, 0)
10528 && operand_equal_p (n1, a0, 0)))
10529 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10532 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10533 != NULL_TREE)
10534 return tem;
10536 return NULL_TREE;
10538 case TRUTH_XOR_EXPR:
10539 /* If the second arg is constant zero, drop it. */
10540 if (integer_zerop (arg1))
10541 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10542 /* If the second arg is constant true, this is a logical inversion. */
10543 if (integer_onep (arg1))
10545 tem = invert_truthvalue_loc (loc, arg0);
10546 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10548 /* Identical arguments cancel to zero. */
10549 if (operand_equal_p (arg0, arg1, 0))
10550 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10552 /* !X ^ X is always true. */
10553 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10554 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10555 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10557 /* X ^ !X is always true. */
10558 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10559 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10560 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10562 return NULL_TREE;
10564 case EQ_EXPR:
10565 case NE_EXPR:
10566 STRIP_NOPS (arg0);
10567 STRIP_NOPS (arg1);
10569 tem = fold_comparison (loc, code, type, op0, op1);
10570 if (tem != NULL_TREE)
10571 return tem;
10573 /* bool_var != 1 becomes !bool_var. */
10574 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10575 && code == NE_EXPR)
10576 return fold_convert_loc (loc, type,
10577 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10578 TREE_TYPE (arg0), arg0));
10580 /* bool_var == 0 becomes !bool_var. */
10581 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10582 && code == EQ_EXPR)
10583 return fold_convert_loc (loc, type,
10584 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10585 TREE_TYPE (arg0), arg0));
10587 /* !exp != 0 becomes !exp */
10588 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10589 && code == NE_EXPR)
10590 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10592 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10593 if ((TREE_CODE (arg0) == PLUS_EXPR
10594 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10595 || TREE_CODE (arg0) == MINUS_EXPR)
10596 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10597 0)),
10598 arg1, 0)
10599 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10600 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10602 tree val = TREE_OPERAND (arg0, 1);
10603 val = fold_build2_loc (loc, code, type, val,
10604 build_int_cst (TREE_TYPE (val), 0));
10605 return omit_two_operands_loc (loc, type, val,
10606 TREE_OPERAND (arg0, 0), arg1);
10609 /* Transform comparisons of the form X CMP X +- Y to Y CMP 0. */
10610 if ((TREE_CODE (arg1) == PLUS_EXPR
10611 || TREE_CODE (arg1) == POINTER_PLUS_EXPR
10612 || TREE_CODE (arg1) == MINUS_EXPR)
10613 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10614 0)),
10615 arg0, 0)
10616 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10617 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10619 tree val = TREE_OPERAND (arg1, 1);
10620 val = fold_build2_loc (loc, code, type, val,
10621 build_int_cst (TREE_TYPE (val), 0));
10622 return omit_two_operands_loc (loc, type, val,
10623 TREE_OPERAND (arg1, 0), arg0);
10626 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
10627 if (TREE_CODE (arg0) == MINUS_EXPR
10628 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10629 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10630 1)),
10631 arg1, 0)
10632 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10633 return omit_two_operands_loc (loc, type,
10634 code == NE_EXPR
10635 ? boolean_true_node : boolean_false_node,
10636 TREE_OPERAND (arg0, 1), arg1);
10638 /* Transform comparisons of the form X CMP C - X if C % 2 == 1. */
10639 if (TREE_CODE (arg1) == MINUS_EXPR
10640 && TREE_CODE (TREE_OPERAND (arg1, 0)) == INTEGER_CST
10641 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10642 1)),
10643 arg0, 0)
10644 && wi::extract_uhwi (TREE_OPERAND (arg1, 0), 0, 1) == 1)
10645 return omit_two_operands_loc (loc, type,
10646 code == NE_EXPR
10647 ? boolean_true_node : boolean_false_node,
10648 TREE_OPERAND (arg1, 1), arg0);
10650 /* If this is an EQ or NE comparison with zero and ARG0 is
10651 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10652 two operations, but the latter can be done in one less insn
10653 on machines that have only two-operand insns or on which a
10654 constant cannot be the first operand. */
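      /* E.g. ((1 << n) & flags) == 0 becomes ((flags >> n) & 1) == 0
	 (illustrative example, not from the original source).  */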
10655 if (TREE_CODE (arg0) == BIT_AND_EXPR
10656 && integer_zerop (arg1))
10658 tree arg00 = TREE_OPERAND (arg0, 0);
10659 tree arg01 = TREE_OPERAND (arg0, 1);
10660 if (TREE_CODE (arg00) == LSHIFT_EXPR
10661 && integer_onep (TREE_OPERAND (arg00, 0)))
10663 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10664 arg01, TREE_OPERAND (arg00, 1));
10665 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10666 build_int_cst (TREE_TYPE (arg0), 1));
10667 return fold_build2_loc (loc, code, type,
10668 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10669 arg1);
10671 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10672 && integer_onep (TREE_OPERAND (arg01, 0)))
10674 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10675 arg00, TREE_OPERAND (arg01, 1));
10676 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10677 build_int_cst (TREE_TYPE (arg0), 1));
10678 return fold_build2_loc (loc, code, type,
10679 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10680 arg1);
10684 /* If this is an NE or EQ comparison of zero against the result of a
10685 signed MOD operation whose second operand is a power of 2, make
10686 the MOD operation unsigned since it is simpler and equivalent. */
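      /* E.g. with signed int x, (x % 4) == 0 becomes
	 ((unsigned int) x % 4) == 0, which only tests the two low bits
	 (illustrative example, not from the original source).  */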
10687 if (integer_zerop (arg1)
10688 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10689 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10690 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10691 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10692 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10693 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10695 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10696 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10697 fold_convert_loc (loc, newtype,
10698 TREE_OPERAND (arg0, 0)),
10699 fold_convert_loc (loc, newtype,
10700 TREE_OPERAND (arg0, 1)));
10702 return fold_build2_loc (loc, code, type, newmod,
10703 fold_convert_loc (loc, newtype, arg1));
10706 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10707 C1 is a valid shift constant, and C2 is a power of two, i.e.
10708 a single bit. */
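      /* E.g. ((x >> 3) & 4) != 0 becomes (x & (4 << 3)) != 0, i.e.
	 (x & 32) != 0, because the shifted mask still fits in the type
	 (illustrative example, not from the original source).  */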
10709 if (TREE_CODE (arg0) == BIT_AND_EXPR
10710 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10711 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10712 == INTEGER_CST
10713 && integer_pow2p (TREE_OPERAND (arg0, 1))
10714 && integer_zerop (arg1))
10716 tree itype = TREE_TYPE (arg0);
10717 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10718 prec = TYPE_PRECISION (itype);
10720 /* Check for a valid shift count. */
10721 if (wi::ltu_p (arg001, prec))
10723 tree arg01 = TREE_OPERAND (arg0, 1);
10724 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10725 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10726 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10727 can be rewritten as (X & (C2 << C1)) != 0. */
10728 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10730 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10731 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10732 return fold_build2_loc (loc, code, type, tem,
10733 fold_convert_loc (loc, itype, arg1));
10735 /* Otherwise, for signed (arithmetic) shifts,
10736 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10737 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10738 else if (!TYPE_UNSIGNED (itype))
10739 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10740 arg000, build_int_cst (itype, 0));
 10741 /* Otherwise, for unsigned (logical) shifts,
10742 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10743 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10744 else
10745 return omit_one_operand_loc (loc, type,
10746 code == EQ_EXPR ? integer_one_node
10747 : integer_zero_node,
10748 arg000);
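 /* Illustrative example (guarded out of the build, not part of the
 folder): the no-overflow case of the rewrite above with C1 == 3 and
 the single-bit C2 == 4, where C2 << C1 still fits in 8 bits. */
 #if 0
 for (unsigned x = 0; x < 256; x++)
   gcc_assert ((((x >> 3) & 4u) != 0) == ((x & (4u << 3)) != 0));
 #endif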
10752 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10753 Similarly for NE_EXPR. */
10754 if (TREE_CODE (arg0) == BIT_AND_EXPR
10755 && TREE_CODE (arg1) == INTEGER_CST
10756 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10758 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10759 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10760 TREE_OPERAND (arg0, 1));
10761 tree dandnotc
10762 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10763 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10764 notc);
10765 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10766 if (integer_nonzerop (dandnotc))
10767 return omit_one_operand_loc (loc, type, rslt, arg0);
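 /* Illustrative example (guarded out of the build, not part of the
 folder): 0x10 has a bit outside the mask 0x0f (0x10 & ~0x0f != 0),
 so (A & 0x0f) == 0x10 is always false. */
 #if 0
 for (unsigned a = 0; a < 256; a++)
   gcc_assert ((a & 0x0f) != 0x10);
 #endif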
10770 /* If this is a comparison of a field, we may be able to simplify it. */
10771 if ((TREE_CODE (arg0) == COMPONENT_REF
10772 || TREE_CODE (arg0) == BIT_FIELD_REF)
10773 /* Handle the constant case even without -O
10774 to make sure the warnings are given. */
10775 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10777 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10778 if (t1)
10779 return t1;
10782 /* Optimize comparisons of strlen vs zero to a compare of the
10783 first character of the string vs zero. To wit,
10784 strlen(ptr) == 0 => *ptr == 0
10785 strlen(ptr) != 0 => *ptr != 0
10786 Other cases should reduce to one of these two (or a constant)
10787 due to the return value of strlen being unsigned. */
10788 if (TREE_CODE (arg0) == CALL_EXPR
10789 && integer_zerop (arg1))
10791 tree fndecl = get_callee_fndecl (arg0);
10793 if (fndecl
10794 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10795 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10796 && call_expr_nargs (arg0) == 1
10797 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10799 tree iref = build_fold_indirect_ref_loc (loc,
10800 CALL_EXPR_ARG (arg0, 0));
10801 return fold_build2_loc (loc, code, type, iref,
10802 build_int_cst (TREE_TYPE (iref), 0));
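 /* Illustrative example (guarded out of the build, not part of the
 folder): strlen (p) == 0 holds exactly when the first character is
 the terminating NUL. */
 #if 0
 {
   const char *p = "x";
   gcc_assert ((strlen (p) == 0) == (*p == '\0'));
   p = "";
   gcc_assert ((strlen (p) == 0) == (*p == '\0'));
 }
 #endif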
10806 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10807 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10808 if (TREE_CODE (arg0) == RSHIFT_EXPR
10809 && integer_zerop (arg1)
10810 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10812 tree arg00 = TREE_OPERAND (arg0, 0);
10813 tree arg01 = TREE_OPERAND (arg0, 1);
10814 tree itype = TREE_TYPE (arg00);
10815 if (wi::eq_p (arg01, element_precision (itype) - 1))
10817 if (TYPE_UNSIGNED (itype))
10819 itype = signed_type_for (itype);
10820 arg00 = fold_convert_loc (loc, itype, arg00);
10822 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10823 type, arg00, build_zero_cst (itype));
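 /* Illustrative example (guarded out of the build, not part of the
 folder): shifting an 8-bit value right by 7 leaves exactly the sign
 bit, so on a two's-complement target the result is nonzero iff the
 same bits read as a signed value are negative. */
 #if 0
 for (unsigned i = 0; i < 256; i++)
   {
     unsigned char x = (unsigned char) i;
     gcc_assert (((x >> 7) != 0) == ((signed char) x < 0));
   }
 #endif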
10827 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10828 (X & C) == 0 when C is a single bit. */
10829 if (TREE_CODE (arg0) == BIT_AND_EXPR
10830 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10831 && integer_zerop (arg1)
10832 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10834 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10835 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10836 TREE_OPERAND (arg0, 1));
10837 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10838 type, tem,
10839 fold_convert_loc (loc, TREE_TYPE (arg0),
10840 arg1));
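 /* Illustrative example (guarded out of the build, not part of the
 folder): with the single-bit mask 8, ~X having the bit clear is the
 same as X having it set. */
 #if 0
 for (unsigned x = 0; x < 256; x++)
   gcc_assert (((~x & 8u) == 0) == ((x & 8u) != 0));
 #endif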
10843 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10844 constant C is a power of two, i.e. a single bit. */
10845 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10846 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10847 && integer_zerop (arg1)
10848 && integer_pow2p (TREE_OPERAND (arg0, 1))
10849 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10850 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10852 tree arg00 = TREE_OPERAND (arg0, 0);
10853 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10854 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10857 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
 10858 when C is a power of two, i.e. a single bit. */
10859 if (TREE_CODE (arg0) == BIT_AND_EXPR
10860 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10861 && integer_zerop (arg1)
10862 && integer_pow2p (TREE_OPERAND (arg0, 1))
10863 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10864 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10866 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10867 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10868 arg000, TREE_OPERAND (arg0, 1));
10869 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10870 tem, build_int_cst (TREE_TYPE (tem), 0));
10873 if (integer_zerop (arg1)
10874 && tree_expr_nonzero_p (arg0))
 10876 tree res = constant_boolean_node (code == NE_EXPR, type);
10877 return omit_one_operand_loc (loc, type, res, arg0);
 10880 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
10881 if (TREE_CODE (arg0) == BIT_AND_EXPR
10882 && TREE_CODE (arg1) == BIT_AND_EXPR)
10884 tree arg00 = TREE_OPERAND (arg0, 0);
10885 tree arg01 = TREE_OPERAND (arg0, 1);
10886 tree arg10 = TREE_OPERAND (arg1, 0);
10887 tree arg11 = TREE_OPERAND (arg1, 1);
10888 tree itype = TREE_TYPE (arg0);
10890 if (operand_equal_p (arg01, arg11, 0))
10891 return fold_build2_loc (loc, code, type,
10892 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10893 fold_build2_loc (loc,
10894 BIT_XOR_EXPR, itype,
10895 arg00, arg10),
10896 arg01),
10897 build_zero_cst (itype));
10899 if (operand_equal_p (arg01, arg10, 0))
10900 return fold_build2_loc (loc, code, type,
10901 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10902 fold_build2_loc (loc,
10903 BIT_XOR_EXPR, itype,
10904 arg00, arg11),
10905 arg01),
10906 build_zero_cst (itype));
10908 if (operand_equal_p (arg00, arg11, 0))
10909 return fold_build2_loc (loc, code, type,
10910 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10911 fold_build2_loc (loc,
10912 BIT_XOR_EXPR, itype,
10913 arg01, arg10),
10914 arg00),
10915 build_zero_cst (itype));
10917 if (operand_equal_p (arg00, arg10, 0))
10918 return fold_build2_loc (loc, code, type,
10919 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10920 fold_build2_loc (loc,
10921 BIT_XOR_EXPR, itype,
10922 arg01, arg11),
10923 arg00),
10924 build_zero_cst (itype));
10927 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10928 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10930 tree arg00 = TREE_OPERAND (arg0, 0);
10931 tree arg01 = TREE_OPERAND (arg0, 1);
10932 tree arg10 = TREE_OPERAND (arg1, 0);
10933 tree arg11 = TREE_OPERAND (arg1, 1);
10934 tree itype = TREE_TYPE (arg0);
10936 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10937 operand_equal_p guarantees no side-effects so we don't need
10938 to use omit_one_operand on Z. */
10939 if (operand_equal_p (arg01, arg11, 0))
10940 return fold_build2_loc (loc, code, type, arg00,
10941 fold_convert_loc (loc, TREE_TYPE (arg00),
10942 arg10));
10943 if (operand_equal_p (arg01, arg10, 0))
10944 return fold_build2_loc (loc, code, type, arg00,
10945 fold_convert_loc (loc, TREE_TYPE (arg00),
10946 arg11));
10947 if (operand_equal_p (arg00, arg11, 0))
10948 return fold_build2_loc (loc, code, type, arg01,
10949 fold_convert_loc (loc, TREE_TYPE (arg01),
10950 arg10));
10951 if (operand_equal_p (arg00, arg10, 0))
10952 return fold_build2_loc (loc, code, type, arg01,
10953 fold_convert_loc (loc, TREE_TYPE (arg01),
10954 arg11));
10956 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10957 if (TREE_CODE (arg01) == INTEGER_CST
10958 && TREE_CODE (arg11) == INTEGER_CST)
10960 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10961 fold_convert_loc (loc, itype, arg11));
10962 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10963 return fold_build2_loc (loc, code, type, tem,
10964 fold_convert_loc (loc, itype, arg10));
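 /* Illustrative example (guarded out of the build, not part of the
 folder): XOR is its own inverse, so XORing both sides by C2 merges
 the two constants onto one side; shown here with C1 == 5, C2 == 9. */
 #if 0
 for (unsigned x = 0; x < 16; x++)
   for (unsigned y = 0; y < 16; y++)
     gcc_assert (((x ^ 5u) == (y ^ 9u)) == ((x ^ (5u ^ 9u)) == y));
 #endif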
10968 /* Attempt to simplify equality/inequality comparisons of complex
10969 values. Only lower the comparison if the result is known or
10970 can be simplified to a single scalar comparison. */
10971 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10972 || TREE_CODE (arg0) == COMPLEX_CST)
10973 && (TREE_CODE (arg1) == COMPLEX_EXPR
10974 || TREE_CODE (arg1) == COMPLEX_CST))
10976 tree real0, imag0, real1, imag1;
10977 tree rcond, icond;
10979 if (TREE_CODE (arg0) == COMPLEX_EXPR)
10981 real0 = TREE_OPERAND (arg0, 0);
10982 imag0 = TREE_OPERAND (arg0, 1);
10984 else
10986 real0 = TREE_REALPART (arg0);
10987 imag0 = TREE_IMAGPART (arg0);
10990 if (TREE_CODE (arg1) == COMPLEX_EXPR)
10992 real1 = TREE_OPERAND (arg1, 0);
10993 imag1 = TREE_OPERAND (arg1, 1);
10995 else
10997 real1 = TREE_REALPART (arg1);
10998 imag1 = TREE_IMAGPART (arg1);
11001 rcond = fold_binary_loc (loc, code, type, real0, real1);
11002 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11004 if (integer_zerop (rcond))
11006 if (code == EQ_EXPR)
11007 return omit_two_operands_loc (loc, type, boolean_false_node,
11008 imag0, imag1);
11009 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11011 else
11013 if (code == NE_EXPR)
11014 return omit_two_operands_loc (loc, type, boolean_true_node,
11015 imag0, imag1);
11016 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11020 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11021 if (icond && TREE_CODE (icond) == INTEGER_CST)
11023 if (integer_zerop (icond))
11025 if (code == EQ_EXPR)
11026 return omit_two_operands_loc (loc, type, boolean_false_node,
11027 real0, real1);
11028 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11030 else
11032 if (code == NE_EXPR)
11033 return omit_two_operands_loc (loc, type, boolean_true_node,
11034 real0, real1);
11035 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11040 return NULL_TREE;
11042 case LT_EXPR:
11043 case GT_EXPR:
11044 case LE_EXPR:
11045 case GE_EXPR:
11046 tem = fold_comparison (loc, code, type, op0, op1);
11047 if (tem != NULL_TREE)
11048 return tem;
11050 /* Transform comparisons of the form X +- C CMP X. */
11051 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11052 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11053 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11054 && !HONOR_SNANS (arg0))
11055 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11056 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11058 tree arg01 = TREE_OPERAND (arg0, 1);
11059 enum tree_code code0 = TREE_CODE (arg0);
11060 int is_positive;
11062 if (TREE_CODE (arg01) == REAL_CST)
11063 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11064 else
11065 is_positive = tree_int_cst_sgn (arg01);
11067 /* (X - c) > X becomes false. */
11068 if (code == GT_EXPR
11069 && ((code0 == MINUS_EXPR && is_positive >= 0)
11070 || (code0 == PLUS_EXPR && is_positive <= 0)))
11072 if (TREE_CODE (arg01) == INTEGER_CST
11073 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11074 fold_overflow_warning (("assuming signed overflow does not "
11075 "occur when assuming that (X - c) > X "
11076 "is always false"),
11077 WARN_STRICT_OVERFLOW_ALL);
11078 return constant_boolean_node (0, type);
11081 /* Likewise (X + c) < X becomes false. */
11082 if (code == LT_EXPR
11083 && ((code0 == PLUS_EXPR && is_positive >= 0)
11084 || (code0 == MINUS_EXPR && is_positive <= 0)))
11086 if (TREE_CODE (arg01) == INTEGER_CST
11087 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11088 fold_overflow_warning (("assuming signed overflow does not "
11089 "occur when assuming that "
11090 "(X + c) < X is always false"),
11091 WARN_STRICT_OVERFLOW_ALL);
11092 return constant_boolean_node (0, type);
11095 /* Convert (X - c) <= X to true. */
11096 if (!HONOR_NANS (arg1)
11097 && code == LE_EXPR
11098 && ((code0 == MINUS_EXPR && is_positive >= 0)
11099 || (code0 == PLUS_EXPR && is_positive <= 0)))
11101 if (TREE_CODE (arg01) == INTEGER_CST
11102 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11103 fold_overflow_warning (("assuming signed overflow does not "
11104 "occur when assuming that "
11105 "(X - c) <= X is always true"),
11106 WARN_STRICT_OVERFLOW_ALL);
11107 return constant_boolean_node (1, type);
11110 /* Convert (X + c) >= X to true. */
11111 if (!HONOR_NANS (arg1)
11112 && code == GE_EXPR
11113 && ((code0 == PLUS_EXPR && is_positive >= 0)
11114 || (code0 == MINUS_EXPR && is_positive <= 0)))
11116 if (TREE_CODE (arg01) == INTEGER_CST
11117 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11118 fold_overflow_warning (("assuming signed overflow does not "
11119 "occur when assuming that "
11120 "(X + c) >= X is always true"),
11121 WARN_STRICT_OVERFLOW_ALL);
11122 return constant_boolean_node (1, type);
11125 if (TREE_CODE (arg01) == INTEGER_CST)
11127 /* Convert X + c > X and X - c < X to true for integers. */
11128 if (code == GT_EXPR
11129 && ((code0 == PLUS_EXPR && is_positive > 0)
11130 || (code0 == MINUS_EXPR && is_positive < 0)))
11132 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11133 fold_overflow_warning (("assuming signed overflow does "
11134 "not occur when assuming that "
11135 "(X + c) > X is always true"),
11136 WARN_STRICT_OVERFLOW_ALL);
11137 return constant_boolean_node (1, type);
11140 if (code == LT_EXPR
11141 && ((code0 == MINUS_EXPR && is_positive > 0)
11142 || (code0 == PLUS_EXPR && is_positive < 0)))
11144 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11145 fold_overflow_warning (("assuming signed overflow does "
11146 "not occur when assuming that "
11147 "(X - c) < X is always true"),
11148 WARN_STRICT_OVERFLOW_ALL);
11149 return constant_boolean_node (1, type);
11152 /* Convert X + c <= X and X - c >= X to false for integers. */
11153 if (code == LE_EXPR
11154 && ((code0 == PLUS_EXPR && is_positive > 0)
11155 || (code0 == MINUS_EXPR && is_positive < 0)))
11157 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11158 fold_overflow_warning (("assuming signed overflow does "
11159 "not occur when assuming that "
11160 "(X + c) <= X is always false"),
11161 WARN_STRICT_OVERFLOW_ALL);
11162 return constant_boolean_node (0, type);
11165 if (code == GE_EXPR
11166 && ((code0 == MINUS_EXPR && is_positive > 0)
11167 || (code0 == PLUS_EXPR && is_positive < 0)))
11169 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11170 fold_overflow_warning (("assuming signed overflow does "
11171 "not occur when assuming that "
11172 "(X - c) >= X is always false"),
11173 WARN_STRICT_OVERFLOW_ALL);
11174 return constant_boolean_node (0, type);
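 /* Illustrative example (guarded out of the build, not part of the
 folder): the folds above rely on signed overflow being undefined.
 Under the default strict-overflow semantics the function below may be
 folded to return 1; with -fwrapv it must really compare, since
 INT_MAX + 1 wraps to INT_MIN there. */
 #if 0
 static int
 plus_one_gt_x (int x)
 {
   return x + 1 > x;
 }
 #endif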
11179 /* If we are comparing an ABS_EXPR with a constant, we can
11180 convert all the cases into explicit comparisons, but they may
11181 well not be faster than doing the ABS and one comparison.
11182 But ABS (X) <= C is a range comparison, which becomes a subtraction
11183 and a comparison, and is probably faster. */
11184 if (code == LE_EXPR
11185 && TREE_CODE (arg1) == INTEGER_CST
11186 && TREE_CODE (arg0) == ABS_EXPR
11187 && ! TREE_SIDE_EFFECTS (arg0)
11188 && (0 != (tem = negate_expr (arg1)))
11189 && TREE_CODE (tem) == INTEGER_CST
11190 && !TREE_OVERFLOW (tem))
11191 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11192 build2 (GE_EXPR, type,
11193 TREE_OPERAND (arg0, 0), tem),
11194 build2 (LE_EXPR, type,
11195 TREE_OPERAND (arg0, 0), arg1));
11197 /* Convert ABS_EXPR<x> >= 0 to true. */
11198 strict_overflow_p = false;
11199 if (code == GE_EXPR
11200 && (integer_zerop (arg1)
11201 || (! HONOR_NANS (arg0)
11202 && real_zerop (arg1)))
11203 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11205 if (strict_overflow_p)
11206 fold_overflow_warning (("assuming signed overflow does not occur "
11207 "when simplifying comparison of "
11208 "absolute value and zero"),
11209 WARN_STRICT_OVERFLOW_CONDITIONAL);
11210 return omit_one_operand_loc (loc, type,
11211 constant_boolean_node (true, type),
11212 arg0);
11215 /* Convert ABS_EXPR<x> < 0 to false. */
11216 strict_overflow_p = false;
11217 if (code == LT_EXPR
11218 && (integer_zerop (arg1) || real_zerop (arg1))
11219 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11221 if (strict_overflow_p)
11222 fold_overflow_warning (("assuming signed overflow does not occur "
11223 "when simplifying comparison of "
11224 "absolute value and zero"),
11225 WARN_STRICT_OVERFLOW_CONDITIONAL);
11226 return omit_one_operand_loc (loc, type,
11227 constant_boolean_node (false, type),
11228 arg0);
11231 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11232 and similarly for >= into !=. */
11233 if ((code == LT_EXPR || code == GE_EXPR)
11234 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11235 && TREE_CODE (arg1) == LSHIFT_EXPR
11236 && integer_onep (TREE_OPERAND (arg1, 0)))
11237 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11238 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11239 TREE_OPERAND (arg1, 1)),
11240 build_zero_cst (TREE_TYPE (arg0)));
11242 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11243 otherwise Y might be >= # of bits in X's type and thus e.g.
 11244 (unsigned char) (1 << Y) for Y == 15 might be 0.
11245 If the cast is widening, then 1 << Y should have unsigned type,
11246 otherwise if Y is number of bits in the signed shift type minus 1,
 11247 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
 11248 Y == 31 might be 0xffffffff80000000. */
11249 if ((code == LT_EXPR || code == GE_EXPR)
11250 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11251 && CONVERT_EXPR_P (arg1)
11252 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11253 && (element_precision (TREE_TYPE (arg1))
11254 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11255 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11256 || (element_precision (TREE_TYPE (arg1))
11257 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11258 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11260 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11261 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11262 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11263 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11264 build_zero_cst (TREE_TYPE (arg0)));
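 /* Illustrative example (guarded out of the build, not part of the
 folder): the narrowing-cast hazard described above; for an 8-bit
 target type and Y == 15 the shifted bit is discarded entirely. */
 #if 0
 gcc_assert ((unsigned char) (1 << 15) == 0);
 #endif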
11267 return NULL_TREE;
11269 case UNORDERED_EXPR:
11270 case ORDERED_EXPR:
11271 case UNLT_EXPR:
11272 case UNLE_EXPR:
11273 case UNGT_EXPR:
11274 case UNGE_EXPR:
11275 case UNEQ_EXPR:
11276 case LTGT_EXPR:
11277 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11279 tree targ0 = strip_float_extensions (arg0);
11280 tree targ1 = strip_float_extensions (arg1);
11281 tree newtype = TREE_TYPE (targ0);
11283 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11284 newtype = TREE_TYPE (targ1);
11286 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11287 return fold_build2_loc (loc, code, type,
11288 fold_convert_loc (loc, newtype, targ0),
11289 fold_convert_loc (loc, newtype, targ1));
11292 return NULL_TREE;
11294 case COMPOUND_EXPR:
11295 /* When pedantic, a compound expression can be neither an lvalue
11296 nor an integer constant expression. */
11297 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11298 return NULL_TREE;
 11299 /* Don't let (0, 0) be a null pointer constant. */
11300 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11301 : fold_convert_loc (loc, type, arg1);
11302 return pedantic_non_lvalue_loc (loc, tem);
11304 case ASSERT_EXPR:
11305 /* An ASSERT_EXPR should never be passed to fold_binary. */
11306 gcc_unreachable ();
11308 default:
11309 return NULL_TREE;
11310 } /* switch (code) */
11313 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11314 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11315 of GOTO_EXPR. */
11317 static tree
11318 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11320 switch (TREE_CODE (*tp))
11322 case LABEL_EXPR:
11323 return *tp;
11325 case GOTO_EXPR:
11326 *walk_subtrees = 0;
11328 /* ... fall through ... */
11330 default:
11331 return NULL_TREE;
11335 /* Return whether the sub-tree ST contains a label which is accessible from
11336 outside the sub-tree. */
11338 static bool
11339 contains_label_p (tree st)
11341 return
11342 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
11345 /* Fold a ternary expression of code CODE and type TYPE with operands
11346 OP0, OP1, and OP2. Return the folded expression if folding is
11347 successful. Otherwise, return NULL_TREE. */
11349 tree
11350 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11351 tree op0, tree op1, tree op2)
11353 tree tem;
11354 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11355 enum tree_code_class kind = TREE_CODE_CLASS (code);
11357 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11358 && TREE_CODE_LENGTH (code) == 3);
11360 /* If this is a commutative operation, and OP0 is a constant, move it
11361 to OP1 to reduce the number of tests below. */
11362 if (commutative_ternary_tree_code (code)
11363 && tree_swap_operands_p (op0, op1, true))
11364 return fold_build3_loc (loc, code, type, op1, op0, op2);
11366 tem = generic_simplify (loc, code, type, op0, op1, op2);
11367 if (tem)
11368 return tem;
11370 /* Strip any conversions that don't change the mode. This is safe
11371 for every expression, except for a comparison expression because
11372 its signedness is derived from its operands. So, in the latter
11373 case, only strip conversions that don't change the signedness.
11375 Note that this is done as an internal manipulation within the
11376 constant folder, in order to find the simplest representation of
 11377 the arguments so that their form can be studied. In any case,
11378 the appropriate type conversions should be put back in the tree
11379 that will get out of the constant folder. */
11380 if (op0)
11382 arg0 = op0;
11383 STRIP_NOPS (arg0);
11386 if (op1)
11388 arg1 = op1;
11389 STRIP_NOPS (arg1);
11392 if (op2)
11394 arg2 = op2;
11395 STRIP_NOPS (arg2);
11398 switch (code)
11400 case COMPONENT_REF:
11401 if (TREE_CODE (arg0) == CONSTRUCTOR
11402 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11404 unsigned HOST_WIDE_INT idx;
11405 tree field, value;
11406 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11407 if (field == arg1)
11408 return value;
11410 return NULL_TREE;
11412 case COND_EXPR:
11413 case VEC_COND_EXPR:
11414 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11415 so all simple results must be passed through pedantic_non_lvalue. */
11416 if (TREE_CODE (arg0) == INTEGER_CST)
11418 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11419 tem = integer_zerop (arg0) ? op2 : op1;
11420 /* Only optimize constant conditions when the selected branch
11421 has the same type as the COND_EXPR. This avoids optimizing
11422 away "c ? x : throw", where the throw has a void type.
 11423 Also avoid throwing away an unused operand that contains a label. */
11424 if ((!TREE_SIDE_EFFECTS (unused_op)
11425 || !contains_label_p (unused_op))
11426 && (! VOID_TYPE_P (TREE_TYPE (tem))
11427 || VOID_TYPE_P (type)))
11428 return pedantic_non_lvalue_loc (loc, tem);
11429 return NULL_TREE;
11431 else if (TREE_CODE (arg0) == VECTOR_CST)
11433 if ((TREE_CODE (arg1) == VECTOR_CST
11434 || TREE_CODE (arg1) == CONSTRUCTOR)
11435 && (TREE_CODE (arg2) == VECTOR_CST
11436 || TREE_CODE (arg2) == CONSTRUCTOR))
11438 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11439 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11440 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11441 for (i = 0; i < nelts; i++)
11443 tree val = VECTOR_CST_ELT (arg0, i);
11444 if (integer_all_onesp (val))
11445 sel[i] = i;
11446 else if (integer_zerop (val))
11447 sel[i] = nelts + i;
11448 else /* Currently unreachable. */
11449 return NULL_TREE;
11451 tree t = fold_vec_perm (type, arg1, arg2, sel);
11452 if (t != NULL_TREE)
11453 return t;
11457 /* If we have A op B ? A : C, we may be able to convert this to a
11458 simpler expression, depending on the operation and the values
11459 of B and C. Signed zeros prevent all of these transformations,
11460 for reasons given above each one.
11462 Also try swapping the arguments and inverting the conditional. */
11463 if (COMPARISON_CLASS_P (arg0)
11464 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11465 arg1, TREE_OPERAND (arg0, 1))
11466 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11468 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11469 if (tem)
11470 return tem;
11473 if (COMPARISON_CLASS_P (arg0)
11474 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11475 op2,
11476 TREE_OPERAND (arg0, 1))
11477 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11479 location_t loc0 = expr_location_or (arg0, loc);
11480 tem = fold_invert_truthvalue (loc0, arg0);
11481 if (tem && COMPARISON_CLASS_P (tem))
11483 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11484 if (tem)
11485 return tem;
11489 /* If the second operand is simpler than the third, swap them
11490 since that produces better jump optimization results. */
11491 if (truth_value_p (TREE_CODE (arg0))
11492 && tree_swap_operands_p (op1, op2, false))
11494 location_t loc0 = expr_location_or (arg0, loc);
11495 /* See if this can be inverted. If it can't, possibly because
11496 it was a floating-point inequality comparison, don't do
11497 anything. */
11498 tem = fold_invert_truthvalue (loc0, arg0);
11499 if (tem)
11500 return fold_build3_loc (loc, code, type, tem, op2, op1);
11503 /* Convert A ? 1 : 0 to simply A. */
11504 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11505 : (integer_onep (op1)
11506 && !VECTOR_TYPE_P (type)))
11507 && integer_zerop (op2)
11508 /* If we try to convert OP0 to our type, the
11509 call to fold will try to move the conversion inside
11510 a COND, which will recurse. In that case, the COND_EXPR
11511 is probably the best choice, so leave it alone. */
11512 && type == TREE_TYPE (arg0))
11513 return pedantic_non_lvalue_loc (loc, arg0);
11515 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11516 over COND_EXPR in cases such as floating point comparisons. */
11517 if (integer_zerop (op1)
11518 && code == COND_EXPR
11519 && integer_onep (op2)
11520 && !VECTOR_TYPE_P (type)
11521 && truth_value_p (TREE_CODE (arg0)))
11522 return pedantic_non_lvalue_loc (loc,
11523 fold_convert_loc (loc, type,
11524 invert_truthvalue_loc (loc,
11525 arg0)));
11527 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11528 if (TREE_CODE (arg0) == LT_EXPR
11529 && integer_zerop (TREE_OPERAND (arg0, 1))
11530 && integer_zerop (op2)
11531 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11533 /* sign_bit_p looks through both zero and sign extensions,
11534 but for this optimization only sign extensions are
11535 usable. */
11536 tree tem2 = TREE_OPERAND (arg0, 0);
11537 while (tem != tem2)
11539 if (TREE_CODE (tem2) != NOP_EXPR
11540 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11542 tem = NULL_TREE;
11543 break;
11545 tem2 = TREE_OPERAND (tem2, 0);
11547 /* sign_bit_p only checks ARG1 bits within A's precision.
11548 If <sign bit of A> has wider type than A, bits outside
11549 of A's precision in <sign bit of A> need to be checked.
11550 If they are all 0, this optimization needs to be done
11551 in unsigned A's type, if they are all 1 in signed A's type,
11552 otherwise this can't be done. */
11553 if (tem
11554 && TYPE_PRECISION (TREE_TYPE (tem))
11555 < TYPE_PRECISION (TREE_TYPE (arg1))
11556 && TYPE_PRECISION (TREE_TYPE (tem))
11557 < TYPE_PRECISION (type))
11559 int inner_width, outer_width;
11560 tree tem_type;
11562 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11563 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11564 if (outer_width > TYPE_PRECISION (type))
11565 outer_width = TYPE_PRECISION (type);
11567 wide_int mask = wi::shifted_mask
11568 (inner_width, outer_width - inner_width, false,
11569 TYPE_PRECISION (TREE_TYPE (arg1)));
11571 wide_int common = mask & arg1;
11572 if (common == mask)
11574 tem_type = signed_type_for (TREE_TYPE (tem));
11575 tem = fold_convert_loc (loc, tem_type, tem);
11577 else if (common == 0)
11579 tem_type = unsigned_type_for (TREE_TYPE (tem));
11580 tem = fold_convert_loc (loc, tem_type, tem);
11582 else
11583 tem = NULL;
11586 if (tem)
11587 return
11588 fold_convert_loc (loc, type,
11589 fold_build2_loc (loc, BIT_AND_EXPR,
11590 TREE_TYPE (tem), tem,
11591 fold_convert_loc (loc,
11592 TREE_TYPE (tem),
11593 arg1)));
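 /* Illustrative example (guarded out of the build, not part of the
 folder): selecting the sign bit with a comparison is the same as
 masking it directly, shown for an 8-bit value whose sign bit is
 0x80. */
 #if 0
 for (int i = -128; i < 128; i++)
   {
     signed char a = (signed char) i;
     gcc_assert ((a < 0 ? (a & 0x80) : 0) == (a & 0x80));
   }
 #endif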
11596 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11597 already handled above. */
11598 if (TREE_CODE (arg0) == BIT_AND_EXPR
11599 && integer_onep (TREE_OPERAND (arg0, 1))
11600 && integer_zerop (op2)
11601 && integer_pow2p (arg1))
11603 tree tem = TREE_OPERAND (arg0, 0);
11604 STRIP_NOPS (tem);
11605 if (TREE_CODE (tem) == RSHIFT_EXPR
11606 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11607 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11608 tree_to_uhwi (TREE_OPERAND (tem, 1)))
11609 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11610 TREE_OPERAND (tem, 0), arg1);
11613 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11614 is probably obsolete because the first operand should be a
11615 truth value (that's why we have the two cases above), but let's
11616 leave it in until we can confirm this for all front-ends. */
11617 if (integer_zerop (op2)
11618 && TREE_CODE (arg0) == NE_EXPR
11619 && integer_zerop (TREE_OPERAND (arg0, 1))
11620 && integer_pow2p (arg1)
11621 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11622 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11623 arg1, OEP_ONLY_CONST))
11624 return pedantic_non_lvalue_loc (loc,
11625 fold_convert_loc (loc, type,
11626 TREE_OPERAND (arg0, 0)));
11628 /* Disable the transformations below for vectors, since
11629 fold_binary_op_with_conditional_arg may undo them immediately,
11630 yielding an infinite loop. */
11631 if (code == VEC_COND_EXPR)
11632 return NULL_TREE;
11634 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11635 if (integer_zerop (op2)
11636 && truth_value_p (TREE_CODE (arg0))
11637 && truth_value_p (TREE_CODE (arg1))
11638 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11639 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11640 : TRUTH_ANDIF_EXPR,
11641 type, fold_convert_loc (loc, type, arg0), arg1);
11643 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11644 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11645 && truth_value_p (TREE_CODE (arg0))
11646 && truth_value_p (TREE_CODE (arg1))
11647 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11649 location_t loc0 = expr_location_or (arg0, loc);
11650 /* Only perform transformation if ARG0 is easily inverted. */
11651 tem = fold_invert_truthvalue (loc0, arg0);
11652 if (tem)
11653 return fold_build2_loc (loc, code == VEC_COND_EXPR
11654 ? BIT_IOR_EXPR
11655 : TRUTH_ORIF_EXPR,
11656 type, fold_convert_loc (loc, type, tem),
11657 arg1);
11660 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11661 if (integer_zerop (arg1)
11662 && truth_value_p (TREE_CODE (arg0))
11663 && truth_value_p (TREE_CODE (op2))
11664 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11666 location_t loc0 = expr_location_or (arg0, loc);
11667 /* Only perform transformation if ARG0 is easily inverted. */
11668 tem = fold_invert_truthvalue (loc0, arg0);
11669 if (tem)
11670 return fold_build2_loc (loc, code == VEC_COND_EXPR
11671 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11672 type, fold_convert_loc (loc, type, tem),
11673 op2);
11676 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11677 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11678 && truth_value_p (TREE_CODE (arg0))
11679 && truth_value_p (TREE_CODE (op2))
11680 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11681 return fold_build2_loc (loc, code == VEC_COND_EXPR
11682 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11683 type, fold_convert_loc (loc, type, arg0), op2);
11685 return NULL_TREE;
11687 case CALL_EXPR:
11688 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11689 of fold_ternary on them. */
11690 gcc_unreachable ();
11692 case BIT_FIELD_REF:
11693 if (TREE_CODE (arg0) == VECTOR_CST
11694 && (type == TREE_TYPE (TREE_TYPE (arg0))
11695 || (TREE_CODE (type) == VECTOR_TYPE
11696 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11698 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11699 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11700 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11701 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11703 if (n != 0
11704 && (idx % width) == 0
11705 && (n % width) == 0
11706 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11708 idx = idx / width;
11709 n = n / width;
11711 if (TREE_CODE (arg0) == VECTOR_CST)
11713 if (n == 1)
11714 return VECTOR_CST_ELT (arg0, idx);
11716 tree *vals = XALLOCAVEC (tree, n);
11717 for (unsigned i = 0; i < n; ++i)
11718 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11719 return build_vector (type, vals);
11724 /* On constants we can use native encode/interpret to constant
11725 fold (nearly) all BIT_FIELD_REFs. */
11726 if (CONSTANT_CLASS_P (arg0)
11727 && can_native_interpret_type_p (type)
11728 && BITS_PER_UNIT == 8)
11730 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11731 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11732 /* Limit us to a reasonable amount of work. To relax the
11733 other limitations we need bit-shifting of the buffer
11734 and rounding up the size. */
11735 if (bitpos % BITS_PER_UNIT == 0
11736 && bitsize % BITS_PER_UNIT == 0
11737 && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11739 unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11740 unsigned HOST_WIDE_INT len
11741 = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11742 bitpos / BITS_PER_UNIT);
11743 if (len > 0
11744 && len * BITS_PER_UNIT >= bitsize)
11746 tree v = native_interpret_expr (type, b,
11747 bitsize / BITS_PER_UNIT);
11748 if (v)
11749 return v;
11754 return NULL_TREE;
11756 case FMA_EXPR:
11757 /* For integers we can decompose the FMA if possible. */
11758 if (TREE_CODE (arg0) == INTEGER_CST
11759 && TREE_CODE (arg1) == INTEGER_CST)
11760 return fold_build2_loc (loc, PLUS_EXPR, type,
11761 const_binop (MULT_EXPR, arg0, arg1), arg2);
11762 if (integer_zerop (arg2))
11763 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11765 return fold_fma (loc, type, arg0, arg1, arg2);
11767 case VEC_PERM_EXPR:
11768 if (TREE_CODE (arg2) == VECTOR_CST)
11770 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11771 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11772 unsigned char *sel2 = sel + nelts;
11773 bool need_mask_canon = false;
11774 bool need_mask_canon2 = false;
11775 bool all_in_vec0 = true;
11776 bool all_in_vec1 = true;
11777 bool maybe_identity = true;
11778 bool single_arg = (op0 == op1);
11779 bool changed = false;
11781 mask2 = 2 * nelts - 1;
11782 mask = single_arg ? (nelts - 1) : mask2;
11783 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11784 for (i = 0; i < nelts; i++)
11786 tree val = VECTOR_CST_ELT (arg2, i);
11787 if (TREE_CODE (val) != INTEGER_CST)
11788 return NULL_TREE;
11790 /* Make sure that the perm value is in an acceptable
11791 range. */
11792 wide_int t = val;
11793 need_mask_canon |= wi::gtu_p (t, mask);
11794 need_mask_canon2 |= wi::gtu_p (t, mask2);
11795 sel[i] = t.to_uhwi () & mask;
11796 sel2[i] = t.to_uhwi () & mask2;
11798 if (sel[i] < nelts)
11799 all_in_vec1 = false;
11800 else
11801 all_in_vec0 = false;
11803 if ((sel[i] & (nelts-1)) != i)
11804 maybe_identity = false;
11807 if (maybe_identity)
11809 if (all_in_vec0)
11810 return op0;
11811 if (all_in_vec1)
11812 return op1;
11815 if (all_in_vec0)
11816 op1 = op0;
11817 else if (all_in_vec1)
11819 op0 = op1;
11820 for (i = 0; i < nelts; i++)
11821 sel[i] -= nelts;
11822 need_mask_canon = true;
11825 if ((TREE_CODE (op0) == VECTOR_CST
11826 || TREE_CODE (op0) == CONSTRUCTOR)
11827 && (TREE_CODE (op1) == VECTOR_CST
11828 || TREE_CODE (op1) == CONSTRUCTOR))
11830 tree t = fold_vec_perm (type, op0, op1, sel);
11831 if (t != NULL_TREE)
11832 return t;
11835 if (op0 == op1 && !single_arg)
11836 changed = true;
11838 /* Some targets are deficient and fail to expand a single
11839 argument permutation while still allowing an equivalent
11840 2-argument version. */
11841 if (need_mask_canon && arg2 == op2
11842 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11843 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11845 need_mask_canon = need_mask_canon2;
11846 sel = sel2;
11849 if (need_mask_canon && arg2 == op2)
11851 tree *tsel = XALLOCAVEC (tree, nelts);
11852 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11853 for (i = 0; i < nelts; i++)
11854 tsel[i] = build_int_cst (eltype, sel[i]);
11855 op2 = build_vector (TREE_TYPE (arg2), tsel);
11856 changed = true;
11859 if (changed)
11860 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11862 return NULL_TREE;
11864 case BIT_INSERT_EXPR:
11865 /* Perform (partial) constant folding of BIT_INSERT_EXPR. */
11866 if (TREE_CODE (arg0) == INTEGER_CST
11867 && TREE_CODE (arg1) == INTEGER_CST)
11869 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11870 unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11871 wide_int tem = wi::bit_and (arg0,
11872 wi::shifted_mask (bitpos, bitsize, true,
11873 TYPE_PRECISION (type)));
11874 wide_int tem2
11875 = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11876 bitsize), bitpos);
11877 return wide_int_to_tree (type, wi::bit_or (tem, tem2));
11879 else if (TREE_CODE (arg0) == VECTOR_CST
11880 && CONSTANT_CLASS_P (arg1)
11881 && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11882 TREE_TYPE (arg1)))
11884 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11885 unsigned HOST_WIDE_INT elsize
11886 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11887 if (bitpos % elsize == 0)
11889 unsigned k = bitpos / elsize;
11890 if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11891 return arg0;
11892 else
11894 tree *elts = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
11895 memcpy (elts, VECTOR_CST_ELTS (arg0),
11896 sizeof (tree) * TYPE_VECTOR_SUBPARTS (type));
11897 elts[k] = arg1;
11898 return build_vector (type, elts);
11902 return NULL_TREE;
11904 default:
11905 return NULL_TREE;
11906 } /* switch (code) */
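 /* Illustrative example (guarded out of the build, not part of the
 folder): a plain C analogue of the integer BIT_INSERT_EXPR folding
 above. The field being replaced is cleared with a shifted mask, then
 the zero-extended value is shifted into place and ORed in. POS is
 assumed to be at most 24 here so the 8-bit field stays in range. */
 #if 0
 static unsigned int
 bit_insert_example (unsigned int word, unsigned char val, unsigned int pos)
 {
   unsigned int mask = 0xffu << pos; /* bits being replaced */
   return (word & ~mask) | ((unsigned int) val << pos);
 }
 #endif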
11909 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11910 of an array (or vector). */
11912 tree
11913 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11915 tree index_type = NULL_TREE;
11916 offset_int low_bound = 0;
11918 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11920 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11921 if (domain_type && TYPE_MIN_VALUE (domain_type))
 11923 /* Static constructors for variably sized objects make no sense. */
11924 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11925 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11926 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11930 if (index_type)
11931 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11932 TYPE_SIGN (index_type));
11934 offset_int index = low_bound - 1;
11935 if (index_type)
11936 index = wi::ext (index, TYPE_PRECISION (index_type),
11937 TYPE_SIGN (index_type));
11939 offset_int max_index;
11940 unsigned HOST_WIDE_INT cnt;
11941 tree cfield, cval;
11943 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
 11945 /* An array constructor may explicitly set the index, specify a range,
 11946 or leave the index NULL, meaning it is the next index after the
 11947 previous one. */
11948 if (cfield)
11950 if (TREE_CODE (cfield) == INTEGER_CST)
11951 max_index = index = wi::to_offset (cfield);
11952 else
11954 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11955 index = wi::to_offset (TREE_OPERAND (cfield, 0));
11956 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11959 else
11961 index += 1;
11962 if (index_type)
11963 index = wi::ext (index, TYPE_PRECISION (index_type),
11964 TYPE_SIGN (index_type));
11965 max_index = index;
 11968 /* Do we have a match? */
11969 if (wi::cmpu (access_index, index) >= 0
11970 && wi::cmpu (access_index, max_index) <= 0)
11971 return cval;
11973 return NULL_TREE;
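 /* Illustrative example (guarded out of the build, not part of the
 folder): with a GNU range designator the same value covers every
 index in the range, which is what the RANGE_EXPR branch above
 implements; the undesignated 42 lands at the next index, 9. */
 #if 0
 static void
 range_designator_example (void)
 {
   static const int a[10] = { [0 ... 8] = 1, 42 };
   for (int i = 0; i <= 8; i++)
     gcc_assert (a[i] == 1);
   gcc_assert (a[9] == 42);
 }
 #endif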
11976 /* Perform constant folding and related simplification of EXPR.
11977 The related simplifications include x*1 => x, x*0 => 0, etc.,
11978 and application of the associative law.
11979 NOP_EXPR conversions may be removed freely (as long as we
11980 are careful not to change the type of the overall expression).
11981 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11982 but we can constant-fold them if they have constant operands. */
11984 #ifdef ENABLE_FOLD_CHECKING
11985 # define fold(x) fold_1 (x)
11986 static tree fold_1 (tree);
11987 static
11988 #endif
11989 tree
11990 fold (tree expr)
11992 const tree t = expr;
11993 enum tree_code code = TREE_CODE (t);
11994 enum tree_code_class kind = TREE_CODE_CLASS (code);
11995 tree tem;
11996 location_t loc = EXPR_LOCATION (expr);
11998 /* Return right away if a constant. */
11999 if (kind == tcc_constant)
12000 return t;
12002 /* CALL_EXPR-like objects with variable numbers of operands are
12003 treated specially. */
12004 if (kind == tcc_vl_exp)
12006 if (code == CALL_EXPR)
12008 tem = fold_call_expr (loc, expr, false);
12009 return tem ? tem : expr;
12011 return expr;
12014 if (IS_EXPR_CODE_CLASS (kind))
12016 tree type = TREE_TYPE (t);
12017 tree op0, op1, op2;
12019 switch (TREE_CODE_LENGTH (code))
12021 case 1:
12022 op0 = TREE_OPERAND (t, 0);
12023 tem = fold_unary_loc (loc, code, type, op0);
12024 return tem ? tem : expr;
12025 case 2:
12026 op0 = TREE_OPERAND (t, 0);
12027 op1 = TREE_OPERAND (t, 1);
12028 tem = fold_binary_loc (loc, code, type, op0, op1);
12029 return tem ? tem : expr;
12030 case 3:
12031 op0 = TREE_OPERAND (t, 0);
12032 op1 = TREE_OPERAND (t, 1);
12033 op2 = TREE_OPERAND (t, 2);
12034 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12035 return tem ? tem : expr;
12036 default:
12037 break;
12041 switch (code)
12043 case ARRAY_REF:
12045 tree op0 = TREE_OPERAND (t, 0);
12046 tree op1 = TREE_OPERAND (t, 1);
12048 if (TREE_CODE (op1) == INTEGER_CST
12049 && TREE_CODE (op0) == CONSTRUCTOR
12050 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12052 tree val = get_array_ctor_element_at_index (op0,
12053 wi::to_offset (op1));
12054 if (val)
12055 return val;
12058 return t;
12061 /* Return a VECTOR_CST if possible. */
12062 case CONSTRUCTOR:
12064 tree type = TREE_TYPE (t);
12065 if (TREE_CODE (type) != VECTOR_TYPE)
12066 return t;
12068 unsigned i;
12069 tree val;
12070 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12071 if (! CONSTANT_CLASS_P (val))
12072 return t;
12074 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12077 case CONST_DECL:
12078 return fold (DECL_INITIAL (t));
12080 default:
12081 return t;
12082 } /* switch (code) */
12085 #ifdef ENABLE_FOLD_CHECKING
12086 #undef fold
12088 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12089 hash_table<nofree_ptr_hash<const tree_node> > *);
12090 static void fold_check_failed (const_tree, const_tree);
12091 void print_fold_checksum (const_tree);
 12093 /* When --enable-checking=fold, compute a digest of EXPR before
 12094 and after the actual fold call to verify that fold did not
 12095 accidentally change the original EXPR. */
12097 tree
12098 fold (tree expr)
12100 tree ret;
12101 struct md5_ctx ctx;
12102 unsigned char checksum_before[16], checksum_after[16];
12103 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12105 md5_init_ctx (&ctx);
12106 fold_checksum_tree (expr, &ctx, &ht);
12107 md5_finish_ctx (&ctx, checksum_before);
12108 ht.empty ();
12110 ret = fold_1 (expr);
12112 md5_init_ctx (&ctx);
12113 fold_checksum_tree (expr, &ctx, &ht);
12114 md5_finish_ctx (&ctx, checksum_after);
12116 if (memcmp (checksum_before, checksum_after, 16))
12117 fold_check_failed (expr, ret);
12119 return ret;
12122 void
12123 print_fold_checksum (const_tree expr)
12125 struct md5_ctx ctx;
12126 unsigned char checksum[16], cnt;
12127 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12129 md5_init_ctx (&ctx);
12130 fold_checksum_tree (expr, &ctx, &ht);
12131 md5_finish_ctx (&ctx, checksum);
12132 for (cnt = 0; cnt < 16; ++cnt)
12133 fprintf (stderr, "%02x", checksum[cnt]);
12134 putc ('\n', stderr);
12137 static void
12138 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12140 internal_error ("fold check: original tree changed by fold");
12143 static void
12144 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12145 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12147 const tree_node **slot;
12148 enum tree_code code;
12149 union tree_node buf;
12150 int i, len;
12152 recursive_label:
12153 if (expr == NULL)
12154 return;
12155 slot = ht->find_slot (expr, INSERT);
12156 if (*slot != NULL)
12157 return;
12158 *slot = expr;
12159 code = TREE_CODE (expr);
12160 if (TREE_CODE_CLASS (code) == tcc_declaration
12161 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12163 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12164 memcpy ((char *) &buf, expr, tree_size (expr));
12165 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12166 buf.decl_with_vis.symtab_node = NULL;
12167 expr = (tree) &buf;
12169 else if (TREE_CODE_CLASS (code) == tcc_type
12170 && (TYPE_POINTER_TO (expr)
12171 || TYPE_REFERENCE_TO (expr)
12172 || TYPE_CACHED_VALUES_P (expr)
12173 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12174 || TYPE_NEXT_VARIANT (expr)
12175 || TYPE_ALIAS_SET_KNOWN_P (expr)))
12177 /* Allow these fields to be modified. */
12178 tree tmp;
12179 memcpy ((char *) &buf, expr, tree_size (expr));
12180 expr = tmp = (tree) &buf;
12181 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12182 TYPE_POINTER_TO (tmp) = NULL;
12183 TYPE_REFERENCE_TO (tmp) = NULL;
12184 TYPE_NEXT_VARIANT (tmp) = NULL;
12185 TYPE_ALIAS_SET (tmp) = -1;
12186 if (TYPE_CACHED_VALUES_P (tmp))
12188 TYPE_CACHED_VALUES_P (tmp) = 0;
12189 TYPE_CACHED_VALUES (tmp) = NULL;
12192 md5_process_bytes (expr, tree_size (expr), ctx);
12193 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12194 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12195 if (TREE_CODE_CLASS (code) != tcc_type
12196 && TREE_CODE_CLASS (code) != tcc_declaration
12197 && code != TREE_LIST
12198 && code != SSA_NAME
12199 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12200 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12201 switch (TREE_CODE_CLASS (code))
12203 case tcc_constant:
12204 switch (code)
12206 case STRING_CST:
12207 md5_process_bytes (TREE_STRING_POINTER (expr),
12208 TREE_STRING_LENGTH (expr), ctx);
12209 break;
12210 case COMPLEX_CST:
12211 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12212 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12213 break;
12214 case VECTOR_CST:
12215 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12216 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12217 break;
12218 default:
12219 break;
12221 break;
12222 case tcc_exceptional:
12223 switch (code)
12225 case TREE_LIST:
12226 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12227 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12228 expr = TREE_CHAIN (expr);
12229 goto recursive_label;
12230 break;
12231 case TREE_VEC:
12232 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12233 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12234 break;
12235 default:
12236 break;
12238 break;
12239 case tcc_expression:
12240 case tcc_reference:
12241 case tcc_comparison:
12242 case tcc_unary:
12243 case tcc_binary:
12244 case tcc_statement:
12245 case tcc_vl_exp:
12246 len = TREE_OPERAND_LENGTH (expr);
12247 for (i = 0; i < len; ++i)
12248 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12249 break;
12250 case tcc_declaration:
12251 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12252 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12253 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12255 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12256 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12257 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12258 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12259 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12262 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12264 if (TREE_CODE (expr) == FUNCTION_DECL)
12266 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12267 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12269 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12271 break;
12272 case tcc_type:
12273 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12274 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12275 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12276 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12277 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12278 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12279 if (INTEGRAL_TYPE_P (expr)
12280 || SCALAR_FLOAT_TYPE_P (expr))
12282 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12283 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12285 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12286 if (TREE_CODE (expr) == RECORD_TYPE
12287 || TREE_CODE (expr) == UNION_TYPE
12288 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12289 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12290 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12291 break;
12292 default:
12293 break;
12297 /* Helper function for outputting the checksum of a tree T. When
12298 debugging with gdb, you can "define mynext" to be "next" followed
12299 by "call debug_fold_checksum (op0)", then just trace down till the
12300 outputs differ. */
12302 DEBUG_FUNCTION void
12303 debug_fold_checksum (const_tree t)
12305 int i;
12306 unsigned char checksum[16];
12307 struct md5_ctx ctx;
12308 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12310 md5_init_ctx (&ctx);
12311 fold_checksum_tree (t, &ctx, &ht);
12312 md5_finish_ctx (&ctx, checksum);
12313 ht.empty ();
12315 for (i = 0; i < 16; i++)
12316 fprintf (stderr, "%d ", checksum[i]);
12318 fprintf (stderr, "\n");
12321 #endif
12323 /* Fold a unary tree expression with code CODE of type TYPE with an
12324 operand OP0. LOC is the location of the resulting expression.
12325 Return a folded expression if successful. Otherwise, return a tree
12326 expression with code CODE of type TYPE with an operand OP0. */
12328 tree
12329 fold_build1_stat_loc (location_t loc,
12330 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12332 tree tem;
12333 #ifdef ENABLE_FOLD_CHECKING
12334 unsigned char checksum_before[16], checksum_after[16];
12335 struct md5_ctx ctx;
12336 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12338 md5_init_ctx (&ctx);
12339 fold_checksum_tree (op0, &ctx, &ht);
12340 md5_finish_ctx (&ctx, checksum_before);
12341 ht.empty ();
12342 #endif
12344 tem = fold_unary_loc (loc, code, type, op0);
12345 if (!tem)
12346 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12348 #ifdef ENABLE_FOLD_CHECKING
12349 md5_init_ctx (&ctx);
12350 fold_checksum_tree (op0, &ctx, &ht);
12351 md5_finish_ctx (&ctx, checksum_after);
12353 if (memcmp (checksum_before, checksum_after, 16))
12354 fold_check_failed (op0, tem);
12355 #endif
12356 return tem;
12359 /* Fold a binary tree expression with code CODE of type TYPE with
12360 operands OP0 and OP1. LOC is the location of the resulting
12361 expression. Return a folded expression if successful. Otherwise,
12362 return a tree expression with code CODE of type TYPE with operands
12363 OP0 and OP1. */
12365 tree
12366 fold_build2_stat_loc (location_t loc,
12367 enum tree_code code, tree type, tree op0, tree op1
12368 MEM_STAT_DECL)
12370 tree tem;
12371 #ifdef ENABLE_FOLD_CHECKING
12372 unsigned char checksum_before_op0[16],
12373 checksum_before_op1[16],
12374 checksum_after_op0[16],
12375 checksum_after_op1[16];
12376 struct md5_ctx ctx;
12377 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12379 md5_init_ctx (&ctx);
12380 fold_checksum_tree (op0, &ctx, &ht);
12381 md5_finish_ctx (&ctx, checksum_before_op0);
12382 ht.empty ();
12384 md5_init_ctx (&ctx);
12385 fold_checksum_tree (op1, &ctx, &ht);
12386 md5_finish_ctx (&ctx, checksum_before_op1);
12387 ht.empty ();
12388 #endif
12390 tem = fold_binary_loc (loc, code, type, op0, op1);
12391 if (!tem)
12392 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12394 #ifdef ENABLE_FOLD_CHECKING
12395 md5_init_ctx (&ctx);
12396 fold_checksum_tree (op0, &ctx, &ht);
12397 md5_finish_ctx (&ctx, checksum_after_op0);
12398 ht.empty ();
12400 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12401 fold_check_failed (op0, tem);
12403 md5_init_ctx (&ctx);
12404 fold_checksum_tree (op1, &ctx, &ht);
12405 md5_finish_ctx (&ctx, checksum_after_op1);
12407 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12408 fold_check_failed (op1, tem);
12409 #endif
12410 return tem;
12413 /* Fold a ternary tree expression with code CODE of type TYPE with
12414 operands OP0, OP1, and OP2. Return a folded expression if
12415 successful. Otherwise, return a tree expression with code CODE of
12416 type TYPE with operands OP0, OP1, and OP2. */
12418 tree
12419 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12420 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12422 tree tem;
12423 #ifdef ENABLE_FOLD_CHECKING
12424 unsigned char checksum_before_op0[16],
12425 checksum_before_op1[16],
12426 checksum_before_op2[16],
12427 checksum_after_op0[16],
12428 checksum_after_op1[16],
12429 checksum_after_op2[16];
12430 struct md5_ctx ctx;
12431 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12433 md5_init_ctx (&ctx);
12434 fold_checksum_tree (op0, &ctx, &ht);
12435 md5_finish_ctx (&ctx, checksum_before_op0);
12436 ht.empty ();
12438 md5_init_ctx (&ctx);
12439 fold_checksum_tree (op1, &ctx, &ht);
12440 md5_finish_ctx (&ctx, checksum_before_op1);
12441 ht.empty ();
12443 md5_init_ctx (&ctx);
12444 fold_checksum_tree (op2, &ctx, &ht);
12445 md5_finish_ctx (&ctx, checksum_before_op2);
12446 ht.empty ();
12447 #endif
12449 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12450 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12451 if (!tem)
12452 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12454 #ifdef ENABLE_FOLD_CHECKING
12455 md5_init_ctx (&ctx);
12456 fold_checksum_tree (op0, &ctx, &ht);
12457 md5_finish_ctx (&ctx, checksum_after_op0);
12458 ht.empty ();
12460 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12461 fold_check_failed (op0, tem);
12463 md5_init_ctx (&ctx);
12464 fold_checksum_tree (op1, &ctx, &ht);
12465 md5_finish_ctx (&ctx, checksum_after_op1);
12466 ht.empty ();
12468 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12469 fold_check_failed (op1, tem);
12471 md5_init_ctx (&ctx);
12472 fold_checksum_tree (op2, &ctx, &ht);
12473 md5_finish_ctx (&ctx, checksum_after_op2);
12475 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12476 fold_check_failed (op2, tem);
12477 #endif
12478 return tem;
12481 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12482 arguments in ARGARRAY, and a null static chain.
12483 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12484 of type TYPE from the given operands as constructed by build_call_array. */
12486 tree
12487 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12488 int nargs, tree *argarray)
12490 tree tem;
12491 #ifdef ENABLE_FOLD_CHECKING
12492 unsigned char checksum_before_fn[16],
12493 checksum_before_arglist[16],
12494 checksum_after_fn[16],
12495 checksum_after_arglist[16];
12496 struct md5_ctx ctx;
12497 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12498 int i;
12500 md5_init_ctx (&ctx);
12501 fold_checksum_tree (fn, &ctx, &ht);
12502 md5_finish_ctx (&ctx, checksum_before_fn);
12503 ht.empty ();
12505 md5_init_ctx (&ctx);
12506 for (i = 0; i < nargs; i++)
12507 fold_checksum_tree (argarray[i], &ctx, &ht);
12508 md5_finish_ctx (&ctx, checksum_before_arglist);
12509 ht.empty ();
12510 #endif
12512 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12513 if (!tem)
12514 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12516 #ifdef ENABLE_FOLD_CHECKING
12517 md5_init_ctx (&ctx);
12518 fold_checksum_tree (fn, &ctx, &ht);
12519 md5_finish_ctx (&ctx, checksum_after_fn);
12520 ht.empty ();
12522 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12523 fold_check_failed (fn, tem);
12525 md5_init_ctx (&ctx);
12526 for (i = 0; i < nargs; i++)
12527 fold_checksum_tree (argarray[i], &ctx, &ht);
12528 md5_finish_ctx (&ctx, checksum_after_arglist);
12530 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12531 fold_check_failed (NULL_TREE, tem);
12532 #endif
12533 return tem;
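/* Illustrative note (editor's sketch, not in the original source): the
   ENABLE_FOLD_CHECKING blocks above hash each operand before and after
   folding.  If the two MD5 digests differ, fold_check_failed aborts with
   an internal error, catching any folder that mutated its input trees in
   place -- something the fold_* entry points must never do.  */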
12536 /* Perform constant folding and related simplification of initializer
12537 expression EXPR. These behave identically to "fold_buildN" but ignore
12538 potential run-time traps and exceptions that fold must preserve. */
12540 #define START_FOLD_INIT \
12541 int saved_signaling_nans = flag_signaling_nans;\
12542 int saved_trapping_math = flag_trapping_math;\
12543 int saved_rounding_math = flag_rounding_math;\
12544 int saved_trapv = flag_trapv;\
12545 int saved_folding_initializer = folding_initializer;\
12546 flag_signaling_nans = 0;\
12547 flag_trapping_math = 0;\
12548 flag_rounding_math = 0;\
12549 flag_trapv = 0;\
12550 folding_initializer = 1;
12552 #define END_FOLD_INIT \
12553 flag_signaling_nans = saved_signaling_nans;\
12554 flag_trapping_math = saved_trapping_math;\
12555 flag_rounding_math = saved_rounding_math;\
12556 flag_trapv = saved_trapv;\
12557 folding_initializer = saved_folding_initializer;
12559 tree
12560 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12561 tree type, tree op)
12563 tree result;
12564 START_FOLD_INIT;
12566 result = fold_build1_loc (loc, code, type, op);
12568 END_FOLD_INIT;
12569 return result;
12572 tree
12573 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12574 tree type, tree op0, tree op1)
12576 tree result;
12577 START_FOLD_INIT;
12579 result = fold_build2_loc (loc, code, type, op0, op1);
12581 END_FOLD_INIT;
12582 return result;
12585 tree
12586 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12587 int nargs, tree *argarray)
12589 tree result;
12590 START_FOLD_INIT;
12592 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12594 END_FOLD_INIT;
12595 return result;
12598 #undef START_FOLD_INIT
12599 #undef END_FOLD_INIT
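/* Illustrative example (a sketch; only the flag names from the macros
   above are assumed): wrapping a fold in START_FOLD_INIT/END_FOLD_INIT
   lets a static initializer such as

     static const double third = 1.0 / 3.0;

   be folded at compile time even under -ftrapping-math or
   -frounding-math, because those flags are temporarily cleared and
   folding_initializer is set for the duration of the call.  */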
12601 /* Determine if first argument is a multiple of second argument. Return 0 if
12602 it is not, or we cannot easily determine it to be.
12604 An example of the sort of thing we care about (at this point; this routine
12605 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12606 fold cases do now) is discovering that
12608 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12610 is a multiple of
12612 SAVE_EXPR (J * 8)
12614 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12616 This code also handles discovering that
12618 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12620 is a multiple of 8 so we don't have to worry about dealing with a
12621 possible remainder.
12623 Note that we *look* inside a SAVE_EXPR only to determine how it was
12624 calculated; it is not safe for fold to do much of anything else with the
12625 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12626 at run time. For example, the latter example above *cannot* be implemented
12627 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12628 evaluation time of the original SAVE_EXPR is not necessarily the same at
12629 the time the new expression is evaluated. The only optimization of this
12630 sort that would be valid is changing
12632 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12634 divided by 8 to
12636 SAVE_EXPR (I) * SAVE_EXPR (J)
12638 (where the same SAVE_EXPR (J) is used in the original and the
12639 transformed version). */
12641 int
12642 multiple_of_p (tree type, const_tree top, const_tree bottom)
12644 if (operand_equal_p (top, bottom, 0))
12645 return 1;
12647 if (TREE_CODE (type) != INTEGER_TYPE)
12648 return 0;
12650 switch (TREE_CODE (top))
12652 case BIT_AND_EXPR:
12653 /* Bitwise and provides a power of two multiple. If the mask is
12654 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12655 if (!integer_pow2p (bottom))
12656 return 0;
12657 /* FALLTHRU */
12659 case MULT_EXPR:
12660 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12661 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12663 case PLUS_EXPR:
12664 case MINUS_EXPR:
12665 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12666 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12668 case LSHIFT_EXPR:
12669 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12671 tree op1, t1;
12673 op1 = TREE_OPERAND (top, 1);
12674 /* const_binop may not detect overflow correctly,
12675 so check for it explicitly here. */
12676 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12677 && 0 != (t1 = fold_convert (type,
12678 const_binop (LSHIFT_EXPR,
12679 size_one_node,
12680 op1)))
12681 && !TREE_OVERFLOW (t1))
12682 return multiple_of_p (type, t1, bottom);
12684 return 0;
12686 case NOP_EXPR:
12687 /* Can't handle conversions from non-integral or wider integral type. */
12688 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12689 || (TYPE_PRECISION (type)
12690 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12691 return 0;
12693 /* ... fall through ... */
12695 case SAVE_EXPR:
12696 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12698 case COND_EXPR:
12699 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12700 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12702 case INTEGER_CST:
12703 if (TREE_CODE (bottom) != INTEGER_CST
12704 || integer_zerop (bottom)
12705 || (TYPE_UNSIGNED (type)
12706 && (tree_int_cst_sgn (top) < 0
12707 || tree_int_cst_sgn (bottom) < 0)))
12708 return 0;
12709 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12710 SIGNED);
12712 default:
12713 return 0;
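/* Illustrative sketch (hypothetical trees I and J of sizetype, not in
   the original source), combining the SAVE_EXPR and MULT_EXPR cases
   above:

     tree j8   = save_expr (build2 (MULT_EXPR, sizetype, j, size_int (8)));
     tree prod = build2 (MULT_EXPR, sizetype, save_expr (i), j8);

   Both multiple_of_p (sizetype, prod, j8) and
   multiple_of_p (sizetype, prod, size_int (8)) return 1: the second
   factor is the very same SAVE_EXPR node in the first query, and a
   constant multiple of 8 in the second.  */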
12717 #define tree_expr_nonnegative_warnv_p(X, Y) \
12718 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12720 #define RECURSE(X) \
12721 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12723 /* Return true if CODE or TYPE is known to be non-negative. */
12725 static bool
12726 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12728 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12729 && truth_value_p (code))
12730 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12731 have a signed:1 type (where the values are -1 and 0).  */
12732 return true;
12733 return false;
12736 /* Return true if (CODE OP0) is known to be non-negative. If the return
12737 value is based on the assumption that signed overflow is undefined,
12738 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12739 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12741 bool
12742 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12743 bool *strict_overflow_p, int depth)
12745 if (TYPE_UNSIGNED (type))
12746 return true;
12748 switch (code)
12750 case ABS_EXPR:
12751 /* We can't return 1 if flag_wrapv is set because
12752 ABS_EXPR<INT_MIN> = INT_MIN. */
12753 if (!ANY_INTEGRAL_TYPE_P (type))
12754 return true;
12755 if (TYPE_OVERFLOW_UNDEFINED (type))
12757 *strict_overflow_p = true;
12758 return true;
12760 break;
12762 case NON_LVALUE_EXPR:
12763 case FLOAT_EXPR:
12764 case FIX_TRUNC_EXPR:
12765 return RECURSE (op0);
12767 CASE_CONVERT:
12769 tree inner_type = TREE_TYPE (op0);
12770 tree outer_type = type;
12772 if (TREE_CODE (outer_type) == REAL_TYPE)
12774 if (TREE_CODE (inner_type) == REAL_TYPE)
12775 return RECURSE (op0);
12776 if (INTEGRAL_TYPE_P (inner_type))
12778 if (TYPE_UNSIGNED (inner_type))
12779 return true;
12780 return RECURSE (op0);
12783 else if (INTEGRAL_TYPE_P (outer_type))
12785 if (TREE_CODE (inner_type) == REAL_TYPE)
12786 return RECURSE (op0);
12787 if (INTEGRAL_TYPE_P (inner_type))
12788 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12789 && TYPE_UNSIGNED (inner_type);
12792 break;
12794 default:
12795 return tree_simple_nonnegative_warnv_p (code, type);
12798 /* We don't know the sign of `t', so be conservative and return false.  */
12799 return false;
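/* Illustrative example (editor's note): for a signed 32-bit X,
   ABS_EXPR <X> is reported non-negative only by assuming overflow is
   undefined, since ABS_EXPR <INT_MIN> wraps back to INT_MIN; the
   ABS_EXPR case above therefore sets *STRICT_OVERFLOW_P.  For a double
   operand no such assumption is needed and it returns true outright.  */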
12802 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12803 value is based on the assumption that signed overflow is undefined,
12804 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12805 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12807 bool
12808 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12809 tree op1, bool *strict_overflow_p,
12810 int depth)
12812 if (TYPE_UNSIGNED (type))
12813 return true;
12815 switch (code)
12817 case POINTER_PLUS_EXPR:
12818 case PLUS_EXPR:
12819 if (FLOAT_TYPE_P (type))
12820 return RECURSE (op0) && RECURSE (op1);
12822 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12823 both unsigned and at least 2 bits shorter than the result. */
12824 if (TREE_CODE (type) == INTEGER_TYPE
12825 && TREE_CODE (op0) == NOP_EXPR
12826 && TREE_CODE (op1) == NOP_EXPR)
12828 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12829 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12830 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12831 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12833 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12834 TYPE_PRECISION (inner2)) + 1;
12835 return prec < TYPE_PRECISION (type);
12838 break;
12840 case MULT_EXPR:
12841 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12843 /* x * x is always non-negative for floating point x
12844 or without overflow. */
12845 if (operand_equal_p (op0, op1, 0)
12846 || (RECURSE (op0) && RECURSE (op1)))
12848 if (ANY_INTEGRAL_TYPE_P (type)
12849 && TYPE_OVERFLOW_UNDEFINED (type))
12850 *strict_overflow_p = true;
12851 return true;
12855 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12856 both unsigned and the sum of their precisions is less than that of the result.  */
12857 if (TREE_CODE (type) == INTEGER_TYPE
12858 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12859 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12861 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12862 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12863 : TREE_TYPE (op0);
12864 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12865 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12866 : TREE_TYPE (op1);
12868 bool unsigned0 = TYPE_UNSIGNED (inner0);
12869 bool unsigned1 = TYPE_UNSIGNED (inner1);
12871 if (TREE_CODE (op0) == INTEGER_CST)
12872 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12874 if (TREE_CODE (op1) == INTEGER_CST)
12875 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12877 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12878 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12880 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12881 ? tree_int_cst_min_precision (op0, UNSIGNED)
12882 : TYPE_PRECISION (inner0);
12884 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12885 ? tree_int_cst_min_precision (op1, UNSIGNED)
12886 : TYPE_PRECISION (inner1);
12888 return precision0 + precision1 < TYPE_PRECISION (type);
12891 return false;
12893 case BIT_AND_EXPR:
12894 case MAX_EXPR:
12895 return RECURSE (op0) || RECURSE (op1);
12897 case BIT_IOR_EXPR:
12898 case BIT_XOR_EXPR:
12899 case MIN_EXPR:
12900 case RDIV_EXPR:
12901 case TRUNC_DIV_EXPR:
12902 case CEIL_DIV_EXPR:
12903 case FLOOR_DIV_EXPR:
12904 case ROUND_DIV_EXPR:
12905 return RECURSE (op0) && RECURSE (op1);
12907 case TRUNC_MOD_EXPR:
12908 return RECURSE (op0);
12910 case FLOOR_MOD_EXPR:
12911 return RECURSE (op1);
12913 case CEIL_MOD_EXPR:
12914 case ROUND_MOD_EXPR:
12915 default:
12916 return tree_simple_nonnegative_warnv_p (code, type);
12919 /* We don't know the sign of `t', so be conservative and return false.  */
12920 return false;
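/* Illustrative example (editor's sketch): both extension rules above in
   action for 32-bit int, with A and B of type unsigned char:

     (int) a + (int) b   -- MAX (8, 8) + 1 = 9  < 32, non-negative
     (int) a * (int) b   --      8 + 8   = 16 < 32, non-negative

   Widening that stops short of the sign bit guarantees neither the sum
   nor the product can wrap negative.  */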
12923 /* Return true if T is known to be non-negative. If the return
12924 value is based on the assumption that signed overflow is undefined,
12925 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12926 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12928 bool
12929 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12931 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12932 return true;
12934 switch (TREE_CODE (t))
12936 case INTEGER_CST:
12937 return tree_int_cst_sgn (t) >= 0;
12939 case REAL_CST:
12940 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12942 case FIXED_CST:
12943 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12945 case COND_EXPR:
12946 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12948 case SSA_NAME:
12949 /* Limit the depth of recursion to avoid quadratic behavior.
12950 This is expected to catch almost all occurrences in practice.
12951 If this code misses important cases that unbounded recursion
12952 would not, passes that need this information could be revised
12953 to provide it through dataflow propagation. */
12954 return (!name_registered_for_update_p (t)
12955 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12956 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12957 strict_overflow_p, depth));
12959 default:
12960 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12964 /* Return true if a call to FN with arguments ARG0 and ARG1 is known to be non-negative.  If the return
12965 value is based on the assumption that signed overflow is undefined,
12966 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12967 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12969 bool
12970 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
12971 bool *strict_overflow_p, int depth)
12973 switch (fn)
12975 CASE_CFN_ACOS:
12976 CASE_CFN_ACOSH:
12977 CASE_CFN_CABS:
12978 CASE_CFN_COSH:
12979 CASE_CFN_ERFC:
12980 CASE_CFN_EXP:
12981 CASE_CFN_EXP10:
12982 CASE_CFN_EXP2:
12983 CASE_CFN_FABS:
12984 CASE_CFN_FDIM:
12985 CASE_CFN_HYPOT:
12986 CASE_CFN_POW10:
12987 CASE_CFN_FFS:
12988 CASE_CFN_PARITY:
12989 CASE_CFN_POPCOUNT:
12990 CASE_CFN_CLZ:
12991 CASE_CFN_CLRSB:
12992 case CFN_BUILT_IN_BSWAP32:
12993 case CFN_BUILT_IN_BSWAP64:
12994 /* Always true. */
12995 return true;
12997 CASE_CFN_SQRT:
12998 /* sqrt(-0.0) is -0.0. */
12999 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13000 return true;
13001 return RECURSE (arg0);
13003 CASE_CFN_ASINH:
13004 CASE_CFN_ATAN:
13005 CASE_CFN_ATANH:
13006 CASE_CFN_CBRT:
13007 CASE_CFN_CEIL:
13008 CASE_CFN_ERF:
13009 CASE_CFN_EXPM1:
13010 CASE_CFN_FLOOR:
13011 CASE_CFN_FMOD:
13012 CASE_CFN_FREXP:
13013 CASE_CFN_ICEIL:
13014 CASE_CFN_IFLOOR:
13015 CASE_CFN_IRINT:
13016 CASE_CFN_IROUND:
13017 CASE_CFN_LCEIL:
13018 CASE_CFN_LDEXP:
13019 CASE_CFN_LFLOOR:
13020 CASE_CFN_LLCEIL:
13021 CASE_CFN_LLFLOOR:
13022 CASE_CFN_LLRINT:
13023 CASE_CFN_LLROUND:
13024 CASE_CFN_LRINT:
13025 CASE_CFN_LROUND:
13026 CASE_CFN_MODF:
13027 CASE_CFN_NEARBYINT:
13028 CASE_CFN_RINT:
13029 CASE_CFN_ROUND:
13030 CASE_CFN_SCALB:
13031 CASE_CFN_SCALBLN:
13032 CASE_CFN_SCALBN:
13033 CASE_CFN_SIGNBIT:
13034 CASE_CFN_SIGNIFICAND:
13035 CASE_CFN_SINH:
13036 CASE_CFN_TANH:
13037 CASE_CFN_TRUNC:
13038 /* True if the 1st argument is nonnegative. */
13039 return RECURSE (arg0);
13041 CASE_CFN_FMAX:
13042 /* True if the 1st OR 2nd arguments are nonnegative. */
13043 return RECURSE (arg0) || RECURSE (arg1);
13045 CASE_CFN_FMIN:
13046 /* True if the 1st AND 2nd arguments are nonnegative. */
13047 return RECURSE (arg0) && RECURSE (arg1);
13049 CASE_CFN_COPYSIGN:
13050 /* True if the 2nd argument is nonnegative. */
13051 return RECURSE (arg1);
13053 CASE_CFN_POWI:
13054 /* True if the 1st argument is nonnegative or the second
13055 argument is an even integer. */
13056 if (TREE_CODE (arg1) == INTEGER_CST
13057 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13058 return true;
13059 return RECURSE (arg0);
13061 CASE_CFN_POW:
13062 /* True if the 1st argument is nonnegative or the second
13063 argument is an even integer valued real. */
13064 if (TREE_CODE (arg1) == REAL_CST)
13066 REAL_VALUE_TYPE c;
13067 HOST_WIDE_INT n;
13069 c = TREE_REAL_CST (arg1);
13070 n = real_to_integer (&c);
13071 if ((n & 1) == 0)
13073 REAL_VALUE_TYPE cint;
13074 real_from_integer (&cint, VOIDmode, n, SIGNED);
13075 if (real_identical (&c, &cint))
13076 return true;
13079 return RECURSE (arg0);
13081 default:
13082 break;
13084 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
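/* Illustrative example (editor's note): under the CASE_CFN_POW rule
   above, pow (x, 2.0) is non-negative for any x because 2.0 is an even
   integer-valued REAL_CST, while pow (x, 3.0) is non-negative only when
   x itself is.  */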
13087 /* Return true if T is known to be non-negative. If the return
13088 value is based on the assumption that signed overflow is undefined,
13089 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13090 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13092 static bool
13093 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13095 enum tree_code code = TREE_CODE (t);
13096 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13097 return true;
13099 switch (code)
13101 case TARGET_EXPR:
13103 tree temp = TARGET_EXPR_SLOT (t);
13104 t = TARGET_EXPR_INITIAL (t);
13106 /* If the initializer is non-void, then it's a normal expression
13107 that will be assigned to the slot. */
13108 if (!VOID_TYPE_P (t))
13109 return RECURSE (t);
13111 /* Otherwise, the initializer sets the slot in some way. One common
13112 way is an assignment statement at the end of the initializer. */
13113 while (1)
13115 if (TREE_CODE (t) == BIND_EXPR)
13116 t = expr_last (BIND_EXPR_BODY (t));
13117 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13118 || TREE_CODE (t) == TRY_CATCH_EXPR)
13119 t = expr_last (TREE_OPERAND (t, 0));
13120 else if (TREE_CODE (t) == STATEMENT_LIST)
13121 t = expr_last (t);
13122 else
13123 break;
13125 if (TREE_CODE (t) == MODIFY_EXPR
13126 && TREE_OPERAND (t, 0) == temp)
13127 return RECURSE (TREE_OPERAND (t, 1));
13129 return false;
13132 case CALL_EXPR:
13134 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13135 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13137 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13138 get_call_combined_fn (t),
13139 arg0,
13140 arg1,
13141 strict_overflow_p, depth);
13143 case COMPOUND_EXPR:
13144 case MODIFY_EXPR:
13145 return RECURSE (TREE_OPERAND (t, 1));
13147 case BIND_EXPR:
13148 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13150 case SAVE_EXPR:
13151 return RECURSE (TREE_OPERAND (t, 0));
13153 default:
13154 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13158 #undef RECURSE
13159 #undef tree_expr_nonnegative_warnv_p
13161 /* Return true if T is known to be non-negative. If the return
13162 value is based on the assumption that signed overflow is undefined,
13163 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13164 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13166 bool
13167 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13169 enum tree_code code;
13170 if (t == error_mark_node)
13171 return false;
13173 code = TREE_CODE (t);
13174 switch (TREE_CODE_CLASS (code))
13176 case tcc_binary:
13177 case tcc_comparison:
13178 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13179 TREE_TYPE (t),
13180 TREE_OPERAND (t, 0),
13181 TREE_OPERAND (t, 1),
13182 strict_overflow_p, depth);
13184 case tcc_unary:
13185 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13186 TREE_TYPE (t),
13187 TREE_OPERAND (t, 0),
13188 strict_overflow_p, depth);
13190 case tcc_constant:
13191 case tcc_declaration:
13192 case tcc_reference:
13193 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13195 default:
13196 break;
13199 switch (code)
13201 case TRUTH_AND_EXPR:
13202 case TRUTH_OR_EXPR:
13203 case TRUTH_XOR_EXPR:
13204 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13205 TREE_TYPE (t),
13206 TREE_OPERAND (t, 0),
13207 TREE_OPERAND (t, 1),
13208 strict_overflow_p, depth);
13209 case TRUTH_NOT_EXPR:
13210 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13211 TREE_TYPE (t),
13212 TREE_OPERAND (t, 0),
13213 strict_overflow_p, depth);
13215 case COND_EXPR:
13216 case CONSTRUCTOR:
13217 case OBJ_TYPE_REF:
13218 case ASSERT_EXPR:
13219 case ADDR_EXPR:
13220 case WITH_SIZE_EXPR:
13221 case SSA_NAME:
13222 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13224 default:
13225 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13229 /* Return true if `t' is known to be non-negative. Handle warnings
13230 about undefined signed overflow. */
13232 bool
13233 tree_expr_nonnegative_p (tree t)
13235 bool ret, strict_overflow_p;
13237 strict_overflow_p = false;
13238 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13239 if (strict_overflow_p)
13240 fold_overflow_warning (("assuming signed overflow does not occur when "
13241 "determining that expression is always "
13242 "non-negative"),
13243 WARN_STRICT_OVERFLOW_MISC);
13244 return ret;
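/* Illustrative usage (a sketch, assuming a tree ARG in scope): callers
   that do not manage -Wstrict-overflow diagnostics themselves use this
   wrapper, e.g.

     if (tree_expr_nonnegative_p (arg))
       return arg;   -- e.g. when folding fabs (arg) to arg

   and the wrapper emits the "assuming signed overflow does not occur"
   warning whenever that assumption was actually used.  */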
13248 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13249 For floating point we further ensure that T is not denormal.
13250 Similar logic is present in nonzero_address_p in rtlanal.c.
13252 If the return value is based on the assumption that signed overflow
13253 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13254 change *STRICT_OVERFLOW_P. */
13256 bool
13257 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13258 bool *strict_overflow_p)
13260 switch (code)
13262 case ABS_EXPR:
13263 return tree_expr_nonzero_warnv_p (op0,
13264 strict_overflow_p);
13266 case NOP_EXPR:
13268 tree inner_type = TREE_TYPE (op0);
13269 tree outer_type = type;
13271 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13272 && tree_expr_nonzero_warnv_p (op0,
13273 strict_overflow_p));
13275 break;
13277 case NON_LVALUE_EXPR:
13278 return tree_expr_nonzero_warnv_p (op0,
13279 strict_overflow_p);
13281 default:
13282 break;
13285 return false;
13288 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13289 For floating point we further ensure that T is not denormal.
13290 Similar logic is present in nonzero_address_p in rtlanal.c.
13292 If the return value is based on the assumption that signed overflow
13293 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13294 change *STRICT_OVERFLOW_P. */
13296 bool
13297 tree_binary_nonzero_warnv_p (enum tree_code code,
13298 tree type,
13299 tree op0,
13300 tree op1, bool *strict_overflow_p)
13302 bool sub_strict_overflow_p;
13303 switch (code)
13305 case POINTER_PLUS_EXPR:
13306 case PLUS_EXPR:
13307 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13309 /* In the presence of negative values it is hard
13310 to say anything.  */
13311 sub_strict_overflow_p = false;
13312 if (!tree_expr_nonnegative_warnv_p (op0,
13313 &sub_strict_overflow_p)
13314 || !tree_expr_nonnegative_warnv_p (op1,
13315 &sub_strict_overflow_p))
13316 return false;
13317 /* One of the operands must be positive and the other non-negative.  */
13318 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13319 overflows, on a twos-complement machine the sum of two
13320 nonnegative numbers can never be zero. */
13321 return (tree_expr_nonzero_warnv_p (op0,
13322 strict_overflow_p)
13323 || tree_expr_nonzero_warnv_p (op1,
13324 strict_overflow_p));
13326 break;
13328 case MULT_EXPR:
13329 if (TYPE_OVERFLOW_UNDEFINED (type))
13331 if (tree_expr_nonzero_warnv_p (op0,
13332 strict_overflow_p)
13333 && tree_expr_nonzero_warnv_p (op1,
13334 strict_overflow_p))
13336 *strict_overflow_p = true;
13337 return true;
13340 break;
13342 case MIN_EXPR:
13343 sub_strict_overflow_p = false;
13344 if (tree_expr_nonzero_warnv_p (op0,
13345 &sub_strict_overflow_p)
13346 && tree_expr_nonzero_warnv_p (op1,
13347 &sub_strict_overflow_p))
13349 if (sub_strict_overflow_p)
13350 *strict_overflow_p = true;
13352 break;
13354 case MAX_EXPR:
13355 sub_strict_overflow_p = false;
13356 if (tree_expr_nonzero_warnv_p (op0,
13357 &sub_strict_overflow_p))
13359 if (sub_strict_overflow_p)
13360 *strict_overflow_p = true;
13362 /* When both operands are nonzero, MAX must be nonzero too.  */
13363 if (tree_expr_nonzero_warnv_p (op1,
13364 strict_overflow_p))
13365 return true;
13367 /* MAX where operand 0 is positive is positive. */
13368 return tree_expr_nonnegative_warnv_p (op0,
13369 strict_overflow_p);
13371 /* MAX where operand 1 is positive is positive. */
13372 else if (tree_expr_nonzero_warnv_p (op1,
13373 &sub_strict_overflow_p)
13374 && tree_expr_nonnegative_warnv_p (op1,
13375 &sub_strict_overflow_p))
13377 if (sub_strict_overflow_p)
13378 *strict_overflow_p = true;
13379 return true;
13381 break;
13383 case BIT_IOR_EXPR:
13384 return (tree_expr_nonzero_warnv_p (op1,
13385 strict_overflow_p)
13386 || tree_expr_nonzero_warnv_p (op0,
13387 strict_overflow_p));
13389 default:
13390 break;
13393 return false;
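/* Illustrative example (editor's note): for signed int I and J with
   overflow undefined, if both are known non-negative and I is known
   nonzero, the PLUS_EXPR case above reports I + J nonzero without
   touching *STRICT_OVERFLOW_P: on a two's-complement machine the sum of
   two non-negative 32-bit values is at most 2**32 - 2, so it can never
   wrap exactly to zero.  */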
13396 /* Return true when T is an address and is known to be nonzero.
13397 For floating point we further ensure that T is not denormal.
13398 Similar logic is present in nonzero_address_p in rtlanal.c.
13400 If the return value is based on the assumption that signed overflow
13401 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13402 change *STRICT_OVERFLOW_P. */
13404 bool
13405 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13407 bool sub_strict_overflow_p;
13408 switch (TREE_CODE (t))
13410 case INTEGER_CST:
13411 return !integer_zerop (t);
13413 case ADDR_EXPR:
13415 tree base = TREE_OPERAND (t, 0);
13417 if (!DECL_P (base))
13418 base = get_base_address (base);
13420 if (base && TREE_CODE (base) == TARGET_EXPR)
13421 base = TARGET_EXPR_SLOT (base);
13423 if (!base)
13424 return false;
13426 /* For objects in symbol table check if we know they are non-zero.
13427 Don't do anything for variables and functions before symtab is built;
13428 it is quite possible that they will be declared weak later. */
13429 int nonzero_addr = maybe_nonzero_address (base);
13430 if (nonzero_addr >= 0)
13431 return nonzero_addr;
13433 /* Function local objects are never NULL. */
13434 if (DECL_P (base)
13435 && (DECL_CONTEXT (base)
13436 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
13437 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
13438 return true;
13440 /* Constants are never weak. */
13441 if (CONSTANT_CLASS_P (base))
13442 return true;
13444 return false;
13447 case COND_EXPR:
13448 sub_strict_overflow_p = false;
13449 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13450 &sub_strict_overflow_p)
13451 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13452 &sub_strict_overflow_p))
13454 if (sub_strict_overflow_p)
13455 *strict_overflow_p = true;
13456 return true;
13458 break;
13460 default:
13461 break;
13463 return false;
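/* Illustrative example (editor's sketch): in

     int f (void) { int x; return &x != 0; }

   the ADDR_EXPR case above proves &x nonzero because X is an auto
   variable of the enclosing function, so the comparison folds to 1.
   The address of a weak global, by contrast, may legitimately be null,
   and the query stays conservative.  */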
13466 #define integer_valued_real_p(X) \
13467 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13469 #define RECURSE(X) \
13470 ((integer_valued_real_p) (X, depth + 1))
13472 /* Return true if the floating point result of (CODE OP0) has an
13473 integer value. We also allow +Inf, -Inf and NaN to be considered
13474 integer values. Return false for signaling NaN.
13476 DEPTH is the current nesting depth of the query. */
13478 bool
13479 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13481 switch (code)
13483 case FLOAT_EXPR:
13484 return true;
13486 case ABS_EXPR:
13487 return RECURSE (op0);
13489 CASE_CONVERT:
13491 tree type = TREE_TYPE (op0);
13492 if (TREE_CODE (type) == INTEGER_TYPE)
13493 return true;
13494 if (TREE_CODE (type) == REAL_TYPE)
13495 return RECURSE (op0);
13496 break;
13499 default:
13500 break;
13502 return false;
13505 /* Return true if the floating point result of (CODE OP0 OP1) has an
13506 integer value. We also allow +Inf, -Inf and NaN to be considered
13507 integer values. Return false for signaling NaN.
13509 DEPTH is the current nesting depth of the query. */
13511 bool
13512 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13514 switch (code)
13516 case PLUS_EXPR:
13517 case MINUS_EXPR:
13518 case MULT_EXPR:
13519 case MIN_EXPR:
13520 case MAX_EXPR:
13521 return RECURSE (op0) && RECURSE (op1);
13523 default:
13524 break;
13526 return false;
13529 /* Return true if the floating point result of calling FN with arguments
13530 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13531 considered integer values. Return false for signaling NaN. If FN
13532 takes fewer than 2 arguments, the remaining ARGn are null.
13534 DEPTH is the current nesting depth of the query. */
13536 bool
13537 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13539 switch (fn)
13541 CASE_CFN_CEIL:
13542 CASE_CFN_FLOOR:
13543 CASE_CFN_NEARBYINT:
13544 CASE_CFN_RINT:
13545 CASE_CFN_ROUND:
13546 CASE_CFN_TRUNC:
13547 return true;
13549 CASE_CFN_FMIN:
13550 CASE_CFN_FMAX:
13551 return RECURSE (arg0) && RECURSE (arg1);
13553 default:
13554 break;
13556 return false;
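/* Illustrative example (editor's note): ceil, floor, nearbyint, rint,
   round and trunc yield integer values by construction, so a call such
   as floor (x) is accepted without examining X; fmin (a, b) is accepted
   only when both A and B are themselves integer valued.  */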
13559 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13560 has an integer value. We also allow +Inf, -Inf and NaN to be
13561 considered integer values. Return false for signaling NaN.
13563 DEPTH is the current nesting depth of the query. */
13565 bool
13566 integer_valued_real_single_p (tree t, int depth)
13568 switch (TREE_CODE (t))
13570 case REAL_CST:
13571 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13573 case COND_EXPR:
13574 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13576 case SSA_NAME:
13577 /* Limit the depth of recursion to avoid quadratic behavior.
13578 This is expected to catch almost all occurrences in practice.
13579 If this code misses important cases that unbounded recursion
13580 would not, passes that need this information could be revised
13581 to provide it through dataflow propagation. */
13582 return (!name_registered_for_update_p (t)
13583 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13584 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13585 depth));
13587 default:
13588 break;
13590 return false;
13593 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13594 has an integer value. We also allow +Inf, -Inf and NaN to be
13595 considered integer values. Return false for signaling NaN.
13597 DEPTH is the current nesting depth of the query. */
13599 static bool
13600 integer_valued_real_invalid_p (tree t, int depth)
13602 switch (TREE_CODE (t))
13604 case COMPOUND_EXPR:
13605 case MODIFY_EXPR:
13606 case BIND_EXPR:
13607 return RECURSE (TREE_OPERAND (t, 1));
13609 case SAVE_EXPR:
13610 return RECURSE (TREE_OPERAND (t, 0));
13612 default:
13613 break;
13615 return false;
13618 #undef RECURSE
13619 #undef integer_valued_real_p
13621 /* Return true if the floating point expression T has an integer value.
13622 We also allow +Inf, -Inf and NaN to be considered integer values.
13623 Return false for signaling NaN.
13625 DEPTH is the current nesting depth of the query. */
13627 bool
13628 integer_valued_real_p (tree t, int depth)
13630 if (t == error_mark_node)
13631 return false;
13633 tree_code code = TREE_CODE (t);
13634 switch (TREE_CODE_CLASS (code))
13636 case tcc_binary:
13637 case tcc_comparison:
13638 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13639 TREE_OPERAND (t, 1), depth);
13641 case tcc_unary:
13642 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13644 case tcc_constant:
13645 case tcc_declaration:
13646 case tcc_reference:
13647 return integer_valued_real_single_p (t, depth);
13649 default:
13650 break;
13653 switch (code)
13655 case COND_EXPR:
13656 case SSA_NAME:
13657 return integer_valued_real_single_p (t, depth);
13659 case CALL_EXPR:
13661 tree arg0 = (call_expr_nargs (t) > 0
13662 ? CALL_EXPR_ARG (t, 0)
13663 : NULL_TREE);
13664 tree arg1 = (call_expr_nargs (t) > 1
13665 ? CALL_EXPR_ARG (t, 1)
13666 : NULL_TREE);
13667 return integer_valued_real_call_p (get_call_combined_fn (t),
13668 arg0, arg1, depth);
13671 default:
13672 return integer_valued_real_invalid_p (t, depth);
13676 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13677 attempt to fold the expression to a constant without modifying TYPE,
13678 OP0 or OP1.
13680 If the expression can be simplified to a constant, return the
13681 constant; otherwise return NULL_TREE. */
13684 tree
13685 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13687 tree tem = fold_binary (code, type, op0, op1);
13688 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13691 /* Given the components of a unary expression CODE, TYPE and OP0,
13692 attempt to fold the expression to a constant without modifying
13693 TYPE or OP0.
13695 If the expression can be simplified to a constant, return the
13696 constant; otherwise return NULL_TREE. */
13699 tree
13700 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13702 tree tem = fold_unary (code, type, op0);
13703 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13706 /* If EXP represents referencing an element in a constant string
13707 (either via pointer arithmetic or array indexing), return the
13708 tree representing the value accessed, otherwise return NULL. */
13710 tree
13711 fold_read_from_constant_string (tree exp)
13713 if ((TREE_CODE (exp) == INDIRECT_REF
13714 || TREE_CODE (exp) == ARRAY_REF)
13715 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13717 tree exp1 = TREE_OPERAND (exp, 0);
13718 tree index;
13719 tree string;
13720 location_t loc = EXPR_LOCATION (exp);
13722 if (TREE_CODE (exp) == INDIRECT_REF)
13723 string = string_constant (exp1, &index);
13724 else
13726 tree low_bound = array_ref_low_bound (exp);
13727 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13729 /* Optimize the special case of a zero lower bound.
13731 We convert the low_bound to sizetype to avoid some problems
13732 with constant folding. (E.g. suppose the lower bound is 1,
13733 and its mode is QI. Without the conversion, (ARRAY
13734 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13735 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13736 if (! integer_zerop (low_bound))
13737 index = size_diffop_loc (loc, index,
13738 fold_convert_loc (loc, sizetype, low_bound));
13740 string = exp1;
13743 if (string
13744 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13745 && TREE_CODE (string) == STRING_CST
13746 && TREE_CODE (index) == INTEGER_CST
13747 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13748 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13749 == MODE_INT)
13750 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13751 return build_int_cst_type (TREE_TYPE (exp),
13752 (TREE_STRING_POINTER (string)
13753 [TREE_INT_CST_LOW (index)]));
13755 return NULL;
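/* Illustrative example (editor's sketch): given

     static const char s[] = "abc";

   the ARRAY_REF s[1] passes every guard above -- S is a STRING_CST, the
   index 1 is a constant below TREE_STRING_LENGTH, and the element mode
   is a one-byte integer mode -- so the load folds to the character
   constant 'b' (98).  */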
13758 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13759 an integer constant, real, or fixed-point constant.
13761 TYPE is the type of the result. */
13763 static tree
13764 fold_negate_const (tree arg0, tree type)
13766 tree t = NULL_TREE;
13768 switch (TREE_CODE (arg0))
13770 case INTEGER_CST:
13772 bool overflow;
13773 wide_int val = wi::neg (arg0, &overflow);
13774 t = force_fit_type (type, val, 1,
13775 (overflow | TREE_OVERFLOW (arg0))
13776 && !TYPE_UNSIGNED (type));
13777 break;
13780 case REAL_CST:
13781 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13782 break;
13784 case FIXED_CST:
13786 FIXED_VALUE_TYPE f;
13787 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13788 &(TREE_FIXED_CST (arg0)), NULL,
13789 TYPE_SATURATING (type));
13790 t = build_fixed (type, f);
13791 /* Propagate overflow flags. */
13792 if (overflow_p | TREE_OVERFLOW (arg0))
13793 TREE_OVERFLOW (t) = 1;
13794 break;
13797 default:
13798 gcc_unreachable ();
13801 return t;
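/* Illustrative example (editor's note): negating INT_MIN in a signed
   32-bit type cannot be represented, so

     fold_negate_const (TYPE_MIN_VALUE (integer_type_node),
                        integer_type_node)

   yields a new INTEGER_CST equal to INT_MIN with TREE_OVERFLOW set,
   rather than silently wrapping.  */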
13804 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13805 an integer constant or real constant.
13807 TYPE is the type of the result. */
13809 tree
13810 fold_abs_const (tree arg0, tree type)
13812 tree t = NULL_TREE;
13814 switch (TREE_CODE (arg0))
13816 case INTEGER_CST:
13818 /* If the value is unsigned or non-negative, then the absolute value
13819 is the same as the ordinary value. */
13820 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13821 t = arg0;
13823 /* If the value is negative, then the absolute value is
13824 its negation. */
13825 else
13827 bool overflow;
13828 wide_int val = wi::neg (arg0, &overflow);
13829 t = force_fit_type (type, val, -1,
13830 overflow | TREE_OVERFLOW (arg0));
13833 break;
13835 case REAL_CST:
13836 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13837 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13838 else
13839 t = arg0;
13840 break;
13842 default:
13843 gcc_unreachable ();
13846 return t;
13849 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13850 constant. TYPE is the type of the result. */
13852 static tree
13853 fold_not_const (const_tree arg0, tree type)
13855 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13857 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13860 /* Given CODE, a relational operator, the target type, TYPE and two
13861 constant operands OP0 and OP1, return the result of the
13862 relational operation. If the result is not a compile time
13863 constant, then return NULL_TREE. */
13865 static tree
13866 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13868 int result, invert;
13870 /* From here on, the only cases we handle are when the result is
13871 known to be a constant. */
13873 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13875 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13876 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13878 /* Handle the cases where either operand is a NaN. */
13879 if (real_isnan (c0) || real_isnan (c1))
13881 switch (code)
13883 case EQ_EXPR:
13884 case ORDERED_EXPR:
13885 result = 0;
13886 break;
13888 case NE_EXPR:
13889 case UNORDERED_EXPR:
13890 case UNLT_EXPR:
13891 case UNLE_EXPR:
13892 case UNGT_EXPR:
13893 case UNGE_EXPR:
13894 case UNEQ_EXPR:
13895 result = 1;
13896 break;
13898 case LT_EXPR:
13899 case LE_EXPR:
13900 case GT_EXPR:
13901 case GE_EXPR:
13902 case LTGT_EXPR:
13903 if (flag_trapping_math)
13904 return NULL_TREE;
13905 result = 0;
13906 break;
13908 default:
13909 gcc_unreachable ();
13912 return constant_boolean_node (result, type);
13915 return constant_boolean_node (real_compare (code, c0, c1), type);
13918 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13920 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13921 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13922 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13925 /* Handle equality/inequality of complex constants. */
13926 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13928 tree rcond = fold_relational_const (code, type,
13929 TREE_REALPART (op0),
13930 TREE_REALPART (op1));
13931 tree icond = fold_relational_const (code, type,
13932 TREE_IMAGPART (op0),
13933 TREE_IMAGPART (op1));
13934 if (code == EQ_EXPR)
13935 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13936 else if (code == NE_EXPR)
13937 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13938 else
13939 return NULL_TREE;
13942 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13944 if (!VECTOR_TYPE_P (type))
13946 /* Have vector comparison with scalar boolean result. */
13947 bool result = true;
13948 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
13949 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
13950 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
13952 tree elem0 = VECTOR_CST_ELT (op0, i);
13953 tree elem1 = VECTOR_CST_ELT (op1, i);
13954 tree tmp = fold_relational_const (code, type, elem0, elem1);
13955 result &= integer_onep (tmp);
13957 if (code == NE_EXPR)
13958 result = !result;
13959 return constant_boolean_node (result, type);
13961 unsigned count = VECTOR_CST_NELTS (op0);
13962 tree *elts = XALLOCAVEC (tree, count);
13963 gcc_assert (VECTOR_CST_NELTS (op1) == count
13964 && TYPE_VECTOR_SUBPARTS (type) == count);
13966 for (unsigned i = 0; i < count; i++)
13968 tree elem_type = TREE_TYPE (type);
13969 tree elem0 = VECTOR_CST_ELT (op0, i);
13970 tree elem1 = VECTOR_CST_ELT (op1, i);
13972 tree tem = fold_relational_const (code, elem_type,
13973 elem0, elem1);
13975 if (tem == NULL_TREE)
13976 return NULL_TREE;
13978 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
13981 return build_vector (type, elts);
13984 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13986 To compute GT, swap the arguments and do LT.
13987 To compute GE, do LT and invert the result.
13988 To compute LE, swap the arguments, do LT and invert the result.
13989 To compute NE, do EQ and invert the result.
13991 Therefore, the code below must handle only EQ and LT. */
13993 if (code == LE_EXPR || code == GT_EXPR)
13995 std::swap (op0, op1);
13996 code = swap_tree_comparison (code);
13999 /* Note that it is safe to invert for real values here because we
14000 have already handled the one case that it matters. */
14002 invert = 0;
14003 if (code == NE_EXPR || code == GE_EXPR)
14005 invert = 1;
14006 code = invert_tree_comparison (code, false);
14009 /* Compute a result for LT or EQ if args permit;
14010 otherwise return NULL_TREE. */
14011 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14013 if (code == EQ_EXPR)
14014 result = tree_int_cst_equal (op0, op1);
14015 else
14016 result = tree_int_cst_lt (op0, op1);
14018 else
14019 return NULL_TREE;
14021 if (invert)
14022 result ^= 1;
14023 return constant_boolean_node (result, type);
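/* Illustrative summary (editor's note) of the NaN handling above, for a
   quiet NaN operand:

     NaN == 1.0  -> 0          NaN != 1.0  -> 1
     NaN <  1.0  -> 0, or NULL_TREE under -ftrapping-math

   Only the ordered comparisons must stay unfolded when trapping math is
   on, since they may raise an invalid-operation exception at run time.  */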
14026 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14027 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14028 itself. */
14030 tree
14031 fold_build_cleanup_point_expr (tree type, tree expr)
14033 /* If the expression does not have side effects then we don't have to wrap
14034 it with a cleanup point expression. */
14035 if (!TREE_SIDE_EFFECTS (expr))
14036 return expr;
14038 /* If the expression is a return, check whether the expression inside the
14039 return, or the right-hand side of the MODIFY_EXPR inside the return, has
14040 no side effects. If either has none, we don't need to wrap the
14041 expression in a cleanup point expression. Note we don't check the
14042 left-hand side of the modify because it should always be the return decl. */
14043 if (TREE_CODE (expr) == RETURN_EXPR)
14045 tree op = TREE_OPERAND (expr, 0);
14046 if (!op || !TREE_SIDE_EFFECTS (op))
14047 return expr;
14048 op = TREE_OPERAND (op, 1);
14049 if (!TREE_SIDE_EFFECTS (op))
14050 return expr;
14053 return build1 (CLEANUP_POINT_EXPR, type, expr);
14056 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14057 of an indirection through OP0, or NULL_TREE if no simplification is
14058 possible. */
14060 tree
14061 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14063 tree sub = op0;
14064 tree subtype;
14066 STRIP_NOPS (sub);
14067 subtype = TREE_TYPE (sub);
14068 if (!POINTER_TYPE_P (subtype))
14069 return NULL_TREE;
14071 if (TREE_CODE (sub) == ADDR_EXPR)
14073 tree op = TREE_OPERAND (sub, 0);
14074 tree optype = TREE_TYPE (op);
14075 /* *&CONST_DECL -> to the value of the const decl. */
14076 if (TREE_CODE (op) == CONST_DECL)
14077 return DECL_INITIAL (op);
14078 /* *&p => p; make sure to handle *&"str"[cst] here. */
14079 if (type == optype)
14081 tree fop = fold_read_from_constant_string (op);
14082 if (fop)
14083 return fop;
14084 else
14085 return op;
14087 /* *(foo *)&fooarray => fooarray[0] */
14088 else if (TREE_CODE (optype) == ARRAY_TYPE
14089 && type == TREE_TYPE (optype)
14090 && (!in_gimple_form
14091 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14093 tree type_domain = TYPE_DOMAIN (optype);
14094 tree min_val = size_zero_node;
14095 if (type_domain && TYPE_MIN_VALUE (type_domain))
14096 min_val = TYPE_MIN_VALUE (type_domain);
14097 if (in_gimple_form
14098 && TREE_CODE (min_val) != INTEGER_CST)
14099 return NULL_TREE;
14100 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14101 NULL_TREE, NULL_TREE);
14103 /* *(foo *)&complexfoo => __real__ complexfoo */
14104 else if (TREE_CODE (optype) == COMPLEX_TYPE
14105 && type == TREE_TYPE (optype))
14106 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14107 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14108 else if (TREE_CODE (optype) == VECTOR_TYPE
14109 && type == TREE_TYPE (optype))
14111 tree part_width = TYPE_SIZE (type);
14112 tree index = bitsize_int (0);
14113 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14117 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14118 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14120 tree op00 = TREE_OPERAND (sub, 0);
14121 tree op01 = TREE_OPERAND (sub, 1);
14123 STRIP_NOPS (op00);
14124 if (TREE_CODE (op00) == ADDR_EXPR)
14126 tree op00type;
14127 op00 = TREE_OPERAND (op00, 0);
14128 op00type = TREE_TYPE (op00);
14130 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14131 if (TREE_CODE (op00type) == VECTOR_TYPE
14132 && type == TREE_TYPE (op00type))
14134 tree part_width = TYPE_SIZE (type);
14135 unsigned HOST_WIDE_INT max_offset
14136 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
14137 * TYPE_VECTOR_SUBPARTS (op00type));
14138 if (tree_int_cst_sign_bit (op01) == 0
14139 && compare_tree_int (op01, max_offset) == -1)
14141 unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
14142 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14143 tree index = bitsize_int (indexi);
14144 return fold_build3_loc (loc,
14145 BIT_FIELD_REF, type, op00,
14146 part_width, index);
14149 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14150 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14151 && type == TREE_TYPE (op00type))
14153 tree size = TYPE_SIZE_UNIT (type);
14154 if (tree_int_cst_equal (size, op01))
14155 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14157 /* ((foo *)&fooarray)[1] => fooarray[1] */
14158 else if (TREE_CODE (op00type) == ARRAY_TYPE
14159 && type == TREE_TYPE (op00type))
14161 tree type_domain = TYPE_DOMAIN (op00type);
14162 tree min_val = size_zero_node;
14163 if (type_domain && TYPE_MIN_VALUE (type_domain))
14164 min_val = TYPE_MIN_VALUE (type_domain);
14165 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14166 TYPE_SIZE_UNIT (type));
14167 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14168 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14169 NULL_TREE, NULL_TREE);
14174 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14175 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14176 && type == TREE_TYPE (TREE_TYPE (subtype))
14177 && (!in_gimple_form
14178 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14180 tree type_domain;
14181 tree min_val = size_zero_node;
14182 sub = build_fold_indirect_ref_loc (loc, sub);
14183 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14184 if (type_domain && TYPE_MIN_VALUE (type_domain))
14185 min_val = TYPE_MIN_VALUE (type_domain);
14186 if (in_gimple_form
14187 && TREE_CODE (min_val) != INTEGER_CST)
14188 return NULL_TREE;
14189 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14190 NULL_TREE);
14193 return NULL_TREE;
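/* Illustrative example (editor's sketch): for _Complex double Z, the
   COMPLEX_TYPE cases above give

     *(double *) &z       => __real__ z
     ((double *) &z)[1]   => __imag__ z   -- offset == sizeof (double)

   mirroring the storage layout of complex values.  */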
14196 /* Builds an expression for an indirection through T, simplifying some
14197 cases. */
14199 tree
14200 build_fold_indirect_ref_loc (location_t loc, tree t)
14202 tree type = TREE_TYPE (TREE_TYPE (t));
14203 tree sub = fold_indirect_ref_1 (loc, type, t);
14205 if (sub)
14206 return sub;
14208 return build1_loc (loc, INDIRECT_REF, type, t);
14211 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14213 tree
14214 fold_indirect_ref_loc (location_t loc, tree t)
14216 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14218 if (sub)
14219 return sub;
14220 else
14221 return t;
14224 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14225 whose result is ignored. The type of the returned tree need not be
14226 the same as the original expression. */
14228 tree
14229 fold_ignored_result (tree t)
14231 if (!TREE_SIDE_EFFECTS (t))
14232 return integer_zero_node;
14234 for (;;)
14235 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14237 case tcc_unary:
14238 t = TREE_OPERAND (t, 0);
14239 break;
14241 case tcc_binary:
14242 case tcc_comparison:
14243 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14244 t = TREE_OPERAND (t, 0);
14245 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14246 t = TREE_OPERAND (t, 1);
14247 else
14248 return t;
14249 break;
14251 case tcc_expression:
14252 switch (TREE_CODE (t))
14254 case COMPOUND_EXPR:
14255 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14256 return t;
14257 t = TREE_OPERAND (t, 0);
14258 break;
14260 case COND_EXPR:
14261 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14262 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14263 return t;
14264 t = TREE_OPERAND (t, 0);
14265 break;
14267 default:
14268 return t;
14270 break;
14272 default:
14273 return t;
14277 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14279 tree
14280 round_up_loc (location_t loc, tree value, unsigned int divisor)
14282 tree div = NULL_TREE;
14284 if (divisor == 1)
14285 return value;
14287 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14288 have to do anything. Only do this check when VALUE is not a
14289 constant, because for a constant the check is more expensive than
14290 simply performing the rounding. */
14291 if (TREE_CODE (value) != INTEGER_CST)
14293 div = build_int_cst (TREE_TYPE (value), divisor);
14295 if (multiple_of_p (TREE_TYPE (value), value, div))
14296 return value;
14299 /* If divisor is a power of two, simplify this to bit manipulation. */
14300 if (divisor == (divisor & -divisor))
14302 if (TREE_CODE (value) == INTEGER_CST)
14304 wide_int val = value;
14305 bool overflow_p;
14307 if ((val & (divisor - 1)) == 0)
14308 return value;
14310 overflow_p = TREE_OVERFLOW (value);
14311 val += divisor - 1;
14312 val &= - (int) divisor;
14313 if (val == 0)
14314 overflow_p = true;
14316 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14318 else
14320 tree t;
14322 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14323 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14324 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14325 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14328 else
14330 if (!div)
14331 div = build_int_cst (TREE_TYPE (value), divisor);
14332 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14333 value = size_binop_loc (loc, MULT_EXPR, value, div);
14336 return value;
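/* Illustrative example (editor's note): for the power-of-two path
   above, rounding VALUE up to a multiple of 8 builds the classic bit
   trick

     (value + 7) & -8      -- PLUS_EXPR, then BIT_AND_EXPR

   while an INTEGER_CST input is folded on the spot, with wrap-to-zero
   recorded as overflow via force_fit_type.  */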
14339 /* Likewise, but round down. */
14341 tree
14342 round_down_loc (location_t loc, tree value, int divisor)
14344 tree div = NULL_TREE;
14346 gcc_assert (divisor > 0);
14347 if (divisor == 1)
14348 return value;
14350 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14351 have to do anything. Only do this check when VALUE is not a
14352 constant, because for a constant the check is more expensive than
14353 simply performing the rounding. */
14354 if (TREE_CODE (value) != INTEGER_CST)
14356 div = build_int_cst (TREE_TYPE (value), divisor);
14358 if (multiple_of_p (TREE_TYPE (value), value, div))
14359 return value;
14362 /* If divisor is a power of two, simplify this to bit manipulation. */
14363 if (divisor == (divisor & -divisor))
14365 tree t;
14367 t = build_int_cst (TREE_TYPE (value), -divisor);
14368 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14370 else
14372 if (!div)
14373 div = build_int_cst (TREE_TYPE (value), divisor);
14374 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14375 value = size_binop_loc (loc, MULT_EXPR, value, div);
14378 return value;
14381 /* Returns the pointer to the base of the object addressed by EXP and
14382 extracts the information about the offset of the access, storing it
14383 to PBITPOS and POFFSET. */
14385 static tree
14386 split_address_to_core_and_offset (tree exp,
14387 HOST_WIDE_INT *pbitpos, tree *poffset)
14389 tree core;
14390 machine_mode mode;
14391 int unsignedp, reversep, volatilep;
14392 HOST_WIDE_INT bitsize;
14393 location_t loc = EXPR_LOCATION (exp);
14395 if (TREE_CODE (exp) == ADDR_EXPR)
14397 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14398 poffset, &mode, &unsignedp, &reversep,
14399 &volatilep, false);
14400 core = build_fold_addr_expr_loc (loc, core);
14402 else
14404 core = exp;
14405 *pbitpos = 0;
14406 *poffset = NULL_TREE;
14409 return core;
14412 /* Returns true if addresses of E1 and E2 differ by a constant, false
14413 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14415 bool
14416 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14418 tree core1, core2;
14419 HOST_WIDE_INT bitpos1, bitpos2;
14420 tree toffset1, toffset2, tdiff, type;
14422 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14423 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14425 if (bitpos1 % BITS_PER_UNIT != 0
14426 || bitpos2 % BITS_PER_UNIT != 0
14427 || !operand_equal_p (core1, core2, 0))
14428 return false;
14430 if (toffset1 && toffset2)
14432 type = TREE_TYPE (toffset1);
14433 if (type != TREE_TYPE (toffset2))
14434 toffset2 = fold_convert (type, toffset2);
14436 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14437 if (!cst_and_fits_in_hwi (tdiff))
14438 return false;
14440 *diff = int_cst_value (tdiff);
14442 else if (toffset1 || toffset2)
14444 /* If only one of the offsets is non-constant, the difference cannot
14445 be a constant. */
14446 return false;
14448 else
14449 *diff = 0;
14451 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14452 return true;
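/* Illustrative example (editor's sketch, assuming 32-bit int): for

     int a[10];

   the addresses &a[5] and &a[2] share the core &a and differ only in
   bit position (160 vs. 64), so *DIFF is set to 12 bytes and the
   function returns true; addresses with different cores, or with a
   variable offset on one side only, return false.  */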
14455 /* Return OFF converted to a pointer offset type suitable as offset for
14456 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14457 tree
14458 convert_to_ptrofftype_loc (location_t loc, tree off)
14460 return fold_convert_loc (loc, sizetype, off);
14463 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14464 tree
14465 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14467 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14468 ptr, convert_to_ptrofftype_loc (loc, off));
14471 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14472 tree
14473 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14475 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14476 ptr, size_int (off));
14479 /* Return a char pointer for a C string if it is a string constant
14480 or the sum of a string constant and an integer constant. */
14482 const char *
14483 c_getstr (tree src)
14485 tree offset_node;
14487 src = string_constant (src, &offset_node);
14488 if (src == 0)
14489 return 0;
14491 if (offset_node == 0)
14492 return TREE_STRING_POINTER (src);
14493 else if (!tree_fits_uhwi_p (offset_node)
14494 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
14495 return 0;
14497 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);