gcc/
[official-gcc.git] / gcc / fold-const.c
blobd896d7a16750b80b7054ac61cb22832d2165938d
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
28 /* The entry points in this file are fold, size_int_wide and size_binop.
30 fold takes a tree as argument and returns a simplified tree.
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "alias.h"
49 #include "symtab.h"
50 #include "tree.h"
51 #include "fold-const.h"
52 #include "stor-layout.h"
53 #include "calls.h"
54 #include "tree-iterator.h"
55 #include "realmpfr.h"
56 #include "rtl.h"
57 #include "hard-reg-set.h"
58 #include "function.h"
59 #include "insn-config.h"
60 #include "expmed.h"
61 #include "dojump.h"
62 #include "explow.h"
63 #include "emit-rtl.h"
64 #include "varasm.h"
65 #include "stmt.h"
66 #include "expr.h"
67 #include "tm_p.h"
68 #include "target.h"
69 #include "diagnostic-core.h"
70 #include "intl.h"
71 #include "langhooks.h"
72 #include "md5.h"
73 #include "predict.h"
74 #include "basic-block.h"
75 #include "tree-ssa-alias.h"
76 #include "internal-fn.h"
77 #include "tree-eh.h"
78 #include "gimple-expr.h"
79 #include "gimple.h"
80 #include "gimplify.h"
81 #include "tree-dfa.h"
82 #include "builtins.h"
83 #include "cgraph.h"
84 #include "generic-match.h"
85 #include "optabs.h"
87 /* Nonzero if we are folding constants inside an initializer; zero
88 otherwise. */
89 int folding_initializer = 0;
91 /* The following constants represent a bit based encoding of GCC's
92 comparison operators. This encoding simplifies transformations
93 on relational comparison operators, such as AND and OR. */
94 enum comparison_code {
95 COMPCODE_FALSE = 0,
96 COMPCODE_LT = 1,
97 COMPCODE_EQ = 2,
98 COMPCODE_LE = 3,
99 COMPCODE_GT = 4,
100 COMPCODE_LTGT = 5,
101 COMPCODE_GE = 6,
102 COMPCODE_ORD = 7,
103 COMPCODE_UNORD = 8,
104 COMPCODE_UNLT = 9,
105 COMPCODE_UNEQ = 10,
106 COMPCODE_UNLE = 11,
107 COMPCODE_UNGT = 12,
108 COMPCODE_NE = 13,
109 COMPCODE_UNGE = 14,
110 COMPCODE_TRUE = 15
113 static bool negate_mathfn_p (enum built_in_function);
114 static bool negate_expr_p (tree);
115 static tree negate_expr (tree);
116 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
117 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
118 static enum comparison_code comparison_to_compcode (enum tree_code);
119 static enum tree_code compcode_to_comparison (enum comparison_code);
120 static int operand_equal_for_comparison_p (tree, tree, tree);
121 static int twoval_comparison_p (tree, tree *, tree *, int *);
122 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
123 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
124 static tree make_bit_field_ref (location_t, tree, tree,
125 HOST_WIDE_INT, HOST_WIDE_INT, int);
126 static tree optimize_bit_field_compare (location_t, enum tree_code,
127 tree, tree, tree);
128 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
129 HOST_WIDE_INT *,
130 machine_mode *, int *, int *,
131 tree *, tree *);
132 static int simple_operand_p (const_tree);
133 static bool simple_operand_p_2 (tree);
134 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
135 static tree range_predecessor (tree);
136 static tree range_successor (tree);
137 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
138 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
139 static tree unextend (tree, int, int, tree);
140 static tree optimize_minmax_comparison (location_t, enum tree_code,
141 tree, tree, tree);
142 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
143 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
144 static tree fold_binary_op_with_conditional_arg (location_t,
145 enum tree_code, tree,
146 tree, tree,
147 tree, tree, int);
148 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
149 static bool reorder_operands_p (const_tree, const_tree);
150 static tree fold_negate_const (tree, tree);
151 static tree fold_not_const (const_tree, tree);
152 static tree fold_relational_const (enum tree_code, tree, tree, tree);
153 static tree fold_convert_const (enum tree_code, tree, tree);
154 static tree fold_view_convert_expr (tree, tree);
155 static bool vec_cst_ctor_to_array (tree, tree *);
158 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
159 Otherwise, return LOC. */
161 static location_t
162 expr_location_or (tree t, location_t loc)
164 location_t tloc = EXPR_LOCATION (t);
165 return tloc == UNKNOWN_LOCATION ? loc : tloc;
168 /* Similar to protected_set_expr_location, but never modify x in place,
169 if location can and needs to be set, unshare it. */
171 static inline tree
172 protected_set_expr_location_unshare (tree x, location_t loc)
174 if (CAN_HAVE_LOCATION_P (x)
175 && EXPR_LOCATION (x) != loc
176 && !(TREE_CODE (x) == SAVE_EXPR
177 || TREE_CODE (x) == TARGET_EXPR
178 || TREE_CODE (x) == BIND_EXPR))
180 x = copy_node (x);
181 SET_EXPR_LOCATION (x, loc);
183 return x;
186 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
187 division and returns the quotient. Otherwise returns
188 NULL_TREE. */
190 tree
191 div_if_zero_remainder (const_tree arg1, const_tree arg2)
193 widest_int quo;
195 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
196 SIGNED, &quo))
197 return wide_int_to_tree (TREE_TYPE (arg1), quo);
199 return NULL_TREE;
202 /* This is nonzero if we should defer warnings about undefined
203 overflow. This facility exists because these warnings are a
204 special case. The code to estimate loop iterations does not want
205 to issue any warnings, since it works with expressions which do not
206 occur in user code. Various bits of cleanup code call fold(), but
207 only use the result if it has certain characteristics (e.g., is a
208 constant); that code only wants to issue a warning if the result is
209 used. */
211 static int fold_deferring_overflow_warnings;
213 /* If a warning about undefined overflow is deferred, this is the
214 warning. Note that this may cause us to turn two warnings into
215 one, but that is fine since it is sufficient to only give one
216 warning per expression. */
218 static const char* fold_deferred_overflow_warning;
220 /* If a warning about undefined overflow is deferred, this is the
221 level at which the warning should be emitted. */
223 static enum warn_strict_overflow_code fold_deferred_overflow_code;
225 /* Start deferring overflow warnings. We could use a stack here to
226 permit nested calls, but at present it is not necessary. */
228 void
229 fold_defer_overflow_warnings (void)
231 ++fold_deferring_overflow_warnings;
234 /* Stop deferring overflow warnings. If there is a pending warning,
235 and ISSUE is true, then issue the warning if appropriate. STMT is
236 the statement with which the warning should be associated (used for
237 location information); STMT may be NULL. CODE is the level of the
238 warning--a warn_strict_overflow_code value. This function will use
239 the smaller of CODE and the deferred code when deciding whether to
240 issue the warning. CODE may be zero to mean to always use the
241 deferred code. */
243 void
244 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
246 const char *warnmsg;
247 location_t locus;
249 gcc_assert (fold_deferring_overflow_warnings > 0);
250 --fold_deferring_overflow_warnings;
251 if (fold_deferring_overflow_warnings > 0)
253 if (fold_deferred_overflow_warning != NULL
254 && code != 0
255 && code < (int) fold_deferred_overflow_code)
256 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
257 return;
260 warnmsg = fold_deferred_overflow_warning;
261 fold_deferred_overflow_warning = NULL;
263 if (!issue || warnmsg == NULL)
264 return;
266 if (gimple_no_warning_p (stmt))
267 return;
269 /* Use the smallest code level when deciding to issue the
270 warning. */
271 if (code == 0 || code > (int) fold_deferred_overflow_code)
272 code = fold_deferred_overflow_code;
274 if (!issue_strict_overflow_warning (code))
275 return;
277 if (stmt == NULL)
278 locus = input_location;
279 else
280 locus = gimple_location (stmt);
281 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
284 /* Stop deferring overflow warnings, ignoring any deferred
285 warnings. */
287 void
288 fold_undefer_and_ignore_overflow_warnings (void)
290 fold_undefer_overflow_warnings (false, NULL, 0);
293 /* Whether we are deferring overflow warnings. */
295 bool
296 fold_deferring_overflow_warnings_p (void)
298 return fold_deferring_overflow_warnings > 0;
301 /* This is called when we fold something based on the fact that signed
302 overflow is undefined. */
304 static void
305 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
307 if (fold_deferring_overflow_warnings > 0)
309 if (fold_deferred_overflow_warning == NULL
310 || wc < fold_deferred_overflow_code)
312 fold_deferred_overflow_warning = gmsgid;
313 fold_deferred_overflow_code = wc;
316 else if (issue_strict_overflow_warning (wc))
317 warning (OPT_Wstrict_overflow, gmsgid);
320 /* Return true if the built-in mathematical function specified by CODE
321 is odd, i.e. -f(x) == f(-x). */
323 static bool
324 negate_mathfn_p (enum built_in_function code)
326 switch (code)
328 CASE_FLT_FN (BUILT_IN_ASIN):
329 CASE_FLT_FN (BUILT_IN_ASINH):
330 CASE_FLT_FN (BUILT_IN_ATAN):
331 CASE_FLT_FN (BUILT_IN_ATANH):
332 CASE_FLT_FN (BUILT_IN_CASIN):
333 CASE_FLT_FN (BUILT_IN_CASINH):
334 CASE_FLT_FN (BUILT_IN_CATAN):
335 CASE_FLT_FN (BUILT_IN_CATANH):
336 CASE_FLT_FN (BUILT_IN_CBRT):
337 CASE_FLT_FN (BUILT_IN_CPROJ):
338 CASE_FLT_FN (BUILT_IN_CSIN):
339 CASE_FLT_FN (BUILT_IN_CSINH):
340 CASE_FLT_FN (BUILT_IN_CTAN):
341 CASE_FLT_FN (BUILT_IN_CTANH):
342 CASE_FLT_FN (BUILT_IN_ERF):
343 CASE_FLT_FN (BUILT_IN_LLROUND):
344 CASE_FLT_FN (BUILT_IN_LROUND):
345 CASE_FLT_FN (BUILT_IN_ROUND):
346 CASE_FLT_FN (BUILT_IN_SIN):
347 CASE_FLT_FN (BUILT_IN_SINH):
348 CASE_FLT_FN (BUILT_IN_TAN):
349 CASE_FLT_FN (BUILT_IN_TANH):
350 CASE_FLT_FN (BUILT_IN_TRUNC):
351 return true;
353 CASE_FLT_FN (BUILT_IN_LLRINT):
354 CASE_FLT_FN (BUILT_IN_LRINT):
355 CASE_FLT_FN (BUILT_IN_NEARBYINT):
356 CASE_FLT_FN (BUILT_IN_RINT):
357 return !flag_rounding_math;
359 default:
360 break;
362 return false;
365 /* Check whether we may negate an integer constant T without causing
366 overflow. */
368 bool
369 may_negate_without_overflow_p (const_tree t)
371 tree type;
373 gcc_assert (TREE_CODE (t) == INTEGER_CST);
375 type = TREE_TYPE (t);
376 if (TYPE_UNSIGNED (type))
377 return false;
379 return !wi::only_sign_bit_p (t);
382 /* Determine whether an expression T can be cheaply negated using
383 the function negate_expr without introducing undefined overflow. */
385 static bool
386 negate_expr_p (tree t)
388 tree type;
390 if (t == 0)
391 return false;
393 type = TREE_TYPE (t);
395 STRIP_SIGN_NOPS (t);
396 switch (TREE_CODE (t))
398 case INTEGER_CST:
399 if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
400 return true;
402 /* Check that -CST will not overflow type. */
403 return may_negate_without_overflow_p (t);
404 case BIT_NOT_EXPR:
405 return (INTEGRAL_TYPE_P (type)
406 && TYPE_OVERFLOW_WRAPS (type));
408 case FIXED_CST:
409 return true;
411 case NEGATE_EXPR:
412 return !TYPE_OVERFLOW_SANITIZED (type);
414 case REAL_CST:
415 /* We want to canonicalize to positive real constants. Pretend
416 that only negative ones can be easily negated. */
417 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
419 case COMPLEX_CST:
420 return negate_expr_p (TREE_REALPART (t))
421 && negate_expr_p (TREE_IMAGPART (t));
423 case VECTOR_CST:
425 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
426 return true;
428 int count = TYPE_VECTOR_SUBPARTS (type), i;
430 for (i = 0; i < count; i++)
431 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
432 return false;
434 return true;
437 case COMPLEX_EXPR:
438 return negate_expr_p (TREE_OPERAND (t, 0))
439 && negate_expr_p (TREE_OPERAND (t, 1));
441 case CONJ_EXPR:
442 return negate_expr_p (TREE_OPERAND (t, 0));
444 case PLUS_EXPR:
445 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
446 || HONOR_SIGNED_ZEROS (element_mode (type)))
447 return false;
448 /* -(A + B) -> (-B) - A. */
449 if (negate_expr_p (TREE_OPERAND (t, 1))
450 && reorder_operands_p (TREE_OPERAND (t, 0),
451 TREE_OPERAND (t, 1)))
452 return true;
453 /* -(A + B) -> (-A) - B. */
454 return negate_expr_p (TREE_OPERAND (t, 0));
456 case MINUS_EXPR:
457 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
458 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
459 && !HONOR_SIGNED_ZEROS (element_mode (type))
460 && reorder_operands_p (TREE_OPERAND (t, 0),
461 TREE_OPERAND (t, 1));
463 case MULT_EXPR:
464 if (TYPE_UNSIGNED (TREE_TYPE (t)))
465 break;
467 /* Fall through. */
469 case RDIV_EXPR:
470 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
471 return negate_expr_p (TREE_OPERAND (t, 1))
472 || negate_expr_p (TREE_OPERAND (t, 0));
473 break;
475 case TRUNC_DIV_EXPR:
476 case ROUND_DIV_EXPR:
477 case EXACT_DIV_EXPR:
478 /* In general we can't negate A / B, because if A is INT_MIN and
479 B is 1, we may turn this into INT_MIN / -1 which is undefined
480 and actually traps on some architectures. But if overflow is
481 undefined, we can negate, because - (INT_MIN / 1) is an
482 overflow. */
483 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
485 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
486 break;
487 /* If overflow is undefined then we have to be careful because
488 we ask whether it's ok to associate the negate with the
489 division which is not ok for example for
490 -((a - b) / c) where (-(a - b)) / c may invoke undefined
491 overflow because of negating INT_MIN. So do not use
492 negate_expr_p here but open-code the two important cases. */
493 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
494 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
495 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
496 return true;
498 else if (negate_expr_p (TREE_OPERAND (t, 0)))
499 return true;
500 return negate_expr_p (TREE_OPERAND (t, 1));
502 case NOP_EXPR:
503 /* Negate -((double)float) as (double)(-float). */
504 if (TREE_CODE (type) == REAL_TYPE)
506 tree tem = strip_float_extensions (t);
507 if (tem != t)
508 return negate_expr_p (tem);
510 break;
512 case CALL_EXPR:
513 /* Negate -f(x) as f(-x). */
514 if (negate_mathfn_p (builtin_mathfn_code (t)))
515 return negate_expr_p (CALL_EXPR_ARG (t, 0));
516 break;
518 case RSHIFT_EXPR:
519 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
520 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
522 tree op1 = TREE_OPERAND (t, 1);
523 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
524 return true;
526 break;
528 default:
529 break;
531 return false;
534 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
535 simplification is possible.
536 If negate_expr_p would return true for T, NULL_TREE will never be
537 returned. */
539 static tree
540 fold_negate_expr (location_t loc, tree t)
542 tree type = TREE_TYPE (t);
543 tree tem;
545 switch (TREE_CODE (t))
547 /* Convert - (~A) to A + 1. */
548 case BIT_NOT_EXPR:
549 if (INTEGRAL_TYPE_P (type))
550 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
551 build_one_cst (type));
552 break;
554 case INTEGER_CST:
555 tem = fold_negate_const (t, type);
556 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
557 || (ANY_INTEGRAL_TYPE_P (type)
558 && !TYPE_OVERFLOW_TRAPS (type)
559 && TYPE_OVERFLOW_WRAPS (type))
560 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
561 return tem;
562 break;
564 case REAL_CST:
565 tem = fold_negate_const (t, type);
566 return tem;
568 case FIXED_CST:
569 tem = fold_negate_const (t, type);
570 return tem;
572 case COMPLEX_CST:
574 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
575 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
576 if (rpart && ipart)
577 return build_complex (type, rpart, ipart);
579 break;
581 case VECTOR_CST:
583 int count = TYPE_VECTOR_SUBPARTS (type), i;
584 tree *elts = XALLOCAVEC (tree, count);
586 for (i = 0; i < count; i++)
588 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
589 if (elts[i] == NULL_TREE)
590 return NULL_TREE;
593 return build_vector (type, elts);
596 case COMPLEX_EXPR:
597 if (negate_expr_p (t))
598 return fold_build2_loc (loc, COMPLEX_EXPR, type,
599 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
600 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
601 break;
603 case CONJ_EXPR:
604 if (negate_expr_p (t))
605 return fold_build1_loc (loc, CONJ_EXPR, type,
606 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
607 break;
609 case NEGATE_EXPR:
610 if (!TYPE_OVERFLOW_SANITIZED (type))
611 return TREE_OPERAND (t, 0);
612 break;
614 case PLUS_EXPR:
615 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
616 && !HONOR_SIGNED_ZEROS (element_mode (type)))
618 /* -(A + B) -> (-B) - A. */
619 if (negate_expr_p (TREE_OPERAND (t, 1))
620 && reorder_operands_p (TREE_OPERAND (t, 0),
621 TREE_OPERAND (t, 1)))
623 tem = negate_expr (TREE_OPERAND (t, 1));
624 return fold_build2_loc (loc, MINUS_EXPR, type,
625 tem, TREE_OPERAND (t, 0));
628 /* -(A + B) -> (-A) - B. */
629 if (negate_expr_p (TREE_OPERAND (t, 0)))
631 tem = negate_expr (TREE_OPERAND (t, 0));
632 return fold_build2_loc (loc, MINUS_EXPR, type,
633 tem, TREE_OPERAND (t, 1));
636 break;
638 case MINUS_EXPR:
639 /* - (A - B) -> B - A */
640 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
641 && !HONOR_SIGNED_ZEROS (element_mode (type))
642 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
643 return fold_build2_loc (loc, MINUS_EXPR, type,
644 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
645 break;
647 case MULT_EXPR:
648 if (TYPE_UNSIGNED (type))
649 break;
651 /* Fall through. */
653 case RDIV_EXPR:
654 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
656 tem = TREE_OPERAND (t, 1);
657 if (negate_expr_p (tem))
658 return fold_build2_loc (loc, TREE_CODE (t), type,
659 TREE_OPERAND (t, 0), negate_expr (tem));
660 tem = TREE_OPERAND (t, 0);
661 if (negate_expr_p (tem))
662 return fold_build2_loc (loc, TREE_CODE (t), type,
663 negate_expr (tem), TREE_OPERAND (t, 1));
665 break;
667 case TRUNC_DIV_EXPR:
668 case ROUND_DIV_EXPR:
669 case EXACT_DIV_EXPR:
670 /* In general we can't negate A / B, because if A is INT_MIN and
671 B is 1, we may turn this into INT_MIN / -1 which is undefined
672 and actually traps on some architectures. But if overflow is
673 undefined, we can negate, because - (INT_MIN / 1) is an
674 overflow. */
675 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
677 const char * const warnmsg = G_("assuming signed overflow does not "
678 "occur when negating a division");
679 tem = TREE_OPERAND (t, 1);
680 if (negate_expr_p (tem))
682 if (INTEGRAL_TYPE_P (type)
683 && (TREE_CODE (tem) != INTEGER_CST
684 || integer_onep (tem)))
685 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
686 return fold_build2_loc (loc, TREE_CODE (t), type,
687 TREE_OPERAND (t, 0), negate_expr (tem));
689 /* If overflow is undefined then we have to be careful because
690 we ask whether it's ok to associate the negate with the
691 division which is not ok for example for
692 -((a - b) / c) where (-(a - b)) / c may invoke undefined
693 overflow because of negating INT_MIN. So do not use
694 negate_expr_p here but open-code the two important cases. */
695 tem = TREE_OPERAND (t, 0);
696 if ((INTEGRAL_TYPE_P (type)
697 && (TREE_CODE (tem) == NEGATE_EXPR
698 || (TREE_CODE (tem) == INTEGER_CST
699 && may_negate_without_overflow_p (tem))))
700 || !INTEGRAL_TYPE_P (type))
701 return fold_build2_loc (loc, TREE_CODE (t), type,
702 negate_expr (tem), TREE_OPERAND (t, 1));
704 break;
706 case NOP_EXPR:
707 /* Convert -((double)float) into (double)(-float). */
708 if (TREE_CODE (type) == REAL_TYPE)
710 tem = strip_float_extensions (t);
711 if (tem != t && negate_expr_p (tem))
712 return fold_convert_loc (loc, type, negate_expr (tem));
714 break;
716 case CALL_EXPR:
717 /* Negate -f(x) as f(-x). */
718 if (negate_mathfn_p (builtin_mathfn_code (t))
719 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
721 tree fndecl, arg;
723 fndecl = get_callee_fndecl (t);
724 arg = negate_expr (CALL_EXPR_ARG (t, 0));
725 return build_call_expr_loc (loc, fndecl, 1, arg);
727 break;
729 case RSHIFT_EXPR:
730 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
731 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
733 tree op1 = TREE_OPERAND (t, 1);
734 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
736 tree ntype = TYPE_UNSIGNED (type)
737 ? signed_type_for (type)
738 : unsigned_type_for (type);
739 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
740 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
741 return fold_convert_loc (loc, type, temp);
744 break;
746 default:
747 break;
750 return NULL_TREE;
753 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
754 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
755 return NULL_TREE. */
757 static tree
758 negate_expr (tree t)
760 tree type, tem;
761 location_t loc;
763 if (t == NULL_TREE)
764 return NULL_TREE;
766 loc = EXPR_LOCATION (t);
767 type = TREE_TYPE (t);
768 STRIP_SIGN_NOPS (t);
770 tem = fold_negate_expr (loc, t);
771 if (!tem)
772 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
773 return fold_convert_loc (loc, type, tem);
776 /* Split a tree IN into a constant, literal and variable parts that could be
777 combined with CODE to make IN. "constant" means an expression with
778 TREE_CONSTANT but that isn't an actual constant. CODE must be a
779 commutative arithmetic operation. Store the constant part into *CONP,
780 the literal in *LITP and return the variable part. If a part isn't
781 present, set it to null. If the tree does not decompose in this way,
782 return the entire tree as the variable part and the other parts as null.
784 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
785 case, we negate an operand that was subtracted. Except if it is a
786 literal for which we use *MINUS_LITP instead.
788 If NEGATE_P is true, we are negating all of IN, again except a literal
789 for which we use *MINUS_LITP instead.
791 If IN is itself a literal or constant, return it as appropriate.
793 Note that we do not guarantee that any of the three values will be the
794 same type as IN, but they will have the same signedness and mode. */
796 static tree
797 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
798 tree *minus_litp, int negate_p)
800 tree var = 0;
802 *conp = 0;
803 *litp = 0;
804 *minus_litp = 0;
806 /* Strip any conversions that don't change the machine mode or signedness. */
807 STRIP_SIGN_NOPS (in);
809 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
810 || TREE_CODE (in) == FIXED_CST)
811 *litp = in;
812 else if (TREE_CODE (in) == code
813 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
814 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
815 /* We can associate addition and subtraction together (even
816 though the C standard doesn't say so) for integers because
817 the value is not affected. For reals, the value might be
818 affected, so we can't. */
819 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
820 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
822 tree op0 = TREE_OPERAND (in, 0);
823 tree op1 = TREE_OPERAND (in, 1);
824 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
825 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
827 /* First see if either of the operands is a literal, then a constant. */
828 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
829 || TREE_CODE (op0) == FIXED_CST)
830 *litp = op0, op0 = 0;
831 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
832 || TREE_CODE (op1) == FIXED_CST)
833 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
835 if (op0 != 0 && TREE_CONSTANT (op0))
836 *conp = op0, op0 = 0;
837 else if (op1 != 0 && TREE_CONSTANT (op1))
838 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
840 /* If we haven't dealt with either operand, this is not a case we can
841 decompose. Otherwise, VAR is either of the ones remaining, if any. */
842 if (op0 != 0 && op1 != 0)
843 var = in;
844 else if (op0 != 0)
845 var = op0;
846 else
847 var = op1, neg_var_p = neg1_p;
849 /* Now do any needed negations. */
850 if (neg_litp_p)
851 *minus_litp = *litp, *litp = 0;
852 if (neg_conp_p)
853 *conp = negate_expr (*conp);
854 if (neg_var_p)
855 var = negate_expr (var);
857 else if (TREE_CODE (in) == BIT_NOT_EXPR
858 && code == PLUS_EXPR)
860 /* -X - 1 is folded to ~X, undo that here. */
861 *minus_litp = build_one_cst (TREE_TYPE (in));
862 var = negate_expr (TREE_OPERAND (in, 0));
864 else if (TREE_CONSTANT (in))
865 *conp = in;
866 else
867 var = in;
869 if (negate_p)
871 if (*litp)
872 *minus_litp = *litp, *litp = 0;
873 else if (*minus_litp)
874 *litp = *minus_litp, *minus_litp = 0;
875 *conp = negate_expr (*conp);
876 var = negate_expr (var);
879 return var;
882 /* Re-associate trees split by the above function. T1 and T2 are
883 either expressions to associate or null. Return the new
884 expression, if any. LOC is the location of the new expression. If
885 we build an operation, do it in TYPE and with CODE. */
887 static tree
888 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
890 if (t1 == 0)
891 return t2;
892 else if (t2 == 0)
893 return t1;
895 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
896 try to fold this since we will have infinite recursion. But do
897 deal with any NEGATE_EXPRs. */
898 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
899 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
901 if (code == PLUS_EXPR)
903 if (TREE_CODE (t1) == NEGATE_EXPR)
904 return build2_loc (loc, MINUS_EXPR, type,
905 fold_convert_loc (loc, type, t2),
906 fold_convert_loc (loc, type,
907 TREE_OPERAND (t1, 0)));
908 else if (TREE_CODE (t2) == NEGATE_EXPR)
909 return build2_loc (loc, MINUS_EXPR, type,
910 fold_convert_loc (loc, type, t1),
911 fold_convert_loc (loc, type,
912 TREE_OPERAND (t2, 0)));
913 else if (integer_zerop (t2))
914 return fold_convert_loc (loc, type, t1);
916 else if (code == MINUS_EXPR)
918 if (integer_zerop (t2))
919 return fold_convert_loc (loc, type, t1);
922 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
923 fold_convert_loc (loc, type, t2));
926 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
927 fold_convert_loc (loc, type, t2));
930 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
931 for use in int_const_binop, size_binop and size_diffop. */
933 static bool
934 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
936 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
937 return false;
938 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
939 return false;
941 switch (code)
943 case LSHIFT_EXPR:
944 case RSHIFT_EXPR:
945 case LROTATE_EXPR:
946 case RROTATE_EXPR:
947 return true;
949 default:
950 break;
953 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
954 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
955 && TYPE_MODE (type1) == TYPE_MODE (type2);
959 /* Combine two integer constants ARG1 and ARG2 under operation CODE
960 to produce a new constant. Return NULL_TREE if we don't know how
961 to evaluate CODE at compile-time. */
963 static tree
964 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
965 int overflowable)
967 wide_int res;
968 tree t;
969 tree type = TREE_TYPE (arg1);
970 signop sign = TYPE_SIGN (type);
971 bool overflow = false;
973 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
974 TYPE_SIGN (TREE_TYPE (parg2)));
976 switch (code)
978 case BIT_IOR_EXPR:
979 res = wi::bit_or (arg1, arg2);
980 break;
982 case BIT_XOR_EXPR:
983 res = wi::bit_xor (arg1, arg2);
984 break;
986 case BIT_AND_EXPR:
987 res = wi::bit_and (arg1, arg2);
988 break;
990 case RSHIFT_EXPR:
991 case LSHIFT_EXPR:
992 if (wi::neg_p (arg2))
994 arg2 = -arg2;
995 if (code == RSHIFT_EXPR)
996 code = LSHIFT_EXPR;
997 else
998 code = RSHIFT_EXPR;
1001 if (code == RSHIFT_EXPR)
1002 /* It's unclear from the C standard whether shifts can overflow.
1003 The following code ignores overflow; perhaps a C standard
1004 interpretation ruling is needed. */
1005 res = wi::rshift (arg1, arg2, sign);
1006 else
1007 res = wi::lshift (arg1, arg2);
1008 break;
1010 case RROTATE_EXPR:
1011 case LROTATE_EXPR:
1012 if (wi::neg_p (arg2))
1014 arg2 = -arg2;
1015 if (code == RROTATE_EXPR)
1016 code = LROTATE_EXPR;
1017 else
1018 code = RROTATE_EXPR;
1021 if (code == RROTATE_EXPR)
1022 res = wi::rrotate (arg1, arg2);
1023 else
1024 res = wi::lrotate (arg1, arg2);
1025 break;
1027 case PLUS_EXPR:
1028 res = wi::add (arg1, arg2, sign, &overflow);
1029 break;
1031 case MINUS_EXPR:
1032 res = wi::sub (arg1, arg2, sign, &overflow);
1033 break;
1035 case MULT_EXPR:
1036 res = wi::mul (arg1, arg2, sign, &overflow);
1037 break;
1039 case MULT_HIGHPART_EXPR:
1040 res = wi::mul_high (arg1, arg2, sign);
1041 break;
1043 case TRUNC_DIV_EXPR:
1044 case EXACT_DIV_EXPR:
1045 if (arg2 == 0)
1046 return NULL_TREE;
1047 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1048 break;
1050 case FLOOR_DIV_EXPR:
1051 if (arg2 == 0)
1052 return NULL_TREE;
1053 res = wi::div_floor (arg1, arg2, sign, &overflow);
1054 break;
1056 case CEIL_DIV_EXPR:
1057 if (arg2 == 0)
1058 return NULL_TREE;
1059 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1060 break;
1062 case ROUND_DIV_EXPR:
1063 if (arg2 == 0)
1064 return NULL_TREE;
1065 res = wi::div_round (arg1, arg2, sign, &overflow);
1066 break;
1068 case TRUNC_MOD_EXPR:
1069 if (arg2 == 0)
1070 return NULL_TREE;
1071 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1072 break;
1074 case FLOOR_MOD_EXPR:
1075 if (arg2 == 0)
1076 return NULL_TREE;
1077 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1078 break;
1080 case CEIL_MOD_EXPR:
1081 if (arg2 == 0)
1082 return NULL_TREE;
1083 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1084 break;
1086 case ROUND_MOD_EXPR:
1087 if (arg2 == 0)
1088 return NULL_TREE;
1089 res = wi::mod_round (arg1, arg2, sign, &overflow);
1090 break;
1092 case MIN_EXPR:
1093 res = wi::min (arg1, arg2, sign);
1094 break;
1096 case MAX_EXPR:
1097 res = wi::max (arg1, arg2, sign);
1098 break;
1100 default:
1101 return NULL_TREE;
1104 t = force_fit_type (type, res, overflowable,
1105 (((sign == SIGNED || overflowable == -1)
1106 && overflow)
1107 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1109 return t;
1112 tree
1113 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1115 return int_const_binop_1 (code, arg1, arg2, 1);
1118 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1119 constant. We assume ARG1 and ARG2 have the same data type, or at least
1120 are the same kind of constant and the same machine mode. Return zero if
1121 combining the constants is not allowed in the current operating mode. */
1123 static tree
1124 const_binop (enum tree_code code, tree arg1, tree arg2)
1126 /* Sanity check for the recursive cases. */
1127 if (!arg1 || !arg2)
1128 return NULL_TREE;
1130 STRIP_NOPS (arg1);
1131 STRIP_NOPS (arg2);
1133 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1135 if (code == POINTER_PLUS_EXPR)
1136 return int_const_binop (PLUS_EXPR,
1137 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1139 return int_const_binop (code, arg1, arg2);
1142 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1144 machine_mode mode;
1145 REAL_VALUE_TYPE d1;
1146 REAL_VALUE_TYPE d2;
1147 REAL_VALUE_TYPE value;
1148 REAL_VALUE_TYPE result;
1149 bool inexact;
1150 tree t, type;
1152 /* The following codes are handled by real_arithmetic. */
1153 switch (code)
1155 case PLUS_EXPR:
1156 case MINUS_EXPR:
1157 case MULT_EXPR:
1158 case RDIV_EXPR:
1159 case MIN_EXPR:
1160 case MAX_EXPR:
1161 break;
1163 default:
1164 return NULL_TREE;
1167 d1 = TREE_REAL_CST (arg1);
1168 d2 = TREE_REAL_CST (arg2);
1170 type = TREE_TYPE (arg1);
1171 mode = TYPE_MODE (type);
1173 /* Don't perform operation if we honor signaling NaNs and
1174 either operand is a NaN. */
1175 if (HONOR_SNANS (mode)
1176 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1177 return NULL_TREE;
1179 /* Don't perform operation if it would raise a division
1180 by zero exception. */
1181 if (code == RDIV_EXPR
1182 && REAL_VALUES_EQUAL (d2, dconst0)
1183 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1184 return NULL_TREE;
1186 /* If either operand is a NaN, just return it. Otherwise, set up
1187 for floating-point trap; we return an overflow. */
1188 if (REAL_VALUE_ISNAN (d1))
1189 return arg1;
1190 else if (REAL_VALUE_ISNAN (d2))
1191 return arg2;
1193 inexact = real_arithmetic (&value, code, &d1, &d2);
1194 real_convert (&result, mode, &value);
1196 /* Don't constant fold this floating point operation if
1197 the result has overflowed and flag_trapping_math. */
1198 if (flag_trapping_math
1199 && MODE_HAS_INFINITIES (mode)
1200 && REAL_VALUE_ISINF (result)
1201 && !REAL_VALUE_ISINF (d1)
1202 && !REAL_VALUE_ISINF (d2))
1203 return NULL_TREE;
1205 /* Don't constant fold this floating point operation if the
1206 result may dependent upon the run-time rounding mode and
1207 flag_rounding_math is set, or if GCC's software emulation
1208 is unable to accurately represent the result. */
1209 if ((flag_rounding_math
1210 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1211 && (inexact || !real_identical (&result, &value)))
1212 return NULL_TREE;
1214 t = build_real (type, result);
1216 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1217 return t;
1220 if (TREE_CODE (arg1) == FIXED_CST)
1222 FIXED_VALUE_TYPE f1;
1223 FIXED_VALUE_TYPE f2;
1224 FIXED_VALUE_TYPE result;
1225 tree t, type;
1226 int sat_p;
1227 bool overflow_p;
1229 /* The following codes are handled by fixed_arithmetic. */
1230 switch (code)
1232 case PLUS_EXPR:
1233 case MINUS_EXPR:
1234 case MULT_EXPR:
1235 case TRUNC_DIV_EXPR:
1236 if (TREE_CODE (arg2) != FIXED_CST)
1237 return NULL_TREE;
1238 f2 = TREE_FIXED_CST (arg2);
1239 break;
1241 case LSHIFT_EXPR:
1242 case RSHIFT_EXPR:
1244 if (TREE_CODE (arg2) != INTEGER_CST)
1245 return NULL_TREE;
1246 wide_int w2 = arg2;
1247 f2.data.high = w2.elt (1);
1248 f2.data.low = w2.elt (0);
1249 f2.mode = SImode;
1251 break;
1253 default:
1254 return NULL_TREE;
1257 f1 = TREE_FIXED_CST (arg1);
1258 type = TREE_TYPE (arg1);
1259 sat_p = TYPE_SATURATING (type);
1260 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1261 t = build_fixed (type, result);
1262 /* Propagate overflow flags. */
1263 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1264 TREE_OVERFLOW (t) = 1;
1265 return t;
1268 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1270 tree type = TREE_TYPE (arg1);
1271 tree r1 = TREE_REALPART (arg1);
1272 tree i1 = TREE_IMAGPART (arg1);
1273 tree r2 = TREE_REALPART (arg2);
1274 tree i2 = TREE_IMAGPART (arg2);
1275 tree real, imag;
1277 switch (code)
1279 case PLUS_EXPR:
1280 case MINUS_EXPR:
1281 real = const_binop (code, r1, r2);
1282 imag = const_binop (code, i1, i2);
1283 break;
1285 case MULT_EXPR:
1286 if (COMPLEX_FLOAT_TYPE_P (type))
1287 return do_mpc_arg2 (arg1, arg2, type,
1288 /* do_nonfinite= */ folding_initializer,
1289 mpc_mul);
1291 real = const_binop (MINUS_EXPR,
1292 const_binop (MULT_EXPR, r1, r2),
1293 const_binop (MULT_EXPR, i1, i2));
1294 imag = const_binop (PLUS_EXPR,
1295 const_binop (MULT_EXPR, r1, i2),
1296 const_binop (MULT_EXPR, i1, r2));
1297 break;
1299 case RDIV_EXPR:
1300 if (COMPLEX_FLOAT_TYPE_P (type))
1301 return do_mpc_arg2 (arg1, arg2, type,
1302 /* do_nonfinite= */ folding_initializer,
1303 mpc_div);
1304 /* Fallthru ... */
1305 case TRUNC_DIV_EXPR:
1306 case CEIL_DIV_EXPR:
1307 case FLOOR_DIV_EXPR:
1308 case ROUND_DIV_EXPR:
1309 if (flag_complex_method == 0)
1311 /* Keep this algorithm in sync with
1312 tree-complex.c:expand_complex_div_straight().
1314 Expand complex division to scalars, straightforward algorithm.
1315 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1316 t = br*br + bi*bi
1318 tree magsquared
1319 = const_binop (PLUS_EXPR,
1320 const_binop (MULT_EXPR, r2, r2),
1321 const_binop (MULT_EXPR, i2, i2));
1322 tree t1
1323 = const_binop (PLUS_EXPR,
1324 const_binop (MULT_EXPR, r1, r2),
1325 const_binop (MULT_EXPR, i1, i2));
1326 tree t2
1327 = const_binop (MINUS_EXPR,
1328 const_binop (MULT_EXPR, i1, r2),
1329 const_binop (MULT_EXPR, r1, i2));
1331 real = const_binop (code, t1, magsquared);
1332 imag = const_binop (code, t2, magsquared);
1334 else
1336 /* Keep this algorithm in sync with
1337 tree-complex.c:expand_complex_div_wide().
1339 Expand complex division to scalars, modified algorithm to minimize
1340 overflow with wide input ranges. */
1341 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1342 fold_abs_const (r2, TREE_TYPE (type)),
1343 fold_abs_const (i2, TREE_TYPE (type)));
1345 if (integer_nonzerop (compare))
1347 /* In the TRUE branch, we compute
1348 ratio = br/bi;
1349 div = (br * ratio) + bi;
1350 tr = (ar * ratio) + ai;
1351 ti = (ai * ratio) - ar;
1352 tr = tr / div;
1353 ti = ti / div; */
1354 tree ratio = const_binop (code, r2, i2);
1355 tree div = const_binop (PLUS_EXPR, i2,
1356 const_binop (MULT_EXPR, r2, ratio));
1357 real = const_binop (MULT_EXPR, r1, ratio);
1358 real = const_binop (PLUS_EXPR, real, i1);
1359 real = const_binop (code, real, div);
1361 imag = const_binop (MULT_EXPR, i1, ratio);
1362 imag = const_binop (MINUS_EXPR, imag, r1);
1363 imag = const_binop (code, imag, div);
1365 else
1367 /* In the FALSE branch, we compute
1368 ratio = d/c;
1369 divisor = (d * ratio) + c;
1370 tr = (b * ratio) + a;
1371 ti = b - (a * ratio);
1372 tr = tr / div;
1373 ti = ti / div; */
1374 tree ratio = const_binop (code, i2, r2);
1375 tree div = const_binop (PLUS_EXPR, r2,
1376 const_binop (MULT_EXPR, i2, ratio));
1378 real = const_binop (MULT_EXPR, i1, ratio);
1379 real = const_binop (PLUS_EXPR, real, r1);
1380 real = const_binop (code, real, div);
1382 imag = const_binop (MULT_EXPR, r1, ratio);
1383 imag = const_binop (MINUS_EXPR, i1, imag);
1384 imag = const_binop (code, imag, div);
1387 break;
1389 default:
1390 return NULL_TREE;
1393 if (real && imag)
1394 return build_complex (type, real, imag);
1397 if (TREE_CODE (arg1) == VECTOR_CST
1398 && TREE_CODE (arg2) == VECTOR_CST)
1400 tree type = TREE_TYPE (arg1);
1401 int count = TYPE_VECTOR_SUBPARTS (type), i;
1402 tree *elts = XALLOCAVEC (tree, count);
1404 for (i = 0; i < count; i++)
1406 tree elem1 = VECTOR_CST_ELT (arg1, i);
1407 tree elem2 = VECTOR_CST_ELT (arg2, i);
1409 elts[i] = const_binop (code, elem1, elem2);
1411 /* It is possible that const_binop cannot handle the given
1412 code and return NULL_TREE */
1413 if (elts[i] == NULL_TREE)
1414 return NULL_TREE;
1417 return build_vector (type, elts);
1420 /* Shifts allow a scalar offset for a vector. */
1421 if (TREE_CODE (arg1) == VECTOR_CST
1422 && TREE_CODE (arg2) == INTEGER_CST)
1424 tree type = TREE_TYPE (arg1);
1425 int count = TYPE_VECTOR_SUBPARTS (type), i;
1426 tree *elts = XALLOCAVEC (tree, count);
1428 for (i = 0; i < count; i++)
1430 tree elem1 = VECTOR_CST_ELT (arg1, i);
1432 elts[i] = const_binop (code, elem1, arg2);
1434 /* It is possible that const_binop cannot handle the given
1435 code and return NULL_TREE. */
1436 if (elts[i] == NULL_TREE)
1437 return NULL_TREE;
1440 return build_vector (type, elts);
1442 return NULL_TREE;
1445 /* Overload that adds a TYPE parameter to be able to dispatch
1446 to fold_relational_const. */
1448 tree
1449 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1451 if (TREE_CODE_CLASS (code) == tcc_comparison)
1452 return fold_relational_const (code, type, arg1, arg2);
1454 /* ??? Until we make the const_binop worker take the type of the
1455 result as argument put those cases that need it here. */
1456 switch (code)
1458 case COMPLEX_EXPR:
1459 if ((TREE_CODE (arg1) == REAL_CST
1460 && TREE_CODE (arg2) == REAL_CST)
1461 || (TREE_CODE (arg1) == INTEGER_CST
1462 && TREE_CODE (arg2) == INTEGER_CST))
1463 return build_complex (type, arg1, arg2);
1464 return NULL_TREE;
1466 case VEC_PACK_TRUNC_EXPR:
1467 case VEC_PACK_FIX_TRUNC_EXPR:
1469 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1470 tree *elts;
1472 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
1473 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
1474 if (TREE_CODE (arg1) != VECTOR_CST
1475 || TREE_CODE (arg2) != VECTOR_CST)
1476 return NULL_TREE;
1478 elts = XALLOCAVEC (tree, nelts);
1479 if (!vec_cst_ctor_to_array (arg1, elts)
1480 || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
1481 return NULL_TREE;
1483 for (i = 0; i < nelts; i++)
1485 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1486 ? NOP_EXPR : FIX_TRUNC_EXPR,
1487 TREE_TYPE (type), elts[i]);
1488 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1489 return NULL_TREE;
1492 return build_vector (type, elts);
1495 case VEC_WIDEN_MULT_LO_EXPR:
1496 case VEC_WIDEN_MULT_HI_EXPR:
1497 case VEC_WIDEN_MULT_EVEN_EXPR:
1498 case VEC_WIDEN_MULT_ODD_EXPR:
1500 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1501 unsigned int out, ofs, scale;
1502 tree *elts;
1504 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
1505 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
1506 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1507 return NULL_TREE;
1509 elts = XALLOCAVEC (tree, nelts * 4);
1510 if (!vec_cst_ctor_to_array (arg1, elts)
1511 || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
1512 return NULL_TREE;
1514 if (code == VEC_WIDEN_MULT_LO_EXPR)
1515 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
1516 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1517 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
1518 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1519 scale = 1, ofs = 0;
1520 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1521 scale = 1, ofs = 1;
1523 for (out = 0; out < nelts; out++)
1525 unsigned int in1 = (out << scale) + ofs;
1526 unsigned int in2 = in1 + nelts * 2;
1527 tree t1, t2;
1529 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
1530 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
1532 if (t1 == NULL_TREE || t2 == NULL_TREE)
1533 return NULL_TREE;
1534 elts[out] = const_binop (MULT_EXPR, t1, t2);
1535 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
1536 return NULL_TREE;
1539 return build_vector (type, elts);
1542 default:;
1545 if (TREE_CODE_CLASS (code) != tcc_binary)
1546 return NULL_TREE;
1548 /* Make sure type and arg0 have the same saturating flag. */
1549 gcc_checking_assert (TYPE_SATURATING (type)
1550 == TYPE_SATURATING (TREE_TYPE (arg1)));
1552 return const_binop (code, arg1, arg2);
1555 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1556 Return zero if computing the constants is not possible. */
1558 tree
1559 const_unop (enum tree_code code, tree type, tree arg0)
1561 switch (code)
1563 CASE_CONVERT:
1564 case FLOAT_EXPR:
1565 case FIX_TRUNC_EXPR:
1566 case FIXED_CONVERT_EXPR:
1567 return fold_convert_const (code, type, arg0);
1569 case ADDR_SPACE_CONVERT_EXPR:
1570 if (integer_zerop (arg0))
1571 return fold_convert_const (code, type, arg0);
1572 break;
1574 case VIEW_CONVERT_EXPR:
1575 return fold_view_convert_expr (type, arg0);
1577 case NEGATE_EXPR:
1579 /* Can't call fold_negate_const directly here as that doesn't
1580 handle all cases and we might not be able to negate some
1581 constants. */
1582 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1583 if (tem && CONSTANT_CLASS_P (tem))
1584 return tem;
1585 break;
1588 case ABS_EXPR:
1589 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1590 return fold_abs_const (arg0, type);
1591 break;
1593 case CONJ_EXPR:
1594 if (TREE_CODE (arg0) == COMPLEX_CST)
1596 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1597 TREE_TYPE (type));
1598 return build_complex (type, TREE_REALPART (arg0), ipart);
1600 break;
1602 case BIT_NOT_EXPR:
1603 if (TREE_CODE (arg0) == INTEGER_CST)
1604 return fold_not_const (arg0, type);
1605 /* Perform BIT_NOT_EXPR on each element individually. */
1606 else if (TREE_CODE (arg0) == VECTOR_CST)
1608 tree *elements;
1609 tree elem;
1610 unsigned count = VECTOR_CST_NELTS (arg0), i;
1612 elements = XALLOCAVEC (tree, count);
1613 for (i = 0; i < count; i++)
1615 elem = VECTOR_CST_ELT (arg0, i);
1616 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1617 if (elem == NULL_TREE)
1618 break;
1619 elements[i] = elem;
1621 if (i == count)
1622 return build_vector (type, elements);
1624 break;
1626 case TRUTH_NOT_EXPR:
1627 if (TREE_CODE (arg0) == INTEGER_CST)
1628 return constant_boolean_node (integer_zerop (arg0), type);
1629 break;
1631 case REALPART_EXPR:
1632 if (TREE_CODE (arg0) == COMPLEX_CST)
1633 return fold_convert (type, TREE_REALPART (arg0));
1634 break;
1636 case IMAGPART_EXPR:
1637 if (TREE_CODE (arg0) == COMPLEX_CST)
1638 return fold_convert (type, TREE_IMAGPART (arg0));
1639 break;
1641 case VEC_UNPACK_LO_EXPR:
1642 case VEC_UNPACK_HI_EXPR:
1643 case VEC_UNPACK_FLOAT_LO_EXPR:
1644 case VEC_UNPACK_FLOAT_HI_EXPR:
1646 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1647 tree *elts;
1648 enum tree_code subcode;
1650 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
1651 if (TREE_CODE (arg0) != VECTOR_CST)
1652 return NULL_TREE;
1654 elts = XALLOCAVEC (tree, nelts * 2);
1655 if (!vec_cst_ctor_to_array (arg0, elts))
1656 return NULL_TREE;
1658 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1659 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1660 elts += nelts;
1662 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1663 subcode = NOP_EXPR;
1664 else
1665 subcode = FLOAT_EXPR;
1667 for (i = 0; i < nelts; i++)
1669 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
1670 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1671 return NULL_TREE;
1674 return build_vector (type, elts);
1677 case REDUC_MIN_EXPR:
1678 case REDUC_MAX_EXPR:
1679 case REDUC_PLUS_EXPR:
1681 unsigned int nelts, i;
1682 tree *elts;
1683 enum tree_code subcode;
1685 if (TREE_CODE (arg0) != VECTOR_CST)
1686 return NULL_TREE;
1687 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1689 elts = XALLOCAVEC (tree, nelts);
1690 if (!vec_cst_ctor_to_array (arg0, elts))
1691 return NULL_TREE;
1693 switch (code)
1695 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1696 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1697 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1698 default: gcc_unreachable ();
1701 for (i = 1; i < nelts; i++)
1703 elts[0] = const_binop (subcode, elts[0], elts[i]);
1704 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
1705 return NULL_TREE;
1708 return elts[0];
1711 default:
1712 break;
1715 return NULL_TREE;
1718 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1719 indicates which particular sizetype to create. */
1721 tree
1722 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1724 return build_int_cst (sizetype_tab[(int) kind], number);
1727 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1728 is a tree code. The type of the result is taken from the operands.
1729 Both must be equivalent integer types, ala int_binop_types_match_p.
1730 If the operands are constant, so is the result. */
1732 tree
1733 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1735 tree type = TREE_TYPE (arg0);
1737 if (arg0 == error_mark_node || arg1 == error_mark_node)
1738 return error_mark_node;
1740 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1741 TREE_TYPE (arg1)));
1743 /* Handle the special case of two integer constants faster. */
1744 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1746 /* And some specific cases even faster than that. */
1747 if (code == PLUS_EXPR)
1749 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1750 return arg1;
1751 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1752 return arg0;
1754 else if (code == MINUS_EXPR)
1756 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1757 return arg0;
1759 else if (code == MULT_EXPR)
1761 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1762 return arg1;
1765 /* Handle general case of two integer constants. For sizetype
1766 constant calculations we always want to know about overflow,
1767 even in the unsigned case. */
1768 return int_const_binop_1 (code, arg0, arg1, -1);
1771 return fold_build2_loc (loc, code, type, arg0, arg1);
1774 /* Given two values, either both of sizetype or both of bitsizetype,
1775 compute the difference between the two values. Return the value
1776 in signed type corresponding to the type of the operands. */
1778 tree
1779 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1781 tree type = TREE_TYPE (arg0);
1782 tree ctype;
1784 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1785 TREE_TYPE (arg1)));
1787 /* If the type is already signed, just do the simple thing. */
1788 if (!TYPE_UNSIGNED (type))
1789 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1791 if (type == sizetype)
1792 ctype = ssizetype;
1793 else if (type == bitsizetype)
1794 ctype = sbitsizetype;
1795 else
1796 ctype = signed_type_for (type);
1798 /* If either operand is not a constant, do the conversions to the signed
1799 type and subtract. The hardware will do the right thing with any
1800 overflow in the subtraction. */
1801 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1802 return size_binop_loc (loc, MINUS_EXPR,
1803 fold_convert_loc (loc, ctype, arg0),
1804 fold_convert_loc (loc, ctype, arg1));
1806 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1807 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1808 overflow) and negate (which can't either). Special-case a result
1809 of zero while we're here. */
1810 if (tree_int_cst_equal (arg0, arg1))
1811 return build_int_cst (ctype, 0);
1812 else if (tree_int_cst_lt (arg1, arg0))
1813 return fold_convert_loc (loc, ctype,
1814 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1815 else
1816 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1817 fold_convert_loc (loc, ctype,
1818 size_binop_loc (loc,
1819 MINUS_EXPR,
1820 arg1, arg0)));
1823 /* A subroutine of fold_convert_const handling conversions of an
1824 INTEGER_CST to another integer type. */
1826 static tree
1827 fold_convert_const_int_from_int (tree type, const_tree arg1)
1829 /* Given an integer constant, make new constant with new type,
1830 appropriately sign-extended or truncated. Use widest_int
1831 so that any extension is done according ARG1's type. */
1832 return force_fit_type (type, wi::to_widest (arg1),
1833 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1834 TREE_OVERFLOW (arg1));
1837 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1838 to an integer type. */
1840 static tree
1841 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1843 bool overflow = false;
1844 tree t;
1846 /* The following code implements the floating point to integer
1847 conversion rules required by the Java Language Specification,
1848 that IEEE NaNs are mapped to zero and values that overflow
1849 the target precision saturate, i.e. values greater than
1850 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1851 are mapped to INT_MIN. These semantics are allowed by the
1852 C and C++ standards that simply state that the behavior of
1853 FP-to-integer conversion is unspecified upon overflow. */
1855 wide_int val;
1856 REAL_VALUE_TYPE r;
1857 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1859 switch (code)
1861 case FIX_TRUNC_EXPR:
1862 real_trunc (&r, VOIDmode, &x);
1863 break;
1865 default:
1866 gcc_unreachable ();
1869 /* If R is NaN, return zero and show we have an overflow. */
1870 if (REAL_VALUE_ISNAN (r))
1872 overflow = true;
1873 val = wi::zero (TYPE_PRECISION (type));
1876 /* See if R is less than the lower bound or greater than the
1877 upper bound. */
1879 if (! overflow)
1881 tree lt = TYPE_MIN_VALUE (type);
1882 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1883 if (REAL_VALUES_LESS (r, l))
1885 overflow = true;
1886 val = lt;
1890 if (! overflow)
1892 tree ut = TYPE_MAX_VALUE (type);
1893 if (ut)
1895 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1896 if (REAL_VALUES_LESS (u, r))
1898 overflow = true;
1899 val = ut;
1904 if (! overflow)
1905 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1907 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1908 return t;
1911 /* A subroutine of fold_convert_const handling conversions of a
1912 FIXED_CST to an integer type. */
1914 static tree
1915 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1917 tree t;
1918 double_int temp, temp_trunc;
1919 unsigned int mode;
1921 /* Right shift FIXED_CST to temp by fbit. */
1922 temp = TREE_FIXED_CST (arg1).data;
1923 mode = TREE_FIXED_CST (arg1).mode;
1924 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1926 temp = temp.rshift (GET_MODE_FBIT (mode),
1927 HOST_BITS_PER_DOUBLE_INT,
1928 SIGNED_FIXED_POINT_MODE_P (mode));
1930 /* Left shift temp to temp_trunc by fbit. */
1931 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1932 HOST_BITS_PER_DOUBLE_INT,
1933 SIGNED_FIXED_POINT_MODE_P (mode));
1935 else
1937 temp = double_int_zero;
1938 temp_trunc = double_int_zero;
1941 /* If FIXED_CST is negative, we need to round the value toward 0.
1942 By checking if the fractional bits are not zero to add 1 to temp. */
1943 if (SIGNED_FIXED_POINT_MODE_P (mode)
1944 && temp_trunc.is_negative ()
1945 && TREE_FIXED_CST (arg1).data != temp_trunc)
1946 temp += double_int_one;
1948 /* Given a fixed-point constant, make new constant with new type,
1949 appropriately sign-extended or truncated. */
1950 t = force_fit_type (type, temp, -1,
1951 (temp.is_negative ()
1952 && (TYPE_UNSIGNED (type)
1953 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1954 | TREE_OVERFLOW (arg1));
1956 return t;
1959 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1960 to another floating point type. */
1962 static tree
1963 fold_convert_const_real_from_real (tree type, const_tree arg1)
1965 REAL_VALUE_TYPE value;
1966 tree t;
1968 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1969 t = build_real (type, value);
1971 /* If converting an infinity or NAN to a representation that doesn't
1972 have one, set the overflow bit so that we can produce some kind of
1973 error message at the appropriate point if necessary. It's not the
1974 most user-friendly message, but it's better than nothing. */
1975 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1976 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1977 TREE_OVERFLOW (t) = 1;
1978 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1979 && !MODE_HAS_NANS (TYPE_MODE (type)))
1980 TREE_OVERFLOW (t) = 1;
1981 /* Regular overflow, conversion produced an infinity in a mode that
1982 can't represent them. */
1983 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1984 && REAL_VALUE_ISINF (value)
1985 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1986 TREE_OVERFLOW (t) = 1;
1987 else
1988 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1989 return t;
1992 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1993 to a floating point type. */
1995 static tree
1996 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1998 REAL_VALUE_TYPE value;
1999 tree t;
2001 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2002 t = build_real (type, value);
2004 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2005 return t;
2008 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2009 to another fixed-point type. */
2011 static tree
2012 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2014 FIXED_VALUE_TYPE value;
2015 tree t;
2016 bool overflow_p;
2018 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2019 TYPE_SATURATING (type));
2020 t = build_fixed (type, value);
2022 /* Propagate overflow flags. */
2023 if (overflow_p | TREE_OVERFLOW (arg1))
2024 TREE_OVERFLOW (t) = 1;
2025 return t;
2028 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2029 to a fixed-point type. */
2031 static tree
2032 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2034 FIXED_VALUE_TYPE value;
2035 tree t;
2036 bool overflow_p;
2037 double_int di;
2039 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2041 di.low = TREE_INT_CST_ELT (arg1, 0);
2042 if (TREE_INT_CST_NUNITS (arg1) == 1)
2043 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2044 else
2045 di.high = TREE_INT_CST_ELT (arg1, 1);
2047 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2048 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2049 TYPE_SATURATING (type));
2050 t = build_fixed (type, value);
2052 /* Propagate overflow flags. */
2053 if (overflow_p | TREE_OVERFLOW (arg1))
2054 TREE_OVERFLOW (t) = 1;
2055 return t;
2058 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2059 to a fixed-point type. */
2061 static tree
2062 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2064 FIXED_VALUE_TYPE value;
2065 tree t;
2066 bool overflow_p;
2068 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2069 &TREE_REAL_CST (arg1),
2070 TYPE_SATURATING (type));
2071 t = build_fixed (type, value);
2073 /* Propagate overflow flags. */
2074 if (overflow_p | TREE_OVERFLOW (arg1))
2075 TREE_OVERFLOW (t) = 1;
2076 return t;
2079 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2080 type TYPE. If no simplification can be done return NULL_TREE. */
2082 static tree
2083 fold_convert_const (enum tree_code code, tree type, tree arg1)
2085 if (TREE_TYPE (arg1) == type)
2086 return arg1;
2088 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2089 || TREE_CODE (type) == OFFSET_TYPE)
2091 if (TREE_CODE (arg1) == INTEGER_CST)
2092 return fold_convert_const_int_from_int (type, arg1);
2093 else if (TREE_CODE (arg1) == REAL_CST)
2094 return fold_convert_const_int_from_real (code, type, arg1);
2095 else if (TREE_CODE (arg1) == FIXED_CST)
2096 return fold_convert_const_int_from_fixed (type, arg1);
2098 else if (TREE_CODE (type) == REAL_TYPE)
2100 if (TREE_CODE (arg1) == INTEGER_CST)
2101 return build_real_from_int_cst (type, arg1);
2102 else if (TREE_CODE (arg1) == REAL_CST)
2103 return fold_convert_const_real_from_real (type, arg1);
2104 else if (TREE_CODE (arg1) == FIXED_CST)
2105 return fold_convert_const_real_from_fixed (type, arg1);
2107 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2109 if (TREE_CODE (arg1) == FIXED_CST)
2110 return fold_convert_const_fixed_from_fixed (type, arg1);
2111 else if (TREE_CODE (arg1) == INTEGER_CST)
2112 return fold_convert_const_fixed_from_int (type, arg1);
2113 else if (TREE_CODE (arg1) == REAL_CST)
2114 return fold_convert_const_fixed_from_real (type, arg1);
2116 return NULL_TREE;
2119 /* Construct a vector of zero elements of vector type TYPE. */
2121 static tree
2122 build_zero_vector (tree type)
2124 tree t;
2126 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2127 return build_vector_from_val (type, t);
2130 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2132 bool
2133 fold_convertible_p (const_tree type, const_tree arg)
2135 tree orig = TREE_TYPE (arg);
2137 if (type == orig)
2138 return true;
2140 if (TREE_CODE (arg) == ERROR_MARK
2141 || TREE_CODE (type) == ERROR_MARK
2142 || TREE_CODE (orig) == ERROR_MARK)
2143 return false;
2145 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2146 return true;
2148 switch (TREE_CODE (type))
2150 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2151 case POINTER_TYPE: case REFERENCE_TYPE:
2152 case OFFSET_TYPE:
2153 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2154 || TREE_CODE (orig) == OFFSET_TYPE)
2155 return true;
2156 return (TREE_CODE (orig) == VECTOR_TYPE
2157 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2159 case REAL_TYPE:
2160 case FIXED_POINT_TYPE:
2161 case COMPLEX_TYPE:
2162 case VECTOR_TYPE:
2163 case VOID_TYPE:
2164 return TREE_CODE (type) == TREE_CODE (orig);
2166 default:
2167 return false;
2171 /* Convert expression ARG to type TYPE. Used by the middle-end for
2172 simple conversions in preference to calling the front-end's convert. */
2174 tree
2175 fold_convert_loc (location_t loc, tree type, tree arg)
2177 tree orig = TREE_TYPE (arg);
2178 tree tem;
2180 if (type == orig)
2181 return arg;
2183 if (TREE_CODE (arg) == ERROR_MARK
2184 || TREE_CODE (type) == ERROR_MARK
2185 || TREE_CODE (orig) == ERROR_MARK)
2186 return error_mark_node;
2188 switch (TREE_CODE (type))
2190 case POINTER_TYPE:
2191 case REFERENCE_TYPE:
2192 /* Handle conversions between pointers to different address spaces. */
2193 if (POINTER_TYPE_P (orig)
2194 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2195 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2196 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2197 /* fall through */
2199 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2200 case OFFSET_TYPE:
2201 if (TREE_CODE (arg) == INTEGER_CST)
2203 tem = fold_convert_const (NOP_EXPR, type, arg);
2204 if (tem != NULL_TREE)
2205 return tem;
2207 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2208 || TREE_CODE (orig) == OFFSET_TYPE)
2209 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2210 if (TREE_CODE (orig) == COMPLEX_TYPE)
2211 return fold_convert_loc (loc, type,
2212 fold_build1_loc (loc, REALPART_EXPR,
2213 TREE_TYPE (orig), arg));
2214 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2215 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2216 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2218 case REAL_TYPE:
2219 if (TREE_CODE (arg) == INTEGER_CST)
2221 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2222 if (tem != NULL_TREE)
2223 return tem;
2225 else if (TREE_CODE (arg) == REAL_CST)
2227 tem = fold_convert_const (NOP_EXPR, type, arg);
2228 if (tem != NULL_TREE)
2229 return tem;
2231 else if (TREE_CODE (arg) == FIXED_CST)
2233 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2234 if (tem != NULL_TREE)
2235 return tem;
2238 switch (TREE_CODE (orig))
2240 case INTEGER_TYPE:
2241 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2242 case POINTER_TYPE: case REFERENCE_TYPE:
2243 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2245 case REAL_TYPE:
2246 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2248 case FIXED_POINT_TYPE:
2249 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2251 case COMPLEX_TYPE:
2252 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2253 return fold_convert_loc (loc, type, tem);
2255 default:
2256 gcc_unreachable ();
2259 case FIXED_POINT_TYPE:
2260 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2261 || TREE_CODE (arg) == REAL_CST)
2263 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2264 if (tem != NULL_TREE)
2265 goto fold_convert_exit;
2268 switch (TREE_CODE (orig))
2270 case FIXED_POINT_TYPE:
2271 case INTEGER_TYPE:
2272 case ENUMERAL_TYPE:
2273 case BOOLEAN_TYPE:
2274 case REAL_TYPE:
2275 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2277 case COMPLEX_TYPE:
2278 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2279 return fold_convert_loc (loc, type, tem);
2281 default:
2282 gcc_unreachable ();
2285 case COMPLEX_TYPE:
2286 switch (TREE_CODE (orig))
2288 case INTEGER_TYPE:
2289 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2290 case POINTER_TYPE: case REFERENCE_TYPE:
2291 case REAL_TYPE:
2292 case FIXED_POINT_TYPE:
2293 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2294 fold_convert_loc (loc, TREE_TYPE (type), arg),
2295 fold_convert_loc (loc, TREE_TYPE (type),
2296 integer_zero_node));
2297 case COMPLEX_TYPE:
2299 tree rpart, ipart;
2301 if (TREE_CODE (arg) == COMPLEX_EXPR)
2303 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2304 TREE_OPERAND (arg, 0));
2305 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2306 TREE_OPERAND (arg, 1));
2307 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2310 arg = save_expr (arg);
2311 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2312 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2313 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2314 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2315 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2318 default:
2319 gcc_unreachable ();
2322 case VECTOR_TYPE:
2323 if (integer_zerop (arg))
2324 return build_zero_vector (type);
2325 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2326 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2327 || TREE_CODE (orig) == VECTOR_TYPE);
2328 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2330 case VOID_TYPE:
2331 tem = fold_ignored_result (arg);
2332 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2334 default:
2335 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2336 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2337 gcc_unreachable ();
2339 fold_convert_exit:
2340 protected_set_expr_location_unshare (tem, loc);
2341 return tem;
2344 /* Return false if expr can be assumed not to be an lvalue, true
2345 otherwise. */
2347 static bool
2348 maybe_lvalue_p (const_tree x)
2350 /* We only need to wrap lvalue tree codes. */
2351 switch (TREE_CODE (x))
2353 case VAR_DECL:
2354 case PARM_DECL:
2355 case RESULT_DECL:
2356 case LABEL_DECL:
2357 case FUNCTION_DECL:
2358 case SSA_NAME:
2360 case COMPONENT_REF:
2361 case MEM_REF:
2362 case INDIRECT_REF:
2363 case ARRAY_REF:
2364 case ARRAY_RANGE_REF:
2365 case BIT_FIELD_REF:
2366 case OBJ_TYPE_REF:
2368 case REALPART_EXPR:
2369 case IMAGPART_EXPR:
2370 case PREINCREMENT_EXPR:
2371 case PREDECREMENT_EXPR:
2372 case SAVE_EXPR:
2373 case TRY_CATCH_EXPR:
2374 case WITH_CLEANUP_EXPR:
2375 case COMPOUND_EXPR:
2376 case MODIFY_EXPR:
2377 case TARGET_EXPR:
2378 case COND_EXPR:
2379 case BIND_EXPR:
2380 break;
2382 default:
2383 /* Assume the worst for front-end tree codes. */
2384 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2385 break;
2386 return false;
2389 return true;
2392 /* Return an expr equal to X but certainly not valid as an lvalue. */
2394 tree
2395 non_lvalue_loc (location_t loc, tree x)
2397 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2398 us. */
2399 if (in_gimple_form)
2400 return x;
2402 if (! maybe_lvalue_p (x))
2403 return x;
2404 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2407 /* When pedantic, return an expr equal to X but certainly not valid as a
2408 pedantic lvalue. Otherwise, return X. */
2410 static tree
2411 pedantic_non_lvalue_loc (location_t loc, tree x)
2413 return protected_set_expr_location_unshare (x, loc);
2416 /* Given a tree comparison code, return the code that is the logical inverse.
2417 It is generally not safe to do this for floating-point comparisons, except
2418 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2419 ERROR_MARK in this case. */
2421 enum tree_code
2422 invert_tree_comparison (enum tree_code code, bool honor_nans)
2424 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2425 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2426 return ERROR_MARK;
2428 switch (code)
2430 case EQ_EXPR:
2431 return NE_EXPR;
2432 case NE_EXPR:
2433 return EQ_EXPR;
2434 case GT_EXPR:
2435 return honor_nans ? UNLE_EXPR : LE_EXPR;
2436 case GE_EXPR:
2437 return honor_nans ? UNLT_EXPR : LT_EXPR;
2438 case LT_EXPR:
2439 return honor_nans ? UNGE_EXPR : GE_EXPR;
2440 case LE_EXPR:
2441 return honor_nans ? UNGT_EXPR : GT_EXPR;
2442 case LTGT_EXPR:
2443 return UNEQ_EXPR;
2444 case UNEQ_EXPR:
2445 return LTGT_EXPR;
2446 case UNGT_EXPR:
2447 return LE_EXPR;
2448 case UNGE_EXPR:
2449 return LT_EXPR;
2450 case UNLT_EXPR:
2451 return GE_EXPR;
2452 case UNLE_EXPR:
2453 return GT_EXPR;
2454 case ORDERED_EXPR:
2455 return UNORDERED_EXPR;
2456 case UNORDERED_EXPR:
2457 return ORDERED_EXPR;
2458 default:
2459 gcc_unreachable ();
2463 /* Similar, but return the comparison that results if the operands are
2464 swapped. This is safe for floating-point. */
2466 enum tree_code
2467 swap_tree_comparison (enum tree_code code)
2469 switch (code)
2471 case EQ_EXPR:
2472 case NE_EXPR:
2473 case ORDERED_EXPR:
2474 case UNORDERED_EXPR:
2475 case LTGT_EXPR:
2476 case UNEQ_EXPR:
2477 return code;
2478 case GT_EXPR:
2479 return LT_EXPR;
2480 case GE_EXPR:
2481 return LE_EXPR;
2482 case LT_EXPR:
2483 return GT_EXPR;
2484 case LE_EXPR:
2485 return GE_EXPR;
2486 case UNGT_EXPR:
2487 return UNLT_EXPR;
2488 case UNGE_EXPR:
2489 return UNLE_EXPR;
2490 case UNLT_EXPR:
2491 return UNGT_EXPR;
2492 case UNLE_EXPR:
2493 return UNGE_EXPR;
2494 default:
2495 gcc_unreachable ();
2500 /* Convert a comparison tree code from an enum tree_code representation
2501 into a compcode bit-based encoding. This function is the inverse of
2502 compcode_to_comparison. */
2504 static enum comparison_code
2505 comparison_to_compcode (enum tree_code code)
2507 switch (code)
2509 case LT_EXPR:
2510 return COMPCODE_LT;
2511 case EQ_EXPR:
2512 return COMPCODE_EQ;
2513 case LE_EXPR:
2514 return COMPCODE_LE;
2515 case GT_EXPR:
2516 return COMPCODE_GT;
2517 case NE_EXPR:
2518 return COMPCODE_NE;
2519 case GE_EXPR:
2520 return COMPCODE_GE;
2521 case ORDERED_EXPR:
2522 return COMPCODE_ORD;
2523 case UNORDERED_EXPR:
2524 return COMPCODE_UNORD;
2525 case UNLT_EXPR:
2526 return COMPCODE_UNLT;
2527 case UNEQ_EXPR:
2528 return COMPCODE_UNEQ;
2529 case UNLE_EXPR:
2530 return COMPCODE_UNLE;
2531 case UNGT_EXPR:
2532 return COMPCODE_UNGT;
2533 case LTGT_EXPR:
2534 return COMPCODE_LTGT;
2535 case UNGE_EXPR:
2536 return COMPCODE_UNGE;
2537 default:
2538 gcc_unreachable ();
2542 /* Convert a compcode bit-based encoding of a comparison operator back
2543 to GCC's enum tree_code representation. This function is the
2544 inverse of comparison_to_compcode. */
2546 static enum tree_code
2547 compcode_to_comparison (enum comparison_code code)
2549 switch (code)
2551 case COMPCODE_LT:
2552 return LT_EXPR;
2553 case COMPCODE_EQ:
2554 return EQ_EXPR;
2555 case COMPCODE_LE:
2556 return LE_EXPR;
2557 case COMPCODE_GT:
2558 return GT_EXPR;
2559 case COMPCODE_NE:
2560 return NE_EXPR;
2561 case COMPCODE_GE:
2562 return GE_EXPR;
2563 case COMPCODE_ORD:
2564 return ORDERED_EXPR;
2565 case COMPCODE_UNORD:
2566 return UNORDERED_EXPR;
2567 case COMPCODE_UNLT:
2568 return UNLT_EXPR;
2569 case COMPCODE_UNEQ:
2570 return UNEQ_EXPR;
2571 case COMPCODE_UNLE:
2572 return UNLE_EXPR;
2573 case COMPCODE_UNGT:
2574 return UNGT_EXPR;
2575 case COMPCODE_LTGT:
2576 return LTGT_EXPR;
2577 case COMPCODE_UNGE:
2578 return UNGE_EXPR;
2579 default:
2580 gcc_unreachable ();
2584 /* Return a tree for the comparison which is the combination of
2585 doing the AND or OR (depending on CODE) of the two operations LCODE
2586 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2587 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2588 if this makes the transformation invalid. */
2590 tree
2591 combine_comparisons (location_t loc,
2592 enum tree_code code, enum tree_code lcode,
2593 enum tree_code rcode, tree truth_type,
2594 tree ll_arg, tree lr_arg)
2596 bool honor_nans = HONOR_NANS (ll_arg);
2597 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2598 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2599 int compcode;
2601 switch (code)
2603 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2604 compcode = lcompcode & rcompcode;
2605 break;
2607 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2608 compcode = lcompcode | rcompcode;
2609 break;
2611 default:
2612 return NULL_TREE;
2615 if (!honor_nans)
2617 /* Eliminate unordered comparisons, as well as LTGT and ORD
2618 which are not used unless the mode has NaNs. */
2619 compcode &= ~COMPCODE_UNORD;
2620 if (compcode == COMPCODE_LTGT)
2621 compcode = COMPCODE_NE;
2622 else if (compcode == COMPCODE_ORD)
2623 compcode = COMPCODE_TRUE;
2625 else if (flag_trapping_math)
2627 /* Check that the original operation and the optimized ones will trap
2628 under the same condition. */
2629 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2630 && (lcompcode != COMPCODE_EQ)
2631 && (lcompcode != COMPCODE_ORD);
2632 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2633 && (rcompcode != COMPCODE_EQ)
2634 && (rcompcode != COMPCODE_ORD);
2635 bool trap = (compcode & COMPCODE_UNORD) == 0
2636 && (compcode != COMPCODE_EQ)
2637 && (compcode != COMPCODE_ORD);
2639 /* In a short-circuited boolean expression the LHS might be
2640 such that the RHS, if evaluated, will never trap. For
2641 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2642 if neither x nor y is NaN. (This is a mixed blessing: for
2643 example, the expression above will never trap, hence
2644 optimizing it to x < y would be invalid). */
2645 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2646 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2647 rtrap = false;
2649 /* If the comparison was short-circuited, and only the RHS
2650 trapped, we may now generate a spurious trap. */
2651 if (rtrap && !ltrap
2652 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2653 return NULL_TREE;
2655 /* If we changed the conditions that cause a trap, we lose. */
2656 if ((ltrap || rtrap) != trap)
2657 return NULL_TREE;
2660 if (compcode == COMPCODE_TRUE)
2661 return constant_boolean_node (true, truth_type);
2662 else if (compcode == COMPCODE_FALSE)
2663 return constant_boolean_node (false, truth_type);
2664 else
2666 enum tree_code tcode;
2668 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2669 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2673 /* Return nonzero if two operands (typically of the same tree node)
2674 are necessarily equal. If either argument has side-effects this
2675 function returns zero. FLAGS modifies behavior as follows:
2677 If OEP_ONLY_CONST is set, only return nonzero for constants.
2678 This function tests whether the operands are indistinguishable;
2679 it does not test whether they are equal using C's == operation.
2680 The distinction is important for IEEE floating point, because
2681 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2682 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2684 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2685 even though it may hold multiple values during a function.
2686 This is because a GCC tree node guarantees that nothing else is
2687 executed between the evaluation of its "operands" (which may often
2688 be evaluated in arbitrary order). Hence if the operands themselves
2689 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2690 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2691 unset means assuming isochronic (or instantaneous) tree equivalence.
2692 Unless comparing arbitrary expression trees, such as from different
2693 statements, this flag can usually be left unset.
2695 If OEP_PURE_SAME is set, then pure functions with identical arguments
2696 are considered the same. It is used when the caller has other ways
2697 to ensure that global memory is unchanged in between. */
2700 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2702 /* If either is ERROR_MARK, they aren't equal. */
2703 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2704 || TREE_TYPE (arg0) == error_mark_node
2705 || TREE_TYPE (arg1) == error_mark_node)
2706 return 0;
2708 /* Similar, if either does not have a type (like a released SSA name),
2709 they aren't equal. */
2710 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2711 return 0;
2713 /* Check equality of integer constants before bailing out due to
2714 precision differences. */
2715 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2716 return tree_int_cst_equal (arg0, arg1);
2718 /* If both types don't have the same signedness, then we can't consider
2719 them equal. We must check this before the STRIP_NOPS calls
2720 because they may change the signedness of the arguments. As pointers
2721 strictly don't have a signedness, require either two pointers or
2722 two non-pointers as well. */
2723 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2724 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2725 return 0;
2727 /* We cannot consider pointers to different address space equal. */
2728 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2729 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2730 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2731 return 0;
2733 /* If both types don't have the same precision, then it is not safe
2734 to strip NOPs. */
2735 if (element_precision (TREE_TYPE (arg0))
2736 != element_precision (TREE_TYPE (arg1)))
2737 return 0;
2739 STRIP_NOPS (arg0);
2740 STRIP_NOPS (arg1);
2742 /* In case both args are comparisons but with different comparison
2743 code, try to swap the comparison operands of one arg to produce
2744 a match and compare that variant. */
2745 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2746 && COMPARISON_CLASS_P (arg0)
2747 && COMPARISON_CLASS_P (arg1))
2749 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2751 if (TREE_CODE (arg0) == swap_code)
2752 return operand_equal_p (TREE_OPERAND (arg0, 0),
2753 TREE_OPERAND (arg1, 1), flags)
2754 && operand_equal_p (TREE_OPERAND (arg0, 1),
2755 TREE_OPERAND (arg1, 0), flags);
2758 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2759 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2760 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2761 return 0;
2763 /* This is needed for conversions and for COMPONENT_REF.
2764 Might as well play it safe and always test this. */
2765 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2766 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2767 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2768 return 0;
2770 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2771 We don't care about side effects in that case because the SAVE_EXPR
2772 takes care of that for us. In all other cases, two expressions are
2773 equal if they have no side effects. If we have two identical
2774 expressions with side effects that should be treated the same due
2775 to the only side effects being identical SAVE_EXPR's, that will
2776 be detected in the recursive calls below.
2777 If we are taking an invariant address of two identical objects
2778 they are necessarily equal as well. */
2779 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2780 && (TREE_CODE (arg0) == SAVE_EXPR
2781 || (flags & OEP_CONSTANT_ADDRESS_OF)
2782 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2783 return 1;
2785 /* Next handle constant cases, those for which we can return 1 even
2786 if ONLY_CONST is set. */
2787 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2788 switch (TREE_CODE (arg0))
2790 case INTEGER_CST:
2791 return tree_int_cst_equal (arg0, arg1);
2793 case FIXED_CST:
2794 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2795 TREE_FIXED_CST (arg1));
2797 case REAL_CST:
2798 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2799 TREE_REAL_CST (arg1)))
2800 return 1;
2803 if (!HONOR_SIGNED_ZEROS (arg0))
2805 /* If we do not distinguish between signed and unsigned zero,
2806 consider them equal. */
2807 if (real_zerop (arg0) && real_zerop (arg1))
2808 return 1;
2810 return 0;
2812 case VECTOR_CST:
2814 unsigned i;
2816 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2817 return 0;
2819 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2821 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2822 VECTOR_CST_ELT (arg1, i), flags))
2823 return 0;
2825 return 1;
2828 case COMPLEX_CST:
2829 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2830 flags)
2831 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2832 flags));
2834 case STRING_CST:
2835 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2836 && ! memcmp (TREE_STRING_POINTER (arg0),
2837 TREE_STRING_POINTER (arg1),
2838 TREE_STRING_LENGTH (arg0)));
2840 case ADDR_EXPR:
2841 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2842 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2843 ? OEP_CONSTANT_ADDRESS_OF | OEP_ADDRESS_OF : 0);
2844 default:
2845 break;
2848 if (flags & OEP_ONLY_CONST)
2849 return 0;
2851 /* Define macros to test an operand from arg0 and arg1 for equality and a
2852 variant that allows null and views null as being different from any
2853 non-null value. In the latter case, if either is null, the both
2854 must be; otherwise, do the normal comparison. */
2855 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2856 TREE_OPERAND (arg1, N), flags)
2858 #define OP_SAME_WITH_NULL(N) \
2859 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2860 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2862 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2864 case tcc_unary:
2865 /* Two conversions are equal only if signedness and modes match. */
2866 switch (TREE_CODE (arg0))
2868 CASE_CONVERT:
2869 case FIX_TRUNC_EXPR:
2870 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2871 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2872 return 0;
2873 break;
2874 default:
2875 break;
2878 return OP_SAME (0);
2881 case tcc_comparison:
2882 case tcc_binary:
2883 if (OP_SAME (0) && OP_SAME (1))
2884 return 1;
2886 /* For commutative ops, allow the other order. */
2887 return (commutative_tree_code (TREE_CODE (arg0))
2888 && operand_equal_p (TREE_OPERAND (arg0, 0),
2889 TREE_OPERAND (arg1, 1), flags)
2890 && operand_equal_p (TREE_OPERAND (arg0, 1),
2891 TREE_OPERAND (arg1, 0), flags));
2893 case tcc_reference:
2894 /* If either of the pointer (or reference) expressions we are
2895 dereferencing contain a side effect, these cannot be equal,
2896 but their addresses can be. */
2897 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2898 && (TREE_SIDE_EFFECTS (arg0)
2899 || TREE_SIDE_EFFECTS (arg1)))
2900 return 0;
2902 switch (TREE_CODE (arg0))
2904 case INDIRECT_REF:
2905 if (!(flags & OEP_ADDRESS_OF)
2906 && (TYPE_ALIGN (TREE_TYPE (arg0))
2907 != TYPE_ALIGN (TREE_TYPE (arg1))))
2908 return 0;
2909 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2910 return OP_SAME (0);
2912 case REALPART_EXPR:
2913 case IMAGPART_EXPR:
2914 return OP_SAME (0);
2916 case TARGET_MEM_REF:
2917 case MEM_REF:
2918 /* Require equal access sizes, and similar pointer types.
2919 We can have incomplete types for array references of
2920 variable-sized arrays from the Fortran frontend
2921 though. Also verify the types are compatible. */
2922 if (!((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2923 || (TYPE_SIZE (TREE_TYPE (arg0))
2924 && TYPE_SIZE (TREE_TYPE (arg1))
2925 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2926 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2927 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2928 && ((flags & OEP_ADDRESS_OF)
2929 || (alias_ptr_types_compatible_p
2930 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2931 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2932 && (MR_DEPENDENCE_CLIQUE (arg0)
2933 == MR_DEPENDENCE_CLIQUE (arg1))
2934 && (MR_DEPENDENCE_BASE (arg0)
2935 == MR_DEPENDENCE_BASE (arg1))
2936 && (TYPE_ALIGN (TREE_TYPE (arg0))
2937 == TYPE_ALIGN (TREE_TYPE (arg1)))))))
2938 return 0;
2939 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2940 return (OP_SAME (0) && OP_SAME (1)
2941 /* TARGET_MEM_REF require equal extra operands. */
2942 && (TREE_CODE (arg0) != TARGET_MEM_REF
2943 || (OP_SAME_WITH_NULL (2)
2944 && OP_SAME_WITH_NULL (3)
2945 && OP_SAME_WITH_NULL (4))));
2947 case ARRAY_REF:
2948 case ARRAY_RANGE_REF:
2949 /* Operands 2 and 3 may be null.
2950 Compare the array index by value if it is constant first as we
2951 may have different types but same value here. */
2952 if (!OP_SAME (0))
2953 return 0;
2954 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2955 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2956 TREE_OPERAND (arg1, 1))
2957 || OP_SAME (1))
2958 && OP_SAME_WITH_NULL (2)
2959 && OP_SAME_WITH_NULL (3));
2961 case COMPONENT_REF:
2962 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2963 may be NULL when we're called to compare MEM_EXPRs. */
2964 if (!OP_SAME_WITH_NULL (0)
2965 || !OP_SAME (1))
2966 return 0;
2967 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2968 return OP_SAME_WITH_NULL (2);
2970 case BIT_FIELD_REF:
2971 if (!OP_SAME (0))
2972 return 0;
2973 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2974 return OP_SAME (1) && OP_SAME (2);
2976 default:
2977 return 0;
2980 case tcc_expression:
2981 switch (TREE_CODE (arg0))
2983 case ADDR_EXPR:
2984 return operand_equal_p (TREE_OPERAND (arg0, 0),
2985 TREE_OPERAND (arg1, 0),
2986 flags | OEP_ADDRESS_OF);
2988 case TRUTH_NOT_EXPR:
2989 return OP_SAME (0);
2991 case TRUTH_ANDIF_EXPR:
2992 case TRUTH_ORIF_EXPR:
2993 return OP_SAME (0) && OP_SAME (1);
2995 case FMA_EXPR:
2996 case WIDEN_MULT_PLUS_EXPR:
2997 case WIDEN_MULT_MINUS_EXPR:
2998 if (!OP_SAME (2))
2999 return 0;
3000 /* The multiplcation operands are commutative. */
3001 /* FALLTHRU */
3003 case TRUTH_AND_EXPR:
3004 case TRUTH_OR_EXPR:
3005 case TRUTH_XOR_EXPR:
3006 if (OP_SAME (0) && OP_SAME (1))
3007 return 1;
3009 /* Otherwise take into account this is a commutative operation. */
3010 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3011 TREE_OPERAND (arg1, 1), flags)
3012 && operand_equal_p (TREE_OPERAND (arg0, 1),
3013 TREE_OPERAND (arg1, 0), flags));
3015 case COND_EXPR:
3016 case VEC_COND_EXPR:
3017 case DOT_PROD_EXPR:
3018 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3020 default:
3021 return 0;
3024 case tcc_vl_exp:
3025 switch (TREE_CODE (arg0))
3027 case CALL_EXPR:
3028 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3029 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3030 /* If not both CALL_EXPRs are either internal or normal function
3031 functions, then they are not equal. */
3032 return 0;
3033 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3035 /* If the CALL_EXPRs call different internal functions, then they
3036 are not equal. */
3037 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3038 return 0;
3040 else
3042 /* If the CALL_EXPRs call different functions, then they are not
3043 equal. */
3044 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3045 flags))
3046 return 0;
3050 unsigned int cef = call_expr_flags (arg0);
3051 if (flags & OEP_PURE_SAME)
3052 cef &= ECF_CONST | ECF_PURE;
3053 else
3054 cef &= ECF_CONST;
3055 if (!cef)
3056 return 0;
3059 /* Now see if all the arguments are the same. */
3061 const_call_expr_arg_iterator iter0, iter1;
3062 const_tree a0, a1;
3063 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3064 a1 = first_const_call_expr_arg (arg1, &iter1);
3065 a0 && a1;
3066 a0 = next_const_call_expr_arg (&iter0),
3067 a1 = next_const_call_expr_arg (&iter1))
3068 if (! operand_equal_p (a0, a1, flags))
3069 return 0;
3071 /* If we get here and both argument lists are exhausted
3072 then the CALL_EXPRs are equal. */
3073 return ! (a0 || a1);
3075 default:
3076 return 0;
3079 case tcc_declaration:
3080 /* Consider __builtin_sqrt equal to sqrt. */
3081 return (TREE_CODE (arg0) == FUNCTION_DECL
3082 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3083 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3084 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3086 default:
3087 return 0;
3090 #undef OP_SAME
3091 #undef OP_SAME_WITH_NULL
3094 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3095 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3097 When in doubt, return 0. */
3099 static int
3100 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3102 int unsignedp1, unsignedpo;
3103 tree primarg0, primarg1, primother;
3104 unsigned int correct_width;
3106 if (operand_equal_p (arg0, arg1, 0))
3107 return 1;
3109 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3110 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3111 return 0;
3113 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3114 and see if the inner values are the same. This removes any
3115 signedness comparison, which doesn't matter here. */
3116 primarg0 = arg0, primarg1 = arg1;
3117 STRIP_NOPS (primarg0);
3118 STRIP_NOPS (primarg1);
3119 if (operand_equal_p (primarg0, primarg1, 0))
3120 return 1;
3122 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3123 actual comparison operand, ARG0.
3125 First throw away any conversions to wider types
3126 already present in the operands. */
3128 primarg1 = get_narrower (arg1, &unsignedp1);
3129 primother = get_narrower (other, &unsignedpo);
3131 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3132 if (unsignedp1 == unsignedpo
3133 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3134 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3136 tree type = TREE_TYPE (arg0);
3138 /* Make sure shorter operand is extended the right way
3139 to match the longer operand. */
3140 primarg1 = fold_convert (signed_or_unsigned_type_for
3141 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3143 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3144 return 1;
3147 return 0;
3150 /* See if ARG is an expression that is either a comparison or is performing
3151 arithmetic on comparisons. The comparisons must only be comparing
3152 two different values, which will be stored in *CVAL1 and *CVAL2; if
3153 they are nonzero it means that some operands have already been found.
3154 No variables may be used anywhere else in the expression except in the
3155 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3156 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3158 If this is true, return 1. Otherwise, return zero. */
3160 static int
3161 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3163 enum tree_code code = TREE_CODE (arg);
3164 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3166 /* We can handle some of the tcc_expression cases here. */
3167 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3168 tclass = tcc_unary;
3169 else if (tclass == tcc_expression
3170 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3171 || code == COMPOUND_EXPR))
3172 tclass = tcc_binary;
3174 else if (tclass == tcc_expression && code == SAVE_EXPR
3175 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3177 /* If we've already found a CVAL1 or CVAL2, this expression is
3178 two complex to handle. */
3179 if (*cval1 || *cval2)
3180 return 0;
3182 tclass = tcc_unary;
3183 *save_p = 1;
3186 switch (tclass)
3188 case tcc_unary:
3189 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3191 case tcc_binary:
3192 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3193 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3194 cval1, cval2, save_p));
3196 case tcc_constant:
3197 return 1;
3199 case tcc_expression:
3200 if (code == COND_EXPR)
3201 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3202 cval1, cval2, save_p)
3203 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3204 cval1, cval2, save_p)
3205 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3206 cval1, cval2, save_p));
3207 return 0;
3209 case tcc_comparison:
3210 /* First see if we can handle the first operand, then the second. For
3211 the second operand, we know *CVAL1 can't be zero. It must be that
3212 one side of the comparison is each of the values; test for the
3213 case where this isn't true by failing if the two operands
3214 are the same. */
3216 if (operand_equal_p (TREE_OPERAND (arg, 0),
3217 TREE_OPERAND (arg, 1), 0))
3218 return 0;
3220 if (*cval1 == 0)
3221 *cval1 = TREE_OPERAND (arg, 0);
3222 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3224 else if (*cval2 == 0)
3225 *cval2 = TREE_OPERAND (arg, 0);
3226 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3228 else
3229 return 0;
3231 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3233 else if (*cval2 == 0)
3234 *cval2 = TREE_OPERAND (arg, 1);
3235 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3237 else
3238 return 0;
3240 return 1;
3242 default:
3243 return 0;
3247 /* ARG is a tree that is known to contain just arithmetic operations and
3248 comparisons. Evaluate the operations in the tree substituting NEW0 for
3249 any occurrence of OLD0 as an operand of a comparison and likewise for
3250 NEW1 and OLD1. */
3252 static tree
3253 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3254 tree old1, tree new1)
3256 tree type = TREE_TYPE (arg);
3257 enum tree_code code = TREE_CODE (arg);
3258 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3260 /* We can handle some of the tcc_expression cases here. */
3261 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3262 tclass = tcc_unary;
3263 else if (tclass == tcc_expression
3264 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3265 tclass = tcc_binary;
3267 switch (tclass)
3269 case tcc_unary:
3270 return fold_build1_loc (loc, code, type,
3271 eval_subst (loc, TREE_OPERAND (arg, 0),
3272 old0, new0, old1, new1));
3274 case tcc_binary:
3275 return fold_build2_loc (loc, code, type,
3276 eval_subst (loc, TREE_OPERAND (arg, 0),
3277 old0, new0, old1, new1),
3278 eval_subst (loc, TREE_OPERAND (arg, 1),
3279 old0, new0, old1, new1));
3281 case tcc_expression:
3282 switch (code)
3284 case SAVE_EXPR:
3285 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3286 old1, new1);
3288 case COMPOUND_EXPR:
3289 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3290 old1, new1);
3292 case COND_EXPR:
3293 return fold_build3_loc (loc, code, type,
3294 eval_subst (loc, TREE_OPERAND (arg, 0),
3295 old0, new0, old1, new1),
3296 eval_subst (loc, TREE_OPERAND (arg, 1),
3297 old0, new0, old1, new1),
3298 eval_subst (loc, TREE_OPERAND (arg, 2),
3299 old0, new0, old1, new1));
3300 default:
3301 break;
3303 /* Fall through - ??? */
3305 case tcc_comparison:
3307 tree arg0 = TREE_OPERAND (arg, 0);
3308 tree arg1 = TREE_OPERAND (arg, 1);
3310 /* We need to check both for exact equality and tree equality. The
3311 former will be true if the operand has a side-effect. In that
3312 case, we know the operand occurred exactly once. */
3314 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3315 arg0 = new0;
3316 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3317 arg0 = new1;
3319 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3320 arg1 = new0;
3321 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3322 arg1 = new1;
3324 return fold_build2_loc (loc, code, type, arg0, arg1);
3327 default:
3328 return arg;
3332 /* Return a tree for the case when the result of an expression is RESULT
3333 converted to TYPE and OMITTED was previously an operand of the expression
3334 but is now not needed (e.g., we folded OMITTED * 0).
3336 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3337 the conversion of RESULT to TYPE. */
3339 tree
3340 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3342 tree t = fold_convert_loc (loc, type, result);
3344 /* If the resulting operand is an empty statement, just return the omitted
3345 statement casted to void. */
3346 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3347 return build1_loc (loc, NOP_EXPR, void_type_node,
3348 fold_ignored_result (omitted));
3350 if (TREE_SIDE_EFFECTS (omitted))
3351 return build2_loc (loc, COMPOUND_EXPR, type,
3352 fold_ignored_result (omitted), t);
3354 return non_lvalue_loc (loc, t);
3357 /* Return a tree for the case when the result of an expression is RESULT
3358 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3359 of the expression but are now not needed.
3361 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3362 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3363 evaluated before OMITTED2. Otherwise, if neither has side effects,
3364 just do the conversion of RESULT to TYPE. */
3366 tree
3367 omit_two_operands_loc (location_t loc, tree type, tree result,
3368 tree omitted1, tree omitted2)
3370 tree t = fold_convert_loc (loc, type, result);
3372 if (TREE_SIDE_EFFECTS (omitted2))
3373 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3374 if (TREE_SIDE_EFFECTS (omitted1))
3375 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3377 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3381 /* Return a simplified tree node for the truth-negation of ARG. This
3382 never alters ARG itself. We assume that ARG is an operation that
3383 returns a truth value (0 or 1).
3385 FIXME: one would think we would fold the result, but it causes
3386 problems with the dominator optimizer. */
3388 static tree
3389 fold_truth_not_expr (location_t loc, tree arg)
3391 tree type = TREE_TYPE (arg);
3392 enum tree_code code = TREE_CODE (arg);
3393 location_t loc1, loc2;
3395 /* If this is a comparison, we can simply invert it, except for
3396 floating-point non-equality comparisons, in which case we just
3397 enclose a TRUTH_NOT_EXPR around what we have. */
3399 if (TREE_CODE_CLASS (code) == tcc_comparison)
3401 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3402 if (FLOAT_TYPE_P (op_type)
3403 && flag_trapping_math
3404 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3405 && code != NE_EXPR && code != EQ_EXPR)
3406 return NULL_TREE;
3408 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3409 if (code == ERROR_MARK)
3410 return NULL_TREE;
3412 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3413 TREE_OPERAND (arg, 1));
3416 switch (code)
3418 case INTEGER_CST:
3419 return constant_boolean_node (integer_zerop (arg), type);
3421 case TRUTH_AND_EXPR:
3422 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3423 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3424 return build2_loc (loc, TRUTH_OR_EXPR, type,
3425 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3426 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3428 case TRUTH_OR_EXPR:
3429 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3430 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3431 return build2_loc (loc, TRUTH_AND_EXPR, type,
3432 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3433 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3435 case TRUTH_XOR_EXPR:
3436 /* Here we can invert either operand. We invert the first operand
3437 unless the second operand is a TRUTH_NOT_EXPR in which case our
3438 result is the XOR of the first operand with the inside of the
3439 negation of the second operand. */
3441 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3442 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3443 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3444 else
3445 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3446 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3447 TREE_OPERAND (arg, 1));
3449 case TRUTH_ANDIF_EXPR:
3450 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3451 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3452 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3453 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3454 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3456 case TRUTH_ORIF_EXPR:
3457 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3458 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3459 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3460 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3461 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3463 case TRUTH_NOT_EXPR:
3464 return TREE_OPERAND (arg, 0);
3466 case COND_EXPR:
3468 tree arg1 = TREE_OPERAND (arg, 1);
3469 tree arg2 = TREE_OPERAND (arg, 2);
3471 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3472 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3474 /* A COND_EXPR may have a throw as one operand, which
3475 then has void type. Just leave void operands
3476 as they are. */
3477 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3478 VOID_TYPE_P (TREE_TYPE (arg1))
3479 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3480 VOID_TYPE_P (TREE_TYPE (arg2))
3481 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3484 case COMPOUND_EXPR:
3485 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3486 return build2_loc (loc, COMPOUND_EXPR, type,
3487 TREE_OPERAND (arg, 0),
3488 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3490 case NON_LVALUE_EXPR:
3491 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3492 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3494 CASE_CONVERT:
3495 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3496 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3498 /* ... fall through ... */
3500 case FLOAT_EXPR:
3501 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3502 return build1_loc (loc, TREE_CODE (arg), type,
3503 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3505 case BIT_AND_EXPR:
3506 if (!integer_onep (TREE_OPERAND (arg, 1)))
3507 return NULL_TREE;
3508 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3510 case SAVE_EXPR:
3511 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3513 case CLEANUP_POINT_EXPR:
3514 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3515 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3516 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3518 default:
3519 return NULL_TREE;
3523 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3524 assume that ARG is an operation that returns a truth value (0 or 1
3525 for scalars, 0 or -1 for vectors). Return the folded expression if
3526 folding is successful. Otherwise, return NULL_TREE. */
3528 static tree
3529 fold_invert_truthvalue (location_t loc, tree arg)
3531 tree type = TREE_TYPE (arg);
3532 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3533 ? BIT_NOT_EXPR
3534 : TRUTH_NOT_EXPR,
3535 type, arg);
3538 /* Return a simplified tree node for the truth-negation of ARG. This
3539 never alters ARG itself. We assume that ARG is an operation that
3540 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3542 tree
3543 invert_truthvalue_loc (location_t loc, tree arg)
3545 if (TREE_CODE (arg) == ERROR_MARK)
3546 return arg;
3548 tree type = TREE_TYPE (arg);
3549 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3550 ? BIT_NOT_EXPR
3551 : TRUTH_NOT_EXPR,
3552 type, arg);
3555 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3556 operands are another bit-wise operation with a common input. If so,
3557 distribute the bit operations to save an operation and possibly two if
3558 constants are involved. For example, convert
3559 (A | B) & (A | C) into A | (B & C)
3560 Further simplification will occur if B and C are constants.
3562 If this optimization cannot be done, 0 will be returned. */
3564 static tree
3565 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3566 tree arg0, tree arg1)
3568 tree common;
3569 tree left, right;
3571 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3572 || TREE_CODE (arg0) == code
3573 || (TREE_CODE (arg0) != BIT_AND_EXPR
3574 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3575 return 0;
3577 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3579 common = TREE_OPERAND (arg0, 0);
3580 left = TREE_OPERAND (arg0, 1);
3581 right = TREE_OPERAND (arg1, 1);
3583 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3585 common = TREE_OPERAND (arg0, 0);
3586 left = TREE_OPERAND (arg0, 1);
3587 right = TREE_OPERAND (arg1, 0);
3589 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3591 common = TREE_OPERAND (arg0, 1);
3592 left = TREE_OPERAND (arg0, 0);
3593 right = TREE_OPERAND (arg1, 1);
3595 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3597 common = TREE_OPERAND (arg0, 1);
3598 left = TREE_OPERAND (arg0, 0);
3599 right = TREE_OPERAND (arg1, 0);
3601 else
3602 return 0;
3604 common = fold_convert_loc (loc, type, common);
3605 left = fold_convert_loc (loc, type, left);
3606 right = fold_convert_loc (loc, type, right);
3607 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3608 fold_build2_loc (loc, code, type, left, right));
3611 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3612 with code CODE. This optimization is unsafe. */
3613 static tree
3614 distribute_real_division (location_t loc, enum tree_code code, tree type,
3615 tree arg0, tree arg1)
3617 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3618 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3620 /* (A / C) +- (B / C) -> (A +- B) / C. */
3621 if (mul0 == mul1
3622 && operand_equal_p (TREE_OPERAND (arg0, 1),
3623 TREE_OPERAND (arg1, 1), 0))
3624 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3625 fold_build2_loc (loc, code, type,
3626 TREE_OPERAND (arg0, 0),
3627 TREE_OPERAND (arg1, 0)),
3628 TREE_OPERAND (arg0, 1));
3630 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3631 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3632 TREE_OPERAND (arg1, 0), 0)
3633 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3634 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3636 REAL_VALUE_TYPE r0, r1;
3637 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3638 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3639 if (!mul0)
3640 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3641 if (!mul1)
3642 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3643 real_arithmetic (&r0, code, &r0, &r1);
3644 return fold_build2_loc (loc, MULT_EXPR, type,
3645 TREE_OPERAND (arg0, 0),
3646 build_real (type, r0));
3649 return NULL_TREE;
3652 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3653 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3655 static tree
3656 make_bit_field_ref (location_t loc, tree inner, tree type,
3657 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3659 tree result, bftype;
3661 if (bitpos == 0)
3663 tree size = TYPE_SIZE (TREE_TYPE (inner));
3664 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3665 || POINTER_TYPE_P (TREE_TYPE (inner)))
3666 && tree_fits_shwi_p (size)
3667 && tree_to_shwi (size) == bitsize)
3668 return fold_convert_loc (loc, type, inner);
3671 bftype = type;
3672 if (TYPE_PRECISION (bftype) != bitsize
3673 || TYPE_UNSIGNED (bftype) == !unsignedp)
3674 bftype = build_nonstandard_integer_type (bitsize, 0);
3676 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3677 size_int (bitsize), bitsize_int (bitpos));
3679 if (bftype != type)
3680 result = fold_convert_loc (loc, type, result);
3682 return result;
3685 /* Optimize a bit-field compare.
3687 There are two cases: First is a compare against a constant and the
3688 second is a comparison of two items where the fields are at the same
3689 bit position relative to the start of a chunk (byte, halfword, word)
3690 large enough to contain it. In these cases we can avoid the shift
3691 implicit in bitfield extractions.
3693 For constants, we emit a compare of the shifted constant with the
3694 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3695 compared. For two fields at the same position, we do the ANDs with the
3696 similar mask and compare the result of the ANDs.
3698 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3699 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3700 are the left and right operands of the comparison, respectively.
3702 If the optimization described above can be done, we return the resulting
3703 tree. Otherwise we return zero. */
3705 static tree
3706 optimize_bit_field_compare (location_t loc, enum tree_code code,
3707 tree compare_type, tree lhs, tree rhs)
3709 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3710 tree type = TREE_TYPE (lhs);
3711 tree unsigned_type;
3712 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3713 machine_mode lmode, rmode, nmode;
3714 int lunsignedp, runsignedp;
3715 int lvolatilep = 0, rvolatilep = 0;
3716 tree linner, rinner = NULL_TREE;
3717 tree mask;
3718 tree offset;
3720 /* Get all the information about the extractions being done. If the bit size
3721 if the same as the size of the underlying object, we aren't doing an
3722 extraction at all and so can do nothing. We also don't want to
3723 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3724 then will no longer be able to replace it. */
3725 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3726 &lunsignedp, &lvolatilep, false);
3727 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3728 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3729 return 0;
3731 if (!const_p)
3733 /* If this is not a constant, we can only do something if bit positions,
3734 sizes, and signedness are the same. */
3735 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3736 &runsignedp, &rvolatilep, false);
3738 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3739 || lunsignedp != runsignedp || offset != 0
3740 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3741 return 0;
3744 /* See if we can find a mode to refer to this field. We should be able to,
3745 but fail if we can't. */
3746 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3747 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3748 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3749 TYPE_ALIGN (TREE_TYPE (rinner))),
3750 word_mode, false);
3751 if (nmode == VOIDmode)
3752 return 0;
3754 /* Set signed and unsigned types of the precision of this mode for the
3755 shifts below. */
3756 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3758 /* Compute the bit position and size for the new reference and our offset
3759 within it. If the new reference is the same size as the original, we
3760 won't optimize anything, so return zero. */
3761 nbitsize = GET_MODE_BITSIZE (nmode);
3762 nbitpos = lbitpos & ~ (nbitsize - 1);
3763 lbitpos -= nbitpos;
3764 if (nbitsize == lbitsize)
3765 return 0;
3767 if (BYTES_BIG_ENDIAN)
3768 lbitpos = nbitsize - lbitsize - lbitpos;
3770 /* Make the mask to be used against the extracted field. */
3771 mask = build_int_cst_type (unsigned_type, -1);
3772 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3773 mask = const_binop (RSHIFT_EXPR, mask,
3774 size_int (nbitsize - lbitsize - lbitpos));
3776 if (! const_p)
3777 /* If not comparing with constant, just rework the comparison
3778 and return. */
3779 return fold_build2_loc (loc, code, compare_type,
3780 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3781 make_bit_field_ref (loc, linner,
3782 unsigned_type,
3783 nbitsize, nbitpos,
3785 mask),
3786 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3787 make_bit_field_ref (loc, rinner,
3788 unsigned_type,
3789 nbitsize, nbitpos,
3791 mask));
3793 /* Otherwise, we are handling the constant case. See if the constant is too
3794 big for the field. Warn and return a tree of for 0 (false) if so. We do
3795 this not only for its own sake, but to avoid having to test for this
3796 error case below. If we didn't, we might generate wrong code.
3798 For unsigned fields, the constant shifted right by the field length should
3799 be all zero. For signed fields, the high-order bits should agree with
3800 the sign bit. */
3802 if (lunsignedp)
3804 if (wi::lrshift (rhs, lbitsize) != 0)
3806 warning (0, "comparison is always %d due to width of bit-field",
3807 code == NE_EXPR);
3808 return constant_boolean_node (code == NE_EXPR, compare_type);
3811 else
3813 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3814 if (tem != 0 && tem != -1)
3816 warning (0, "comparison is always %d due to width of bit-field",
3817 code == NE_EXPR);
3818 return constant_boolean_node (code == NE_EXPR, compare_type);
3822 /* Single-bit compares should always be against zero. */
3823 if (lbitsize == 1 && ! integer_zerop (rhs))
3825 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3826 rhs = build_int_cst (type, 0);
3829 /* Make a new bitfield reference, shift the constant over the
3830 appropriate number of bits and mask it with the computed mask
3831 (in case this was a signed field). If we changed it, make a new one. */
3832 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3834 rhs = const_binop (BIT_AND_EXPR,
3835 const_binop (LSHIFT_EXPR,
3836 fold_convert_loc (loc, unsigned_type, rhs),
3837 size_int (lbitpos)),
3838 mask);
3840 lhs = build2_loc (loc, code, compare_type,
3841 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3842 return lhs;
3845 /* Subroutine for fold_truth_andor_1: decode a field reference.
3847 If EXP is a comparison reference, we return the innermost reference.
3849 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3850 set to the starting bit number.
3852 If the innermost field can be completely contained in a mode-sized
3853 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3855 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3856 otherwise it is not changed.
3858 *PUNSIGNEDP is set to the signedness of the field.
3860 *PMASK is set to the mask used. This is either contained in a
3861 BIT_AND_EXPR or derived from the width of the field.
3863 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3865 Return 0 if this is not a component reference or is one that we can't
3866 do anything with. */
3868 static tree
3869 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3870 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3871 int *punsignedp, int *pvolatilep,
3872 tree *pmask, tree *pand_mask)
3874 tree outer_type = 0;
3875 tree and_mask = 0;
3876 tree mask, inner, offset;
3877 tree unsigned_type;
3878 unsigned int precision;
3880 /* All the optimizations using this function assume integer fields.
3881 There are problems with FP fields since the type_for_size call
3882 below can fail for, e.g., XFmode. */
3883 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3884 return 0;
3886 /* We are interested in the bare arrangement of bits, so strip everything
3887 that doesn't affect the machine mode. However, record the type of the
3888 outermost expression if it may matter below. */
3889 if (CONVERT_EXPR_P (exp)
3890 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3891 outer_type = TREE_TYPE (exp);
3892 STRIP_NOPS (exp);
3894 if (TREE_CODE (exp) == BIT_AND_EXPR)
3896 and_mask = TREE_OPERAND (exp, 1);
3897 exp = TREE_OPERAND (exp, 0);
3898 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3899 if (TREE_CODE (and_mask) != INTEGER_CST)
3900 return 0;
3903 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3904 punsignedp, pvolatilep, false);
3905 if ((inner == exp && and_mask == 0)
3906 || *pbitsize < 0 || offset != 0
3907 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3908 return 0;
3910 /* If the number of bits in the reference is the same as the bitsize of
3911 the outer type, then the outer type gives the signedness. Otherwise
3912 (in case of a small bitfield) the signedness is unchanged. */
3913 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3914 *punsignedp = TYPE_UNSIGNED (outer_type);
3916 /* Compute the mask to access the bitfield. */
3917 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3918 precision = TYPE_PRECISION (unsigned_type);
3920 mask = build_int_cst_type (unsigned_type, -1);
3922 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3923 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3925 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3926 if (and_mask != 0)
3927 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3928 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3930 *pmask = mask;
3931 *pand_mask = and_mask;
3932 return inner;
3935 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3936 bit positions and MASK is SIGNED. */
3938 static int
3939 all_ones_mask_p (const_tree mask, unsigned int size)
3941 tree type = TREE_TYPE (mask);
3942 unsigned int precision = TYPE_PRECISION (type);
3944 /* If this function returns true when the type of the mask is
3945 UNSIGNED, then there will be errors. In particular see
3946 gcc.c-torture/execute/990326-1.c. There does not appear to be
3947 any documentation paper trail as to why this is so. But the pre
3948 wide-int worked with that restriction and it has been preserved
3949 here. */
3950 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3951 return false;
3953 return wi::mask (size, false, precision) == mask;
3956 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3957 represents the sign bit of EXP's type. If EXP represents a sign
3958 or zero extension, also test VAL against the unextended type.
3959 The return value is the (sub)expression whose sign bit is VAL,
3960 or NULL_TREE otherwise. */
3962 tree
3963 sign_bit_p (tree exp, const_tree val)
3965 int width;
3966 tree t;
3968 /* Tree EXP must have an integral type. */
3969 t = TREE_TYPE (exp);
3970 if (! INTEGRAL_TYPE_P (t))
3971 return NULL_TREE;
3973 /* Tree VAL must be an integer constant. */
3974 if (TREE_CODE (val) != INTEGER_CST
3975 || TREE_OVERFLOW (val))
3976 return NULL_TREE;
3978 width = TYPE_PRECISION (t);
3979 if (wi::only_sign_bit_p (val, width))
3980 return exp;
3982 /* Handle extension from a narrower type. */
3983 if (TREE_CODE (exp) == NOP_EXPR
3984 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3985 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3987 return NULL_TREE;
3990 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3991 to be evaluated unconditionally. */
3993 static int
3994 simple_operand_p (const_tree exp)
3996 /* Strip any conversions that don't change the machine mode. */
3997 STRIP_NOPS (exp);
3999 return (CONSTANT_CLASS_P (exp)
4000 || TREE_CODE (exp) == SSA_NAME
4001 || (DECL_P (exp)
4002 && ! TREE_ADDRESSABLE (exp)
4003 && ! TREE_THIS_VOLATILE (exp)
4004 && ! DECL_NONLOCAL (exp)
4005 /* Don't regard global variables as simple. They may be
4006 allocated in ways unknown to the compiler (shared memory,
4007 #pragma weak, etc). */
4008 && ! TREE_PUBLIC (exp)
4009 && ! DECL_EXTERNAL (exp)
4010 /* Weakrefs are not safe to be read, since they can be NULL.
4011 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4012 have DECL_WEAK flag set. */
4013 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4014 /* Loading a static variable is unduly expensive, but global
4015 registers aren't expensive. */
4016 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4019 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4020 to be evaluated unconditionally.
4021 I addition to simple_operand_p, we assume that comparisons, conversions,
4022 and logic-not operations are simple, if their operands are simple, too. */
4024 static bool
4025 simple_operand_p_2 (tree exp)
4027 enum tree_code code;
4029 if (TREE_SIDE_EFFECTS (exp)
4030 || tree_could_trap_p (exp))
4031 return false;
4033 while (CONVERT_EXPR_P (exp))
4034 exp = TREE_OPERAND (exp, 0);
4036 code = TREE_CODE (exp);
4038 if (TREE_CODE_CLASS (code) == tcc_comparison)
4039 return (simple_operand_p (TREE_OPERAND (exp, 0))
4040 && simple_operand_p (TREE_OPERAND (exp, 1)));
4042 if (code == TRUTH_NOT_EXPR)
4043 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4045 return simple_operand_p (exp);
4049 /* The following functions are subroutines to fold_range_test and allow it to
4050 try to change a logical combination of comparisons into a range test.
4052 For example, both
4053 X == 2 || X == 3 || X == 4 || X == 5
4055 X >= 2 && X <= 5
4056 are converted to
4057 (unsigned) (X - 2) <= 3
4059 We describe each set of comparisons as being either inside or outside
4060 a range, using a variable named like IN_P, and then describe the
4061 range with a lower and upper bound. If one of the bounds is omitted,
4062 it represents either the highest or lowest value of the type.
4064 In the comments below, we represent a range by two numbers in brackets
4065 preceded by a "+" to designate being inside that range, or a "-" to
4066 designate being outside that range, so the condition can be inverted by
4067 flipping the prefix. An omitted bound is represented by a "-". For
4068 example, "- [-, 10]" means being outside the range starting at the lowest
4069 possible value and ending at 10, in other words, being greater than 10.
4070 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4071 always false.
4073 We set up things so that the missing bounds are handled in a consistent
4074 manner so neither a missing bound nor "true" and "false" need to be
4075 handled using a special case. */
4077 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4078 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4079 and UPPER1_P are nonzero if the respective argument is an upper bound
4080 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4081 must be specified for a comparison. ARG1 will be converted to ARG0's
4082 type if both are specified. */
4084 static tree
4085 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4086 tree arg1, int upper1_p)
4088 tree tem;
4089 int result;
4090 int sgn0, sgn1;
4092 /* If neither arg represents infinity, do the normal operation.
4093 Else, if not a comparison, return infinity. Else handle the special
4094 comparison rules. Note that most of the cases below won't occur, but
4095 are handled for consistency. */
4097 if (arg0 != 0 && arg1 != 0)
4099 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4100 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4101 STRIP_NOPS (tem);
4102 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4105 if (TREE_CODE_CLASS (code) != tcc_comparison)
4106 return 0;
4108 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4109 for neither. In real maths, we cannot assume open ended ranges are
4110 the same. But, this is computer arithmetic, where numbers are finite.
4111 We can therefore make the transformation of any unbounded range with
4112 the value Z, Z being greater than any representable number. This permits
4113 us to treat unbounded ranges as equal. */
4114 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4115 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4116 switch (code)
4118 case EQ_EXPR:
4119 result = sgn0 == sgn1;
4120 break;
4121 case NE_EXPR:
4122 result = sgn0 != sgn1;
4123 break;
4124 case LT_EXPR:
4125 result = sgn0 < sgn1;
4126 break;
4127 case LE_EXPR:
4128 result = sgn0 <= sgn1;
4129 break;
4130 case GT_EXPR:
4131 result = sgn0 > sgn1;
4132 break;
4133 case GE_EXPR:
4134 result = sgn0 >= sgn1;
4135 break;
4136 default:
4137 gcc_unreachable ();
4140 return constant_boolean_node (result, type);
4143 /* Helper routine for make_range. Perform one step for it, return
4144 new expression if the loop should continue or NULL_TREE if it should
4145 stop. */
4147 tree
4148 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4149 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4150 bool *strict_overflow_p)
4152 tree arg0_type = TREE_TYPE (arg0);
4153 tree n_low, n_high, low = *p_low, high = *p_high;
4154 int in_p = *p_in_p, n_in_p;
4156 switch (code)
4158 case TRUTH_NOT_EXPR:
4159 /* We can only do something if the range is testing for zero. */
4160 if (low == NULL_TREE || high == NULL_TREE
4161 || ! integer_zerop (low) || ! integer_zerop (high))
4162 return NULL_TREE;
4163 *p_in_p = ! in_p;
4164 return arg0;
4166 case EQ_EXPR: case NE_EXPR:
4167 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4168 /* We can only do something if the range is testing for zero
4169 and if the second operand is an integer constant. Note that
4170 saying something is "in" the range we make is done by
4171 complementing IN_P since it will set in the initial case of
4172 being not equal to zero; "out" is leaving it alone. */
4173 if (low == NULL_TREE || high == NULL_TREE
4174 || ! integer_zerop (low) || ! integer_zerop (high)
4175 || TREE_CODE (arg1) != INTEGER_CST)
4176 return NULL_TREE;
4178 switch (code)
4180 case NE_EXPR: /* - [c, c] */
4181 low = high = arg1;
4182 break;
4183 case EQ_EXPR: /* + [c, c] */
4184 in_p = ! in_p, low = high = arg1;
4185 break;
4186 case GT_EXPR: /* - [-, c] */
4187 low = 0, high = arg1;
4188 break;
4189 case GE_EXPR: /* + [c, -] */
4190 in_p = ! in_p, low = arg1, high = 0;
4191 break;
4192 case LT_EXPR: /* - [c, -] */
4193 low = arg1, high = 0;
4194 break;
4195 case LE_EXPR: /* + [-, c] */
4196 in_p = ! in_p, low = 0, high = arg1;
4197 break;
4198 default:
4199 gcc_unreachable ();
4202 /* If this is an unsigned comparison, we also know that EXP is
4203 greater than or equal to zero. We base the range tests we make
4204 on that fact, so we record it here so we can parse existing
4205 range tests. We test arg0_type since often the return type
4206 of, e.g. EQ_EXPR, is boolean. */
4207 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4209 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4210 in_p, low, high, 1,
4211 build_int_cst (arg0_type, 0),
4212 NULL_TREE))
4213 return NULL_TREE;
4215 in_p = n_in_p, low = n_low, high = n_high;
4217 /* If the high bound is missing, but we have a nonzero low
4218 bound, reverse the range so it goes from zero to the low bound
4219 minus 1. */
4220 if (high == 0 && low && ! integer_zerop (low))
4222 in_p = ! in_p;
4223 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4224 build_int_cst (TREE_TYPE (low), 1), 0);
4225 low = build_int_cst (arg0_type, 0);
4229 *p_low = low;
4230 *p_high = high;
4231 *p_in_p = in_p;
4232 return arg0;
4234 case NEGATE_EXPR:
4235 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4236 low and high are non-NULL, then normalize will DTRT. */
4237 if (!TYPE_UNSIGNED (arg0_type)
4238 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4240 if (low == NULL_TREE)
4241 low = TYPE_MIN_VALUE (arg0_type);
4242 if (high == NULL_TREE)
4243 high = TYPE_MAX_VALUE (arg0_type);
4246 /* (-x) IN [a,b] -> x in [-b, -a] */
4247 n_low = range_binop (MINUS_EXPR, exp_type,
4248 build_int_cst (exp_type, 0),
4249 0, high, 1);
4250 n_high = range_binop (MINUS_EXPR, exp_type,
4251 build_int_cst (exp_type, 0),
4252 0, low, 0);
4253 if (n_high != 0 && TREE_OVERFLOW (n_high))
4254 return NULL_TREE;
4255 goto normalize;
4257 case BIT_NOT_EXPR:
4258 /* ~ X -> -X - 1 */
4259 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4260 build_int_cst (exp_type, 1));
4262 case PLUS_EXPR:
4263 case MINUS_EXPR:
4264 if (TREE_CODE (arg1) != INTEGER_CST)
4265 return NULL_TREE;
4267 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4268 move a constant to the other side. */
4269 if (!TYPE_UNSIGNED (arg0_type)
4270 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4271 return NULL_TREE;
4273 /* If EXP is signed, any overflow in the computation is undefined,
4274 so we don't worry about it so long as our computations on
4275 the bounds don't overflow. For unsigned, overflow is defined
4276 and this is exactly the right thing. */
4277 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4278 arg0_type, low, 0, arg1, 0);
4279 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4280 arg0_type, high, 1, arg1, 0);
4281 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4282 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4283 return NULL_TREE;
4285 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4286 *strict_overflow_p = true;
4288 normalize:
4289 /* Check for an unsigned range which has wrapped around the maximum
4290 value thus making n_high < n_low, and normalize it. */
4291 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4293 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4294 build_int_cst (TREE_TYPE (n_high), 1), 0);
4295 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4296 build_int_cst (TREE_TYPE (n_low), 1), 0);
4298 /* If the range is of the form +/- [ x+1, x ], we won't
4299 be able to normalize it. But then, it represents the
4300 whole range or the empty set, so make it
4301 +/- [ -, - ]. */
4302 if (tree_int_cst_equal (n_low, low)
4303 && tree_int_cst_equal (n_high, high))
4304 low = high = 0;
4305 else
4306 in_p = ! in_p;
4308 else
4309 low = n_low, high = n_high;
4311 *p_low = low;
4312 *p_high = high;
4313 *p_in_p = in_p;
4314 return arg0;
4316 CASE_CONVERT:
4317 case NON_LVALUE_EXPR:
4318 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4319 return NULL_TREE;
4321 if (! INTEGRAL_TYPE_P (arg0_type)
4322 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4323 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4324 return NULL_TREE;
4326 n_low = low, n_high = high;
4328 if (n_low != 0)
4329 n_low = fold_convert_loc (loc, arg0_type, n_low);
4331 if (n_high != 0)
4332 n_high = fold_convert_loc (loc, arg0_type, n_high);
4334 /* If we're converting arg0 from an unsigned type, to exp,
4335 a signed type, we will be doing the comparison as unsigned.
4336 The tests above have already verified that LOW and HIGH
4337 are both positive.
4339 So we have to ensure that we will handle large unsigned
4340 values the same way that the current signed bounds treat
4341 negative values. */
4343 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4345 tree high_positive;
4346 tree equiv_type;
4347 /* For fixed-point modes, we need to pass the saturating flag
4348 as the 2nd parameter. */
4349 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4350 equiv_type
4351 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4352 TYPE_SATURATING (arg0_type));
4353 else
4354 equiv_type
4355 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4357 /* A range without an upper bound is, naturally, unbounded.
4358 Since convert would have cropped a very large value, use
4359 the max value for the destination type. */
4360 high_positive
4361 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4362 : TYPE_MAX_VALUE (arg0_type);
4364 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4365 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4366 fold_convert_loc (loc, arg0_type,
4367 high_positive),
4368 build_int_cst (arg0_type, 1));
4370 /* If the low bound is specified, "and" the range with the
4371 range for which the original unsigned value will be
4372 positive. */
4373 if (low != 0)
4375 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4376 1, fold_convert_loc (loc, arg0_type,
4377 integer_zero_node),
4378 high_positive))
4379 return NULL_TREE;
4381 in_p = (n_in_p == in_p);
4383 else
4385 /* Otherwise, "or" the range with the range of the input
4386 that will be interpreted as negative. */
4387 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4388 1, fold_convert_loc (loc, arg0_type,
4389 integer_zero_node),
4390 high_positive))
4391 return NULL_TREE;
4393 in_p = (in_p != n_in_p);
4397 *p_low = n_low;
4398 *p_high = n_high;
4399 *p_in_p = in_p;
4400 return arg0;
4402 default:
4403 return NULL_TREE;
4407 /* Given EXP, a logical expression, set the range it is testing into
4408 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4409 actually being tested. *PLOW and *PHIGH will be made of the same
4410 type as the returned expression. If EXP is not a comparison, we
4411 will most likely not be returning a useful value and range. Set
4412 *STRICT_OVERFLOW_P to true if the return value is only valid
4413 because signed overflow is undefined; otherwise, do not change
4414 *STRICT_OVERFLOW_P. */
4416 tree
4417 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4418 bool *strict_overflow_p)
4420 enum tree_code code;
4421 tree arg0, arg1 = NULL_TREE;
4422 tree exp_type, nexp;
4423 int in_p;
4424 tree low, high;
4425 location_t loc = EXPR_LOCATION (exp);
4427 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4428 and see if we can refine the range. Some of the cases below may not
4429 happen, but it doesn't seem worth worrying about this. We "continue"
4430 the outer loop when we've changed something; otherwise we "break"
4431 the switch, which will "break" the while. */
4433 in_p = 0;
4434 low = high = build_int_cst (TREE_TYPE (exp), 0);
4436 while (1)
4438 code = TREE_CODE (exp);
4439 exp_type = TREE_TYPE (exp);
4440 arg0 = NULL_TREE;
4442 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4444 if (TREE_OPERAND_LENGTH (exp) > 0)
4445 arg0 = TREE_OPERAND (exp, 0);
4446 if (TREE_CODE_CLASS (code) == tcc_binary
4447 || TREE_CODE_CLASS (code) == tcc_comparison
4448 || (TREE_CODE_CLASS (code) == tcc_expression
4449 && TREE_OPERAND_LENGTH (exp) > 1))
4450 arg1 = TREE_OPERAND (exp, 1);
4452 if (arg0 == NULL_TREE)
4453 break;
4455 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4456 &high, &in_p, strict_overflow_p);
4457 if (nexp == NULL_TREE)
4458 break;
4459 exp = nexp;
4462 /* If EXP is a constant, we can evaluate whether this is true or false. */
4463 if (TREE_CODE (exp) == INTEGER_CST)
4465 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4466 exp, 0, low, 0))
4467 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4468 exp, 1, high, 1)));
4469 low = high = 0;
4470 exp = 0;
4473 *pin_p = in_p, *plow = low, *phigh = high;
4474 return exp;
4477 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4478 type, TYPE, return an expression to test if EXP is in (or out of, depending
4479 on IN_P) the range. Return 0 if the test couldn't be created. */
4481 tree
4482 build_range_check (location_t loc, tree type, tree exp, int in_p,
4483 tree low, tree high)
4485 tree etype = TREE_TYPE (exp), value;
4487 /* Disable this optimization for function pointer expressions
4488 on targets that require function pointer canonicalization. */
4489 if (targetm.have_canonicalize_funcptr_for_compare ()
4490 && TREE_CODE (etype) == POINTER_TYPE
4491 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4492 return NULL_TREE;
4494 if (! in_p)
4496 value = build_range_check (loc, type, exp, 1, low, high);
4497 if (value != 0)
4498 return invert_truthvalue_loc (loc, value);
4500 return 0;
4503 if (low == 0 && high == 0)
4504 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4506 if (low == 0)
4507 return fold_build2_loc (loc, LE_EXPR, type, exp,
4508 fold_convert_loc (loc, etype, high));
4510 if (high == 0)
4511 return fold_build2_loc (loc, GE_EXPR, type, exp,
4512 fold_convert_loc (loc, etype, low));
4514 if (operand_equal_p (low, high, 0))
4515 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4516 fold_convert_loc (loc, etype, low));
4518 if (integer_zerop (low))
4520 if (! TYPE_UNSIGNED (etype))
4522 etype = unsigned_type_for (etype);
4523 high = fold_convert_loc (loc, etype, high);
4524 exp = fold_convert_loc (loc, etype, exp);
4526 return build_range_check (loc, type, exp, 1, 0, high);
4529 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4530 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4532 int prec = TYPE_PRECISION (etype);
4534 if (wi::mask (prec - 1, false, prec) == high)
4536 if (TYPE_UNSIGNED (etype))
4538 tree signed_etype = signed_type_for (etype);
4539 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4540 etype
4541 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4542 else
4543 etype = signed_etype;
4544 exp = fold_convert_loc (loc, etype, exp);
4546 return fold_build2_loc (loc, GT_EXPR, type, exp,
4547 build_int_cst (etype, 0));
4551 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4552 This requires wrap-around arithmetics for the type of the expression.
4553 First make sure that arithmetics in this type is valid, then make sure
4554 that it wraps around. */
4555 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4556 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4557 TYPE_UNSIGNED (etype));
4559 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4561 tree utype, minv, maxv;
4563 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4564 for the type in question, as we rely on this here. */
4565 utype = unsigned_type_for (etype);
4566 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4567 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4568 build_int_cst (TREE_TYPE (maxv), 1), 1);
4569 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4571 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4572 minv, 1, maxv, 1)))
4573 etype = utype;
4574 else
4575 return 0;
4578 high = fold_convert_loc (loc, etype, high);
4579 low = fold_convert_loc (loc, etype, low);
4580 exp = fold_convert_loc (loc, etype, exp);
4582 value = const_binop (MINUS_EXPR, high, low);
4585 if (POINTER_TYPE_P (etype))
4587 if (value != 0 && !TREE_OVERFLOW (value))
4589 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4590 return build_range_check (loc, type,
4591 fold_build_pointer_plus_loc (loc, exp, low),
4592 1, build_int_cst (etype, 0), value);
4594 return 0;
4597 if (value != 0 && !TREE_OVERFLOW (value))
4598 return build_range_check (loc, type,
4599 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4600 1, build_int_cst (etype, 0), value);
4602 return 0;
4605 /* Return the predecessor of VAL in its type, handling the infinite case. */
4607 static tree
4608 range_predecessor (tree val)
4610 tree type = TREE_TYPE (val);
4612 if (INTEGRAL_TYPE_P (type)
4613 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4614 return 0;
4615 else
4616 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4617 build_int_cst (TREE_TYPE (val), 1), 0);
4620 /* Return the successor of VAL in its type, handling the infinite case. */
4622 static tree
4623 range_successor (tree val)
4625 tree type = TREE_TYPE (val);
4627 if (INTEGRAL_TYPE_P (type)
4628 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4629 return 0;
4630 else
4631 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4632 build_int_cst (TREE_TYPE (val), 1), 0);
4635 /* Given two ranges, see if we can merge them into one. Return 1 if we
4636 can, 0 if we can't. Set the output range into the specified parameters. */
4638 bool
4639 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4640 tree high0, int in1_p, tree low1, tree high1)
4642 int no_overlap;
4643 int subset;
4644 int temp;
4645 tree tem;
4646 int in_p;
4647 tree low, high;
4648 int lowequal = ((low0 == 0 && low1 == 0)
4649 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4650 low0, 0, low1, 0)));
4651 int highequal = ((high0 == 0 && high1 == 0)
4652 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4653 high0, 1, high1, 1)));
4655 /* Make range 0 be the range that starts first, or ends last if they
4656 start at the same value. Swap them if it isn't. */
4657 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4658 low0, 0, low1, 0))
4659 || (lowequal
4660 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4661 high1, 1, high0, 1))))
4663 temp = in0_p, in0_p = in1_p, in1_p = temp;
4664 tem = low0, low0 = low1, low1 = tem;
4665 tem = high0, high0 = high1, high1 = tem;
4668 /* Now flag two cases, whether the ranges are disjoint or whether the
4669 second range is totally subsumed in the first. Note that the tests
4670 below are simplified by the ones above. */
4671 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4672 high0, 1, low1, 0));
4673 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4674 high1, 1, high0, 1));
4676 /* We now have four cases, depending on whether we are including or
4677 excluding the two ranges. */
4678 if (in0_p && in1_p)
4680 /* If they don't overlap, the result is false. If the second range
4681 is a subset it is the result. Otherwise, the range is from the start
4682 of the second to the end of the first. */
4683 if (no_overlap)
4684 in_p = 0, low = high = 0;
4685 else if (subset)
4686 in_p = 1, low = low1, high = high1;
4687 else
4688 in_p = 1, low = low1, high = high0;
4691 else if (in0_p && ! in1_p)
4693 /* If they don't overlap, the result is the first range. If they are
4694 equal, the result is false. If the second range is a subset of the
4695 first, and the ranges begin at the same place, we go from just after
4696 the end of the second range to the end of the first. If the second
4697 range is not a subset of the first, or if it is a subset and both
4698 ranges end at the same place, the range starts at the start of the
4699 first range and ends just before the second range.
4700 Otherwise, we can't describe this as a single range. */
4701 if (no_overlap)
4702 in_p = 1, low = low0, high = high0;
4703 else if (lowequal && highequal)
4704 in_p = 0, low = high = 0;
4705 else if (subset && lowequal)
4707 low = range_successor (high1);
4708 high = high0;
4709 in_p = 1;
4710 if (low == 0)
4712 /* We are in the weird situation where high0 > high1 but
4713 high1 has no successor. Punt. */
4714 return 0;
4717 else if (! subset || highequal)
4719 low = low0;
4720 high = range_predecessor (low1);
4721 in_p = 1;
4722 if (high == 0)
4724 /* low0 < low1 but low1 has no predecessor. Punt. */
4725 return 0;
4728 else
4729 return 0;
4732 else if (! in0_p && in1_p)
4734 /* If they don't overlap, the result is the second range. If the second
4735 is a subset of the first, the result is false. Otherwise,
4736 the range starts just after the first range and ends at the
4737 end of the second. */
4738 if (no_overlap)
4739 in_p = 1, low = low1, high = high1;
4740 else if (subset || highequal)
4741 in_p = 0, low = high = 0;
4742 else
4744 low = range_successor (high0);
4745 high = high1;
4746 in_p = 1;
4747 if (low == 0)
4749 /* high1 > high0 but high0 has no successor. Punt. */
4750 return 0;
4755 else
4757 /* The case where we are excluding both ranges. Here the complex case
4758 is if they don't overlap. In that case, the only time we have a
4759 range is if they are adjacent. If the second is a subset of the
4760 first, the result is the first. Otherwise, the range to exclude
4761 starts at the beginning of the first range and ends at the end of the
4762 second. */
4763 if (no_overlap)
4765 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4766 range_successor (high0),
4767 1, low1, 0)))
4768 in_p = 0, low = low0, high = high1;
4769 else
4771 /* Canonicalize - [min, x] into - [-, x]. */
4772 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4773 switch (TREE_CODE (TREE_TYPE (low0)))
4775 case ENUMERAL_TYPE:
4776 if (TYPE_PRECISION (TREE_TYPE (low0))
4777 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4778 break;
4779 /* FALLTHROUGH */
4780 case INTEGER_TYPE:
4781 if (tree_int_cst_equal (low0,
4782 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4783 low0 = 0;
4784 break;
4785 case POINTER_TYPE:
4786 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4787 && integer_zerop (low0))
4788 low0 = 0;
4789 break;
4790 default:
4791 break;
4794 /* Canonicalize - [x, max] into - [x, -]. */
4795 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4796 switch (TREE_CODE (TREE_TYPE (high1)))
4798 case ENUMERAL_TYPE:
4799 if (TYPE_PRECISION (TREE_TYPE (high1))
4800 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4801 break;
4802 /* FALLTHROUGH */
4803 case INTEGER_TYPE:
4804 if (tree_int_cst_equal (high1,
4805 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4806 high1 = 0;
4807 break;
4808 case POINTER_TYPE:
4809 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4810 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4811 high1, 1,
4812 build_int_cst (TREE_TYPE (high1), 1),
4813 1)))
4814 high1 = 0;
4815 break;
4816 default:
4817 break;
4820 /* The ranges might be also adjacent between the maximum and
4821 minimum values of the given type. For
4822 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4823 return + [x + 1, y - 1]. */
4824 if (low0 == 0 && high1 == 0)
4826 low = range_successor (high0);
4827 high = range_predecessor (low1);
4828 if (low == 0 || high == 0)
4829 return 0;
4831 in_p = 1;
4833 else
4834 return 0;
4837 else if (subset)
4838 in_p = 0, low = low0, high = high0;
4839 else
4840 in_p = 0, low = low0, high = high1;
4843 *pin_p = in_p, *plow = low, *phigh = high;
4844 return 1;
4848 /* Subroutine of fold, looking inside expressions of the form
4849 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4850 of the COND_EXPR. This function is being used also to optimize
4851 A op B ? C : A, by reversing the comparison first.
4853 Return a folded expression whose code is not a COND_EXPR
4854 anymore, or NULL_TREE if no folding opportunity is found. */
4856 static tree
4857 fold_cond_expr_with_comparison (location_t loc, tree type,
4858 tree arg0, tree arg1, tree arg2)
4860 enum tree_code comp_code = TREE_CODE (arg0);
4861 tree arg00 = TREE_OPERAND (arg0, 0);
4862 tree arg01 = TREE_OPERAND (arg0, 1);
4863 tree arg1_type = TREE_TYPE (arg1);
4864 tree tem;
4866 STRIP_NOPS (arg1);
4867 STRIP_NOPS (arg2);
4869 /* If we have A op 0 ? A : -A, consider applying the following
4870 transformations:
4872 A == 0? A : -A same as -A
4873 A != 0? A : -A same as A
4874 A >= 0? A : -A same as abs (A)
4875 A > 0? A : -A same as abs (A)
4876 A <= 0? A : -A same as -abs (A)
4877 A < 0? A : -A same as -abs (A)
4879 None of these transformations work for modes with signed
4880 zeros. If A is +/-0, the first two transformations will
4881 change the sign of the result (from +0 to -0, or vice
4882 versa). The last four will fix the sign of the result,
4883 even though the original expressions could be positive or
4884 negative, depending on the sign of A.
4886 Note that all these transformations are correct if A is
4887 NaN, since the two alternatives (A and -A) are also NaNs. */
4888 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4889 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4890 ? real_zerop (arg01)
4891 : integer_zerop (arg01))
4892 && ((TREE_CODE (arg2) == NEGATE_EXPR
4893 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4894 /* In the case that A is of the form X-Y, '-A' (arg2) may
4895 have already been folded to Y-X, check for that. */
4896 || (TREE_CODE (arg1) == MINUS_EXPR
4897 && TREE_CODE (arg2) == MINUS_EXPR
4898 && operand_equal_p (TREE_OPERAND (arg1, 0),
4899 TREE_OPERAND (arg2, 1), 0)
4900 && operand_equal_p (TREE_OPERAND (arg1, 1),
4901 TREE_OPERAND (arg2, 0), 0))))
4902 switch (comp_code)
4904 case EQ_EXPR:
4905 case UNEQ_EXPR:
4906 tem = fold_convert_loc (loc, arg1_type, arg1);
4907 return pedantic_non_lvalue_loc (loc,
4908 fold_convert_loc (loc, type,
4909 negate_expr (tem)));
4910 case NE_EXPR:
4911 case LTGT_EXPR:
4912 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4913 case UNGE_EXPR:
4914 case UNGT_EXPR:
4915 if (flag_trapping_math)
4916 break;
4917 /* Fall through. */
4918 case GE_EXPR:
4919 case GT_EXPR:
4920 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4921 arg1 = fold_convert_loc (loc, signed_type_for
4922 (TREE_TYPE (arg1)), arg1);
4923 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4924 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4925 case UNLE_EXPR:
4926 case UNLT_EXPR:
4927 if (flag_trapping_math)
4928 break;
4929 case LE_EXPR:
4930 case LT_EXPR:
4931 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4932 arg1 = fold_convert_loc (loc, signed_type_for
4933 (TREE_TYPE (arg1)), arg1);
4934 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4935 return negate_expr (fold_convert_loc (loc, type, tem));
4936 default:
4937 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4938 break;
4941 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4942 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4943 both transformations are correct when A is NaN: A != 0
4944 is then true, and A == 0 is false. */
4946 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4947 && integer_zerop (arg01) && integer_zerop (arg2))
4949 if (comp_code == NE_EXPR)
4950 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4951 else if (comp_code == EQ_EXPR)
4952 return build_zero_cst (type);
4955 /* Try some transformations of A op B ? A : B.
4957 A == B? A : B same as B
4958 A != B? A : B same as A
4959 A >= B? A : B same as max (A, B)
4960 A > B? A : B same as max (B, A)
4961 A <= B? A : B same as min (A, B)
4962 A < B? A : B same as min (B, A)
4964 As above, these transformations don't work in the presence
4965 of signed zeros. For example, if A and B are zeros of
4966 opposite sign, the first two transformations will change
4967 the sign of the result. In the last four, the original
4968 expressions give different results for (A=+0, B=-0) and
4969 (A=-0, B=+0), but the transformed expressions do not.
4971 The first two transformations are correct if either A or B
4972 is a NaN. In the first transformation, the condition will
4973 be false, and B will indeed be chosen. In the case of the
4974 second transformation, the condition A != B will be true,
4975 and A will be chosen.
4977 The conversions to max() and min() are not correct if B is
4978 a number and A is not. The conditions in the original
4979 expressions will be false, so all four give B. The min()
4980 and max() versions would give a NaN instead. */
4981 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4982 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4983 /* Avoid these transformations if the COND_EXPR may be used
4984 as an lvalue in the C++ front-end. PR c++/19199. */
4985 && (in_gimple_form
4986 || VECTOR_TYPE_P (type)
4987 || (! lang_GNU_CXX ()
4988 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4989 || ! maybe_lvalue_p (arg1)
4990 || ! maybe_lvalue_p (arg2)))
4992 tree comp_op0 = arg00;
4993 tree comp_op1 = arg01;
4994 tree comp_type = TREE_TYPE (comp_op0);
4996 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4997 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4999 comp_type = type;
5000 comp_op0 = arg1;
5001 comp_op1 = arg2;
5004 switch (comp_code)
5006 case EQ_EXPR:
5007 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5008 case NE_EXPR:
5009 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5010 case LE_EXPR:
5011 case LT_EXPR:
5012 case UNLE_EXPR:
5013 case UNLT_EXPR:
5014 /* In C++ a ?: expression can be an lvalue, so put the
5015 operand which will be used if they are equal first
5016 so that we can convert this back to the
5017 corresponding COND_EXPR. */
5018 if (!HONOR_NANS (arg1))
5020 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5021 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5022 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5023 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5024 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5025 comp_op1, comp_op0);
5026 return pedantic_non_lvalue_loc (loc,
5027 fold_convert_loc (loc, type, tem));
5029 break;
5030 case GE_EXPR:
5031 case GT_EXPR:
5032 case UNGE_EXPR:
5033 case UNGT_EXPR:
5034 if (!HONOR_NANS (arg1))
5036 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5037 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5038 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5039 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5040 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5041 comp_op1, comp_op0);
5042 return pedantic_non_lvalue_loc (loc,
5043 fold_convert_loc (loc, type, tem));
5045 break;
5046 case UNEQ_EXPR:
5047 if (!HONOR_NANS (arg1))
5048 return pedantic_non_lvalue_loc (loc,
5049 fold_convert_loc (loc, type, arg2));
5050 break;
5051 case LTGT_EXPR:
5052 if (!HONOR_NANS (arg1))
5053 return pedantic_non_lvalue_loc (loc,
5054 fold_convert_loc (loc, type, arg1));
5055 break;
5056 default:
5057 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5058 break;
5062 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5063 we might still be able to simplify this. For example,
5064 if C1 is one less or one more than C2, this might have started
5065 out as a MIN or MAX and been transformed by this function.
5066 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5068 if (INTEGRAL_TYPE_P (type)
5069 && TREE_CODE (arg01) == INTEGER_CST
5070 && TREE_CODE (arg2) == INTEGER_CST)
5071 switch (comp_code)
5073 case EQ_EXPR:
5074 if (TREE_CODE (arg1) == INTEGER_CST)
5075 break;
5076 /* We can replace A with C1 in this case. */
5077 arg1 = fold_convert_loc (loc, type, arg01);
5078 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5080 case LT_EXPR:
5081 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5082 MIN_EXPR, to preserve the signedness of the comparison. */
5083 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5084 OEP_ONLY_CONST)
5085 && operand_equal_p (arg01,
5086 const_binop (PLUS_EXPR, arg2,
5087 build_int_cst (type, 1)),
5088 OEP_ONLY_CONST))
5090 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5091 fold_convert_loc (loc, TREE_TYPE (arg00),
5092 arg2));
5093 return pedantic_non_lvalue_loc (loc,
5094 fold_convert_loc (loc, type, tem));
5096 break;
5098 case LE_EXPR:
5099 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5100 as above. */
5101 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5102 OEP_ONLY_CONST)
5103 && operand_equal_p (arg01,
5104 const_binop (MINUS_EXPR, arg2,
5105 build_int_cst (type, 1)),
5106 OEP_ONLY_CONST))
5108 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5109 fold_convert_loc (loc, TREE_TYPE (arg00),
5110 arg2));
5111 return pedantic_non_lvalue_loc (loc,
5112 fold_convert_loc (loc, type, tem));
5114 break;
5116 case GT_EXPR:
5117 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5118 MAX_EXPR, to preserve the signedness of the comparison. */
5119 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5120 OEP_ONLY_CONST)
5121 && operand_equal_p (arg01,
5122 const_binop (MINUS_EXPR, arg2,
5123 build_int_cst (type, 1)),
5124 OEP_ONLY_CONST))
5126 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5127 fold_convert_loc (loc, TREE_TYPE (arg00),
5128 arg2));
5129 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5131 break;
5133 case GE_EXPR:
5134 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5135 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5136 OEP_ONLY_CONST)
5137 && operand_equal_p (arg01,
5138 const_binop (PLUS_EXPR, arg2,
5139 build_int_cst (type, 1)),
5140 OEP_ONLY_CONST))
5142 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5143 fold_convert_loc (loc, TREE_TYPE (arg00),
5144 arg2));
5145 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5147 break;
5148 case NE_EXPR:
5149 break;
5150 default:
5151 gcc_unreachable ();
5154 return NULL_TREE;
5159 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5160 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5161 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5162 false) >= 2)
5163 #endif
5165 /* EXP is some logical combination of boolean tests. See if we can
5166 merge it into some range test. Return the new tree if so. */
5168 static tree
5169 fold_range_test (location_t loc, enum tree_code code, tree type,
5170 tree op0, tree op1)
5172 int or_op = (code == TRUTH_ORIF_EXPR
5173 || code == TRUTH_OR_EXPR);
5174 int in0_p, in1_p, in_p;
5175 tree low0, low1, low, high0, high1, high;
5176 bool strict_overflow_p = false;
5177 tree tem, lhs, rhs;
5178 const char * const warnmsg = G_("assuming signed overflow does not occur "
5179 "when simplifying range test");
5181 if (!INTEGRAL_TYPE_P (type))
5182 return 0;
5184 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5185 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5187 /* If this is an OR operation, invert both sides; we will invert
5188 again at the end. */
5189 if (or_op)
5190 in0_p = ! in0_p, in1_p = ! in1_p;
5192 /* If both expressions are the same, if we can merge the ranges, and we
5193 can build the range test, return it or it inverted. If one of the
5194 ranges is always true or always false, consider it to be the same
5195 expression as the other. */
5196 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5197 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5198 in1_p, low1, high1)
5199 && 0 != (tem = (build_range_check (loc, type,
5200 lhs != 0 ? lhs
5201 : rhs != 0 ? rhs : integer_zero_node,
5202 in_p, low, high))))
5204 if (strict_overflow_p)
5205 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5206 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5209 /* On machines where the branch cost is expensive, if this is a
5210 short-circuited branch and the underlying object on both sides
5211 is the same, make a non-short-circuit operation. */
5212 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5213 && lhs != 0 && rhs != 0
5214 && (code == TRUTH_ANDIF_EXPR
5215 || code == TRUTH_ORIF_EXPR)
5216 && operand_equal_p (lhs, rhs, 0))
5218 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5219 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5220 which cases we can't do this. */
5221 if (simple_operand_p (lhs))
5222 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5223 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5224 type, op0, op1);
5226 else if (!lang_hooks.decls.global_bindings_p ()
5227 && !CONTAINS_PLACEHOLDER_P (lhs))
5229 tree common = save_expr (lhs);
5231 if (0 != (lhs = build_range_check (loc, type, common,
5232 or_op ? ! in0_p : in0_p,
5233 low0, high0))
5234 && (0 != (rhs = build_range_check (loc, type, common,
5235 or_op ? ! in1_p : in1_p,
5236 low1, high1))))
5238 if (strict_overflow_p)
5239 fold_overflow_warning (warnmsg,
5240 WARN_STRICT_OVERFLOW_COMPARISON);
5241 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5242 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5243 type, lhs, rhs);
5248 return 0;
5251 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5252 bit value. Arrange things so the extra bits will be set to zero if and
5253 only if C is signed-extended to its full width. If MASK is nonzero,
5254 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5256 static tree
5257 unextend (tree c, int p, int unsignedp, tree mask)
5259 tree type = TREE_TYPE (c);
5260 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5261 tree temp;
5263 if (p == modesize || unsignedp)
5264 return c;
5266 /* We work by getting just the sign bit into the low-order bit, then
5267 into the high-order bit, then sign-extend. We then XOR that value
5268 with C. */
5269 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5271 /* We must use a signed type in order to get an arithmetic right shift.
5272 However, we must also avoid introducing accidental overflows, so that
5273 a subsequent call to integer_zerop will work. Hence we must
5274 do the type conversion here. At this point, the constant is either
5275 zero or one, and the conversion to a signed type can never overflow.
5276 We could get an overflow if this conversion is done anywhere else. */
5277 if (TYPE_UNSIGNED (type))
5278 temp = fold_convert (signed_type_for (type), temp);
5280 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5281 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5282 if (mask != 0)
5283 temp = const_binop (BIT_AND_EXPR, temp,
5284 fold_convert (TREE_TYPE (c), mask));
5285 /* If necessary, convert the type back to match the type of C. */
5286 if (TYPE_UNSIGNED (type))
5287 temp = fold_convert (type, temp);
5289 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5292 /* For an expression that has the form
5293 (A && B) || ~B
5295 (A || B) && ~B,
5296 we can drop one of the inner expressions and simplify to
5297 A || ~B
5299 A && ~B
5300 LOC is the location of the resulting expression. OP is the inner
5301 logical operation; the left-hand side in the examples above, while CMPOP
5302 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5303 removing a condition that guards another, as in
5304 (A != NULL && A->...) || A == NULL
5305 which we must not transform. If RHS_ONLY is true, only eliminate the
5306 right-most operand of the inner logical operation. */
5308 static tree
5309 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5310 bool rhs_only)
5312 tree type = TREE_TYPE (cmpop);
5313 enum tree_code code = TREE_CODE (cmpop);
5314 enum tree_code truthop_code = TREE_CODE (op);
5315 tree lhs = TREE_OPERAND (op, 0);
5316 tree rhs = TREE_OPERAND (op, 1);
5317 tree orig_lhs = lhs, orig_rhs = rhs;
5318 enum tree_code rhs_code = TREE_CODE (rhs);
5319 enum tree_code lhs_code = TREE_CODE (lhs);
5320 enum tree_code inv_code;
5322 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5323 return NULL_TREE;
5325 if (TREE_CODE_CLASS (code) != tcc_comparison)
5326 return NULL_TREE;
5328 if (rhs_code == truthop_code)
5330 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5331 if (newrhs != NULL_TREE)
5333 rhs = newrhs;
5334 rhs_code = TREE_CODE (rhs);
5337 if (lhs_code == truthop_code && !rhs_only)
5339 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5340 if (newlhs != NULL_TREE)
5342 lhs = newlhs;
5343 lhs_code = TREE_CODE (lhs);
5347 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5348 if (inv_code == rhs_code
5349 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5350 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5351 return lhs;
5352 if (!rhs_only && inv_code == lhs_code
5353 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5354 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5355 return rhs;
5356 if (rhs != orig_rhs || lhs != orig_lhs)
5357 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5358 lhs, rhs);
5359 return NULL_TREE;
5362 /* Find ways of folding logical expressions of LHS and RHS:
5363 Try to merge two comparisons to the same innermost item.
5364 Look for range tests like "ch >= '0' && ch <= '9'".
5365 Look for combinations of simple terms on machines with expensive branches
5366 and evaluate the RHS unconditionally.
5368 For example, if we have p->a == 2 && p->b == 4 and we can make an
5369 object large enough to span both A and B, we can do this with a comparison
5370 against the object ANDed with the a mask.
5372 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5373 operations to do this with one comparison.
5375 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5376 function and the one above.
5378 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5379 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5381 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5382 two operands.
5384 We return the simplified tree or 0 if no optimization is possible. */
5386 static tree
5387 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5388 tree lhs, tree rhs)
5390 /* If this is the "or" of two comparisons, we can do something if
5391 the comparisons are NE_EXPR. If this is the "and", we can do something
5392 if the comparisons are EQ_EXPR. I.e.,
5393 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5395 WANTED_CODE is this operation code. For single bit fields, we can
5396 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5397 comparison for one-bit fields. */
5399 enum tree_code wanted_code;
5400 enum tree_code lcode, rcode;
5401 tree ll_arg, lr_arg, rl_arg, rr_arg;
5402 tree ll_inner, lr_inner, rl_inner, rr_inner;
5403 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5404 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5405 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5406 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5407 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5408 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5409 machine_mode lnmode, rnmode;
5410 tree ll_mask, lr_mask, rl_mask, rr_mask;
5411 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5412 tree l_const, r_const;
5413 tree lntype, rntype, result;
5414 HOST_WIDE_INT first_bit, end_bit;
5415 int volatilep;
5417 /* Start by getting the comparison codes. Fail if anything is volatile.
5418 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5419 it were surrounded with a NE_EXPR. */
5421 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5422 return 0;
5424 lcode = TREE_CODE (lhs);
5425 rcode = TREE_CODE (rhs);
5427 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5429 lhs = build2 (NE_EXPR, truth_type, lhs,
5430 build_int_cst (TREE_TYPE (lhs), 0));
5431 lcode = NE_EXPR;
5434 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5436 rhs = build2 (NE_EXPR, truth_type, rhs,
5437 build_int_cst (TREE_TYPE (rhs), 0));
5438 rcode = NE_EXPR;
5441 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5442 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5443 return 0;
5445 ll_arg = TREE_OPERAND (lhs, 0);
5446 lr_arg = TREE_OPERAND (lhs, 1);
5447 rl_arg = TREE_OPERAND (rhs, 0);
5448 rr_arg = TREE_OPERAND (rhs, 1);
5450 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5451 if (simple_operand_p (ll_arg)
5452 && simple_operand_p (lr_arg))
5454 if (operand_equal_p (ll_arg, rl_arg, 0)
5455 && operand_equal_p (lr_arg, rr_arg, 0))
5457 result = combine_comparisons (loc, code, lcode, rcode,
5458 truth_type, ll_arg, lr_arg);
5459 if (result)
5460 return result;
5462 else if (operand_equal_p (ll_arg, rr_arg, 0)
5463 && operand_equal_p (lr_arg, rl_arg, 0))
5465 result = combine_comparisons (loc, code, lcode,
5466 swap_tree_comparison (rcode),
5467 truth_type, ll_arg, lr_arg);
5468 if (result)
5469 return result;
5473 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5474 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5476 /* If the RHS can be evaluated unconditionally and its operands are
5477 simple, it wins to evaluate the RHS unconditionally on machines
5478 with expensive branches. In this case, this isn't a comparison
5479 that can be merged. */
5481 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5482 false) >= 2
5483 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5484 && simple_operand_p (rl_arg)
5485 && simple_operand_p (rr_arg))
5487 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5488 if (code == TRUTH_OR_EXPR
5489 && lcode == NE_EXPR && integer_zerop (lr_arg)
5490 && rcode == NE_EXPR && integer_zerop (rr_arg)
5491 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5492 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5493 return build2_loc (loc, NE_EXPR, truth_type,
5494 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5495 ll_arg, rl_arg),
5496 build_int_cst (TREE_TYPE (ll_arg), 0));
5498 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5499 if (code == TRUTH_AND_EXPR
5500 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5501 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5502 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5503 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5504 return build2_loc (loc, EQ_EXPR, truth_type,
5505 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5506 ll_arg, rl_arg),
5507 build_int_cst (TREE_TYPE (ll_arg), 0));
5510 /* See if the comparisons can be merged. Then get all the parameters for
5511 each side. */
5513 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5514 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5515 return 0;
5517 volatilep = 0;
5518 ll_inner = decode_field_reference (loc, ll_arg,
5519 &ll_bitsize, &ll_bitpos, &ll_mode,
5520 &ll_unsignedp, &volatilep, &ll_mask,
5521 &ll_and_mask);
5522 lr_inner = decode_field_reference (loc, lr_arg,
5523 &lr_bitsize, &lr_bitpos, &lr_mode,
5524 &lr_unsignedp, &volatilep, &lr_mask,
5525 &lr_and_mask);
5526 rl_inner = decode_field_reference (loc, rl_arg,
5527 &rl_bitsize, &rl_bitpos, &rl_mode,
5528 &rl_unsignedp, &volatilep, &rl_mask,
5529 &rl_and_mask);
5530 rr_inner = decode_field_reference (loc, rr_arg,
5531 &rr_bitsize, &rr_bitpos, &rr_mode,
5532 &rr_unsignedp, &volatilep, &rr_mask,
5533 &rr_and_mask);
5535 /* It must be true that the inner operation on the lhs of each
5536 comparison must be the same if we are to be able to do anything.
5537 Then see if we have constants. If not, the same must be true for
5538 the rhs's. */
5539 if (volatilep || ll_inner == 0 || rl_inner == 0
5540 || ! operand_equal_p (ll_inner, rl_inner, 0))
5541 return 0;
5543 if (TREE_CODE (lr_arg) == INTEGER_CST
5544 && TREE_CODE (rr_arg) == INTEGER_CST)
5545 l_const = lr_arg, r_const = rr_arg;
5546 else if (lr_inner == 0 || rr_inner == 0
5547 || ! operand_equal_p (lr_inner, rr_inner, 0))
5548 return 0;
5549 else
5550 l_const = r_const = 0;
5552 /* If either comparison code is not correct for our logical operation,
5553 fail. However, we can convert a one-bit comparison against zero into
5554 the opposite comparison against that bit being set in the field. */
5556 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5557 if (lcode != wanted_code)
5559 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5561 /* Make the left operand unsigned, since we are only interested
5562 in the value of one bit. Otherwise we are doing the wrong
5563 thing below. */
5564 ll_unsignedp = 1;
5565 l_const = ll_mask;
5567 else
5568 return 0;
5571 /* This is analogous to the code for l_const above. */
5572 if (rcode != wanted_code)
5574 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5576 rl_unsignedp = 1;
5577 r_const = rl_mask;
5579 else
5580 return 0;
5583 /* See if we can find a mode that contains both fields being compared on
5584 the left. If we can't, fail. Otherwise, update all constants and masks
5585 to be relative to a field of that size. */
5586 first_bit = MIN (ll_bitpos, rl_bitpos);
5587 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5588 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5589 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5590 volatilep);
5591 if (lnmode == VOIDmode)
5592 return 0;
5594 lnbitsize = GET_MODE_BITSIZE (lnmode);
5595 lnbitpos = first_bit & ~ (lnbitsize - 1);
5596 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5597 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5599 if (BYTES_BIG_ENDIAN)
5601 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5602 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5605 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5606 size_int (xll_bitpos));
5607 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5608 size_int (xrl_bitpos));
5610 if (l_const)
5612 l_const = fold_convert_loc (loc, lntype, l_const);
5613 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5614 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5615 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5616 fold_build1_loc (loc, BIT_NOT_EXPR,
5617 lntype, ll_mask))))
5619 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5621 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5624 if (r_const)
5626 r_const = fold_convert_loc (loc, lntype, r_const);
5627 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5628 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5629 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5630 fold_build1_loc (loc, BIT_NOT_EXPR,
5631 lntype, rl_mask))))
5633 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5635 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5639 /* If the right sides are not constant, do the same for it. Also,
5640 disallow this optimization if a size or signedness mismatch occurs
5641 between the left and right sides. */
5642 if (l_const == 0)
5644 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5645 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5646 /* Make sure the two fields on the right
5647 correspond to the left without being swapped. */
5648 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5649 return 0;
5651 first_bit = MIN (lr_bitpos, rr_bitpos);
5652 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5653 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5654 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5655 volatilep);
5656 if (rnmode == VOIDmode)
5657 return 0;
5659 rnbitsize = GET_MODE_BITSIZE (rnmode);
5660 rnbitpos = first_bit & ~ (rnbitsize - 1);
5661 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5662 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5664 if (BYTES_BIG_ENDIAN)
5666 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5667 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5670 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5671 rntype, lr_mask),
5672 size_int (xlr_bitpos));
5673 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5674 rntype, rr_mask),
5675 size_int (xrr_bitpos));
5677 /* Make a mask that corresponds to both fields being compared.
5678 Do this for both items being compared. If the operands are the
5679 same size and the bits being compared are in the same position
5680 then we can do this by masking both and comparing the masked
5681 results. */
5682 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5683 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5684 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5686 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5687 ll_unsignedp || rl_unsignedp);
5688 if (! all_ones_mask_p (ll_mask, lnbitsize))
5689 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5691 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5692 lr_unsignedp || rr_unsignedp);
5693 if (! all_ones_mask_p (lr_mask, rnbitsize))
5694 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5696 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5699 /* There is still another way we can do something: If both pairs of
5700 fields being compared are adjacent, we may be able to make a wider
5701 field containing them both.
5703 Note that we still must mask the lhs/rhs expressions. Furthermore,
5704 the mask must be shifted to account for the shift done by
5705 make_bit_field_ref. */
5706 if ((ll_bitsize + ll_bitpos == rl_bitpos
5707 && lr_bitsize + lr_bitpos == rr_bitpos)
5708 || (ll_bitpos == rl_bitpos + rl_bitsize
5709 && lr_bitpos == rr_bitpos + rr_bitsize))
5711 tree type;
5713 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5714 ll_bitsize + rl_bitsize,
5715 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5716 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5717 lr_bitsize + rr_bitsize,
5718 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5720 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5721 size_int (MIN (xll_bitpos, xrl_bitpos)));
5722 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5723 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5725 /* Convert to the smaller type before masking out unwanted bits. */
5726 type = lntype;
5727 if (lntype != rntype)
5729 if (lnbitsize > rnbitsize)
5731 lhs = fold_convert_loc (loc, rntype, lhs);
5732 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5733 type = rntype;
5735 else if (lnbitsize < rnbitsize)
5737 rhs = fold_convert_loc (loc, lntype, rhs);
5738 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5739 type = lntype;
5743 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5744 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5746 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5747 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5749 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5752 return 0;
5755 /* Handle the case of comparisons with constants. If there is something in
5756 common between the masks, those bits of the constants must be the same.
5757 If not, the condition is always false. Test for this to avoid generating
5758 incorrect code below. */
5759 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5760 if (! integer_zerop (result)
5761 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5762 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5764 if (wanted_code == NE_EXPR)
5766 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5767 return constant_boolean_node (true, truth_type);
5769 else
5771 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5772 return constant_boolean_node (false, truth_type);
5776 /* Construct the expression we will return. First get the component
5777 reference we will make. Unless the mask is all ones the width of
5778 that field, perform the mask operation. Then compare with the
5779 merged constant. */
5780 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5781 ll_unsignedp || rl_unsignedp);
5783 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5784 if (! all_ones_mask_p (ll_mask, lnbitsize))
5785 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5787 return build2_loc (loc, wanted_code, truth_type, result,
5788 const_binop (BIT_IOR_EXPR, l_const, r_const));
5791 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5792 constant. */
5794 static tree
5795 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5796 tree op0, tree op1)
5798 tree arg0 = op0;
5799 enum tree_code op_code;
5800 tree comp_const;
5801 tree minmax_const;
5802 int consts_equal, consts_lt;
5803 tree inner;
5805 STRIP_SIGN_NOPS (arg0);
5807 op_code = TREE_CODE (arg0);
5808 minmax_const = TREE_OPERAND (arg0, 1);
5809 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5810 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5811 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5812 inner = TREE_OPERAND (arg0, 0);
5814 /* If something does not permit us to optimize, return the original tree. */
5815 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5816 || TREE_CODE (comp_const) != INTEGER_CST
5817 || TREE_OVERFLOW (comp_const)
5818 || TREE_CODE (minmax_const) != INTEGER_CST
5819 || TREE_OVERFLOW (minmax_const))
5820 return NULL_TREE;
5822 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5823 and GT_EXPR, doing the rest with recursive calls using logical
5824 simplifications. */
5825 switch (code)
5827 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5829 tree tem
5830 = optimize_minmax_comparison (loc,
5831 invert_tree_comparison (code, false),
5832 type, op0, op1);
5833 if (tem)
5834 return invert_truthvalue_loc (loc, tem);
5835 return NULL_TREE;
5838 case GE_EXPR:
5839 return
5840 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5841 optimize_minmax_comparison
5842 (loc, EQ_EXPR, type, arg0, comp_const),
5843 optimize_minmax_comparison
5844 (loc, GT_EXPR, type, arg0, comp_const));
5846 case EQ_EXPR:
5847 if (op_code == MAX_EXPR && consts_equal)
5848 /* MAX (X, 0) == 0 -> X <= 0 */
5849 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5851 else if (op_code == MAX_EXPR && consts_lt)
5852 /* MAX (X, 0) == 5 -> X == 5 */
5853 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5855 else if (op_code == MAX_EXPR)
5856 /* MAX (X, 0) == -1 -> false */
5857 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5859 else if (consts_equal)
5860 /* MIN (X, 0) == 0 -> X >= 0 */
5861 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5863 else if (consts_lt)
5864 /* MIN (X, 0) == 5 -> false */
5865 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5867 else
5868 /* MIN (X, 0) == -1 -> X == -1 */
5869 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5871 case GT_EXPR:
5872 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5873 /* MAX (X, 0) > 0 -> X > 0
5874 MAX (X, 0) > 5 -> X > 5 */
5875 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5877 else if (op_code == MAX_EXPR)
5878 /* MAX (X, 0) > -1 -> true */
5879 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5881 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5882 /* MIN (X, 0) > 0 -> false
5883 MIN (X, 0) > 5 -> false */
5884 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5886 else
5887 /* MIN (X, 0) > -1 -> X > -1 */
5888 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5890 default:
5891 return NULL_TREE;
5895 /* T is an integer expression that is being multiplied, divided, or taken a
5896 modulus (CODE says which and what kind of divide or modulus) by a
5897 constant C. See if we can eliminate that operation by folding it with
5898 other operations already in T. WIDE_TYPE, if non-null, is a type that
5899 should be used for the computation if wider than our type.
5901 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5902 (X * 2) + (Y * 4). We must, however, be assured that either the original
5903 expression would not overflow or that overflow is undefined for the type
5904 in the language in question.
5906 If we return a non-null expression, it is an equivalent form of the
5907 original computation, but need not be in the original type.
5909 We set *STRICT_OVERFLOW_P to true if the return values depends on
5910 signed overflow being undefined. Otherwise we do not change
5911 *STRICT_OVERFLOW_P. */
5913 static tree
5914 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5915 bool *strict_overflow_p)
5917 /* To avoid exponential search depth, refuse to allow recursion past
5918 three levels. Beyond that (1) it's highly unlikely that we'll find
5919 something interesting and (2) we've probably processed it before
5920 when we built the inner expression. */
5922 static int depth;
5923 tree ret;
5925 if (depth > 3)
5926 return NULL;
5928 depth++;
5929 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5930 depth--;
5932 return ret;
5935 static tree
5936 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5937 bool *strict_overflow_p)
5939 tree type = TREE_TYPE (t);
5940 enum tree_code tcode = TREE_CODE (t);
5941 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5942 > GET_MODE_SIZE (TYPE_MODE (type)))
5943 ? wide_type : type);
5944 tree t1, t2;
5945 int same_p = tcode == code;
5946 tree op0 = NULL_TREE, op1 = NULL_TREE;
5947 bool sub_strict_overflow_p;
5949 /* Don't deal with constants of zero here; they confuse the code below. */
5950 if (integer_zerop (c))
5951 return NULL_TREE;
5953 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5954 op0 = TREE_OPERAND (t, 0);
5956 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5957 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5959 /* Note that we need not handle conditional operations here since fold
5960 already handles those cases. So just do arithmetic here. */
5961 switch (tcode)
5963 case INTEGER_CST:
5964 /* For a constant, we can always simplify if we are a multiply
5965 or (for divide and modulus) if it is a multiple of our constant. */
5966 if (code == MULT_EXPR
5967 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5968 return const_binop (code, fold_convert (ctype, t),
5969 fold_convert (ctype, c));
5970 break;
5972 CASE_CONVERT: case NON_LVALUE_EXPR:
5973 /* If op0 is an expression ... */
5974 if ((COMPARISON_CLASS_P (op0)
5975 || UNARY_CLASS_P (op0)
5976 || BINARY_CLASS_P (op0)
5977 || VL_EXP_CLASS_P (op0)
5978 || EXPRESSION_CLASS_P (op0))
5979 /* ... and has wrapping overflow, and its type is smaller
5980 than ctype, then we cannot pass through as widening. */
5981 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5982 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
5983 && (TYPE_PRECISION (ctype)
5984 > TYPE_PRECISION (TREE_TYPE (op0))))
5985 /* ... or this is a truncation (t is narrower than op0),
5986 then we cannot pass through this narrowing. */
5987 || (TYPE_PRECISION (type)
5988 < TYPE_PRECISION (TREE_TYPE (op0)))
5989 /* ... or signedness changes for division or modulus,
5990 then we cannot pass through this conversion. */
5991 || (code != MULT_EXPR
5992 && (TYPE_UNSIGNED (ctype)
5993 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5994 /* ... or has undefined overflow while the converted to
5995 type has not, we cannot do the operation in the inner type
5996 as that would introduce undefined overflow. */
5997 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5998 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
5999 && !TYPE_OVERFLOW_UNDEFINED (type))))
6000 break;
6002 /* Pass the constant down and see if we can make a simplification. If
6003 we can, replace this expression with the inner simplification for
6004 possible later conversion to our or some other type. */
6005 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6006 && TREE_CODE (t2) == INTEGER_CST
6007 && !TREE_OVERFLOW (t2)
6008 && (0 != (t1 = extract_muldiv (op0, t2, code,
6009 code == MULT_EXPR
6010 ? ctype : NULL_TREE,
6011 strict_overflow_p))))
6012 return t1;
6013 break;
6015 case ABS_EXPR:
6016 /* If widening the type changes it from signed to unsigned, then we
6017 must avoid building ABS_EXPR itself as unsigned. */
6018 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6020 tree cstype = (*signed_type_for) (ctype);
6021 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6022 != 0)
6024 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6025 return fold_convert (ctype, t1);
6027 break;
6029 /* If the constant is negative, we cannot simplify this. */
6030 if (tree_int_cst_sgn (c) == -1)
6031 break;
6032 /* FALLTHROUGH */
6033 case NEGATE_EXPR:
6034 /* For division and modulus, type can't be unsigned, as e.g.
6035 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6036 For signed types, even with wrapping overflow, this is fine. */
6037 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6038 break;
6039 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6040 != 0)
6041 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6042 break;
6044 case MIN_EXPR: case MAX_EXPR:
6045 /* If widening the type changes the signedness, then we can't perform
6046 this optimization as that changes the result. */
6047 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6048 break;
6050 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6051 sub_strict_overflow_p = false;
6052 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6053 &sub_strict_overflow_p)) != 0
6054 && (t2 = extract_muldiv (op1, c, code, wide_type,
6055 &sub_strict_overflow_p)) != 0)
6057 if (tree_int_cst_sgn (c) < 0)
6058 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6059 if (sub_strict_overflow_p)
6060 *strict_overflow_p = true;
6061 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6062 fold_convert (ctype, t2));
6064 break;
6066 case LSHIFT_EXPR: case RSHIFT_EXPR:
6067 /* If the second operand is constant, this is a multiplication
6068 or floor division, by a power of two, so we can treat it that
6069 way unless the multiplier or divisor overflows. Signed
6070 left-shift overflow is implementation-defined rather than
6071 undefined in C90, so do not convert signed left shift into
6072 multiplication. */
6073 if (TREE_CODE (op1) == INTEGER_CST
6074 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6075 /* const_binop may not detect overflow correctly,
6076 so check for it explicitly here. */
6077 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6078 && 0 != (t1 = fold_convert (ctype,
6079 const_binop (LSHIFT_EXPR,
6080 size_one_node,
6081 op1)))
6082 && !TREE_OVERFLOW (t1))
6083 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6084 ? MULT_EXPR : FLOOR_DIV_EXPR,
6085 ctype,
6086 fold_convert (ctype, op0),
6087 t1),
6088 c, code, wide_type, strict_overflow_p);
6089 break;
6091 case PLUS_EXPR: case MINUS_EXPR:
6092 /* See if we can eliminate the operation on both sides. If we can, we
6093 can return a new PLUS or MINUS. If we can't, the only remaining
6094 cases where we can do anything are if the second operand is a
6095 constant. */
6096 sub_strict_overflow_p = false;
6097 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6098 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6099 if (t1 != 0 && t2 != 0
6100 && (code == MULT_EXPR
6101 /* If not multiplication, we can only do this if both operands
6102 are divisible by c. */
6103 || (multiple_of_p (ctype, op0, c)
6104 && multiple_of_p (ctype, op1, c))))
6106 if (sub_strict_overflow_p)
6107 *strict_overflow_p = true;
6108 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6109 fold_convert (ctype, t2));
6112 /* If this was a subtraction, negate OP1 and set it to be an addition.
6113 This simplifies the logic below. */
6114 if (tcode == MINUS_EXPR)
6116 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6117 /* If OP1 was not easily negatable, the constant may be OP0. */
6118 if (TREE_CODE (op0) == INTEGER_CST)
6120 std::swap (op0, op1);
6121 std::swap (t1, t2);
6125 if (TREE_CODE (op1) != INTEGER_CST)
6126 break;
6128 /* If either OP1 or C are negative, this optimization is not safe for
6129 some of the division and remainder types while for others we need
6130 to change the code. */
6131 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6133 if (code == CEIL_DIV_EXPR)
6134 code = FLOOR_DIV_EXPR;
6135 else if (code == FLOOR_DIV_EXPR)
6136 code = CEIL_DIV_EXPR;
6137 else if (code != MULT_EXPR
6138 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6139 break;
6142 /* If it's a multiply or a division/modulus operation of a multiple
6143 of our constant, do the operation and verify it doesn't overflow. */
6144 if (code == MULT_EXPR
6145 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6147 op1 = const_binop (code, fold_convert (ctype, op1),
6148 fold_convert (ctype, c));
6149 /* We allow the constant to overflow with wrapping semantics. */
6150 if (op1 == 0
6151 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6152 break;
6154 else
6155 break;
6157 /* If we have an unsigned type, we cannot widen the operation since it
6158 will change the result if the original computation overflowed. */
6159 if (TYPE_UNSIGNED (ctype) && ctype != type)
6160 break;
6162 /* If we were able to eliminate our operation from the first side,
6163 apply our operation to the second side and reform the PLUS. */
6164 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6165 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6167 /* The last case is if we are a multiply. In that case, we can
6168 apply the distributive law to commute the multiply and addition
6169 if the multiplication of the constants doesn't overflow
6170 and overflow is defined. With undefined overflow
6171 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6172 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6173 return fold_build2 (tcode, ctype,
6174 fold_build2 (code, ctype,
6175 fold_convert (ctype, op0),
6176 fold_convert (ctype, c)),
6177 op1);
6179 break;
6181 case MULT_EXPR:
6182 /* We have a special case here if we are doing something like
6183 (C * 8) % 4 since we know that's zero. */
6184 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6185 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6186 /* If the multiplication can overflow we cannot optimize this. */
6187 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6188 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6189 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6191 *strict_overflow_p = true;
6192 return omit_one_operand (type, integer_zero_node, op0);
6195 /* ... fall through ... */
6197 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6198 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6199 /* If we can extract our operation from the LHS, do so and return a
6200 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6201 do something only if the second operand is a constant. */
6202 if (same_p
6203 && (t1 = extract_muldiv (op0, c, code, wide_type,
6204 strict_overflow_p)) != 0)
6205 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6206 fold_convert (ctype, op1));
6207 else if (tcode == MULT_EXPR && code == MULT_EXPR
6208 && (t1 = extract_muldiv (op1, c, code, wide_type,
6209 strict_overflow_p)) != 0)
6210 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6211 fold_convert (ctype, t1));
6212 else if (TREE_CODE (op1) != INTEGER_CST)
6213 return 0;
6215 /* If these are the same operation types, we can associate them
6216 assuming no overflow. */
6217 if (tcode == code)
6219 bool overflow_p = false;
6220 bool overflow_mul_p;
6221 signop sign = TYPE_SIGN (ctype);
6222 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6223 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6224 if (overflow_mul_p
6225 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6226 overflow_p = true;
6227 if (!overflow_p)
6228 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6229 wide_int_to_tree (ctype, mul));
6232 /* If these operations "cancel" each other, we have the main
6233 optimizations of this pass, which occur when either constant is a
6234 multiple of the other, in which case we replace this with either an
6235 operation or CODE or TCODE.
6237 If we have an unsigned type, we cannot do this since it will change
6238 the result if the original computation overflowed. */
6239 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6240 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6241 || (tcode == MULT_EXPR
6242 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6243 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6244 && code != MULT_EXPR)))
6246 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6248 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6249 *strict_overflow_p = true;
6250 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6251 fold_convert (ctype,
6252 const_binop (TRUNC_DIV_EXPR,
6253 op1, c)));
6255 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6257 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6258 *strict_overflow_p = true;
6259 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6260 fold_convert (ctype,
6261 const_binop (TRUNC_DIV_EXPR,
6262 c, op1)));
6265 break;
6267 default:
6268 break;
6271 return 0;
6274 /* Return a node which has the indicated constant VALUE (either 0 or
6275 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6276 and is of the indicated TYPE. */
6278 tree
6279 constant_boolean_node (bool value, tree type)
6281 if (type == integer_type_node)
6282 return value ? integer_one_node : integer_zero_node;
6283 else if (type == boolean_type_node)
6284 return value ? boolean_true_node : boolean_false_node;
6285 else if (TREE_CODE (type) == VECTOR_TYPE)
6286 return build_vector_from_val (type,
6287 build_int_cst (TREE_TYPE (type),
6288 value ? -1 : 0));
6289 else
6290 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6294 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6295 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6296 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6297 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6298 COND is the first argument to CODE; otherwise (as in the example
6299 given here), it is the second argument. TYPE is the type of the
6300 original expression. Return NULL_TREE if no simplification is
6301 possible. */
6303 static tree
6304 fold_binary_op_with_conditional_arg (location_t loc,
6305 enum tree_code code,
6306 tree type, tree op0, tree op1,
6307 tree cond, tree arg, int cond_first_p)
6309 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6310 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6311 tree test, true_value, false_value;
6312 tree lhs = NULL_TREE;
6313 tree rhs = NULL_TREE;
6314 enum tree_code cond_code = COND_EXPR;
6316 if (TREE_CODE (cond) == COND_EXPR
6317 || TREE_CODE (cond) == VEC_COND_EXPR)
6319 test = TREE_OPERAND (cond, 0);
6320 true_value = TREE_OPERAND (cond, 1);
6321 false_value = TREE_OPERAND (cond, 2);
6322 /* If this operand throws an expression, then it does not make
6323 sense to try to perform a logical or arithmetic operation
6324 involving it. */
6325 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6326 lhs = true_value;
6327 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6328 rhs = false_value;
6330 else
6332 tree testtype = TREE_TYPE (cond);
6333 test = cond;
6334 true_value = constant_boolean_node (true, testtype);
6335 false_value = constant_boolean_node (false, testtype);
6338 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6339 cond_code = VEC_COND_EXPR;
6341 /* This transformation is only worthwhile if we don't have to wrap ARG
6342 in a SAVE_EXPR and the operation can be simplified without recursing
6343 on at least one of the branches once its pushed inside the COND_EXPR. */
6344 if (!TREE_CONSTANT (arg)
6345 && (TREE_SIDE_EFFECTS (arg)
6346 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6347 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6348 return NULL_TREE;
6350 arg = fold_convert_loc (loc, arg_type, arg);
6351 if (lhs == 0)
6353 true_value = fold_convert_loc (loc, cond_type, true_value);
6354 if (cond_first_p)
6355 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6356 else
6357 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6359 if (rhs == 0)
6361 false_value = fold_convert_loc (loc, cond_type, false_value);
6362 if (cond_first_p)
6363 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6364 else
6365 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6368 /* Check that we have simplified at least one of the branches. */
6369 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6370 return NULL_TREE;
6372 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6376 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6378 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6379 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6380 ADDEND is the same as X.
6382 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6383 and finite. The problematic cases are when X is zero, and its mode
6384 has signed zeros. In the case of rounding towards -infinity,
6385 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6386 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6388 bool
6389 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6391 if (!real_zerop (addend))
6392 return false;
6394 /* Don't allow the fold with -fsignaling-nans. */
6395 if (HONOR_SNANS (element_mode (type)))
6396 return false;
6398 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6399 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6400 return true;
6402 /* In a vector or complex, we would need to check the sign of all zeros. */
6403 if (TREE_CODE (addend) != REAL_CST)
6404 return false;
6406 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6407 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6408 negate = !negate;
6410 /* The mode has signed zeros, and we have to honor their sign.
6411 In this situation, there is only one case we can return true for.
6412 X - 0 is the same as X unless rounding towards -infinity is
6413 supported. */
6414 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6417 /* Subroutine of fold() that optimizes comparisons of a division by
6418 a nonzero integer constant against an integer constant, i.e.
6419 X/C1 op C2.
6421 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6422 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6423 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6425 The function returns the constant folded tree if a simplification
6426 can be made, and NULL_TREE otherwise. */
6428 static tree
6429 fold_div_compare (location_t loc,
6430 enum tree_code code, tree type, tree arg0, tree arg1)
6432 tree prod, tmp, hi, lo;
6433 tree arg00 = TREE_OPERAND (arg0, 0);
6434 tree arg01 = TREE_OPERAND (arg0, 1);
6435 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6436 bool neg_overflow = false;
6437 bool overflow;
6439 /* We have to do this the hard way to detect unsigned overflow.
6440 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6441 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6442 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6443 neg_overflow = false;
6445 if (sign == UNSIGNED)
6447 tmp = int_const_binop (MINUS_EXPR, arg01,
6448 build_int_cst (TREE_TYPE (arg01), 1));
6449 lo = prod;
6451 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6452 val = wi::add (prod, tmp, sign, &overflow);
6453 hi = force_fit_type (TREE_TYPE (arg00), val,
6454 -1, overflow | TREE_OVERFLOW (prod));
6456 else if (tree_int_cst_sgn (arg01) >= 0)
6458 tmp = int_const_binop (MINUS_EXPR, arg01,
6459 build_int_cst (TREE_TYPE (arg01), 1));
6460 switch (tree_int_cst_sgn (arg1))
6462 case -1:
6463 neg_overflow = true;
6464 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6465 hi = prod;
6466 break;
6468 case 0:
6469 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6470 hi = tmp;
6471 break;
6473 case 1:
6474 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6475 lo = prod;
6476 break;
6478 default:
6479 gcc_unreachable ();
6482 else
6484 /* A negative divisor reverses the relational operators. */
6485 code = swap_tree_comparison (code);
6487 tmp = int_const_binop (PLUS_EXPR, arg01,
6488 build_int_cst (TREE_TYPE (arg01), 1));
6489 switch (tree_int_cst_sgn (arg1))
6491 case -1:
6492 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6493 lo = prod;
6494 break;
6496 case 0:
6497 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6498 lo = tmp;
6499 break;
6501 case 1:
6502 neg_overflow = true;
6503 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6504 hi = prod;
6505 break;
6507 default:
6508 gcc_unreachable ();
6512 switch (code)
6514 case EQ_EXPR:
6515 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6516 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6517 if (TREE_OVERFLOW (hi))
6518 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6519 if (TREE_OVERFLOW (lo))
6520 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6521 return build_range_check (loc, type, arg00, 1, lo, hi);
6523 case NE_EXPR:
6524 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6525 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6526 if (TREE_OVERFLOW (hi))
6527 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6528 if (TREE_OVERFLOW (lo))
6529 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6530 return build_range_check (loc, type, arg00, 0, lo, hi);
6532 case LT_EXPR:
6533 if (TREE_OVERFLOW (lo))
6535 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6536 return omit_one_operand_loc (loc, type, tmp, arg00);
6538 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6540 case LE_EXPR:
6541 if (TREE_OVERFLOW (hi))
6543 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6544 return omit_one_operand_loc (loc, type, tmp, arg00);
6546 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6548 case GT_EXPR:
6549 if (TREE_OVERFLOW (hi))
6551 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6552 return omit_one_operand_loc (loc, type, tmp, arg00);
6554 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6556 case GE_EXPR:
6557 if (TREE_OVERFLOW (lo))
6559 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6560 return omit_one_operand_loc (loc, type, tmp, arg00);
6562 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6564 default:
6565 break;
6568 return NULL_TREE;
6572 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6573 equality/inequality test, then return a simplified form of the test
6574 using a sign testing. Otherwise return NULL. TYPE is the desired
6575 result type. */
6577 static tree
6578 fold_single_bit_test_into_sign_test (location_t loc,
6579 enum tree_code code, tree arg0, tree arg1,
6580 tree result_type)
6582 /* If this is testing a single bit, we can optimize the test. */
6583 if ((code == NE_EXPR || code == EQ_EXPR)
6584 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6585 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6587 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6588 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6589 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6591 if (arg00 != NULL_TREE
6592 /* This is only a win if casting to a signed type is cheap,
6593 i.e. when arg00's type is not a partial mode. */
6594 && TYPE_PRECISION (TREE_TYPE (arg00))
6595 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6597 tree stype = signed_type_for (TREE_TYPE (arg00));
6598 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6599 result_type,
6600 fold_convert_loc (loc, stype, arg00),
6601 build_int_cst (stype, 0));
6605 return NULL_TREE;
6608 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6609 equality/inequality test, then return a simplified form of
6610 the test using shifts and logical operations. Otherwise return
6611 NULL. TYPE is the desired result type. */
6613 tree
6614 fold_single_bit_test (location_t loc, enum tree_code code,
6615 tree arg0, tree arg1, tree result_type)
6617 /* If this is testing a single bit, we can optimize the test. */
6618 if ((code == NE_EXPR || code == EQ_EXPR)
6619 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6620 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6622 tree inner = TREE_OPERAND (arg0, 0);
6623 tree type = TREE_TYPE (arg0);
6624 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6625 machine_mode operand_mode = TYPE_MODE (type);
6626 int ops_unsigned;
6627 tree signed_type, unsigned_type, intermediate_type;
6628 tree tem, one;
6630 /* First, see if we can fold the single bit test into a sign-bit
6631 test. */
6632 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6633 result_type);
6634 if (tem)
6635 return tem;
6637 /* Otherwise we have (A & C) != 0 where C is a single bit,
6638 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6639 Similarly for (A & C) == 0. */
6641 /* If INNER is a right shift of a constant and it plus BITNUM does
6642 not overflow, adjust BITNUM and INNER. */
6643 if (TREE_CODE (inner) == RSHIFT_EXPR
6644 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6645 && bitnum < TYPE_PRECISION (type)
6646 && wi::ltu_p (TREE_OPERAND (inner, 1),
6647 TYPE_PRECISION (type) - bitnum))
6649 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6650 inner = TREE_OPERAND (inner, 0);
6653 /* If we are going to be able to omit the AND below, we must do our
6654 operations as unsigned. If we must use the AND, we have a choice.
6655 Normally unsigned is faster, but for some machines signed is. */
6656 #ifdef LOAD_EXTEND_OP
6657 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6658 && !flag_syntax_only) ? 0 : 1;
6659 #else
6660 ops_unsigned = 1;
6661 #endif
6663 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6664 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6665 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6666 inner = fold_convert_loc (loc, intermediate_type, inner);
6668 if (bitnum != 0)
6669 inner = build2 (RSHIFT_EXPR, intermediate_type,
6670 inner, size_int (bitnum));
6672 one = build_int_cst (intermediate_type, 1);
6674 if (code == EQ_EXPR)
6675 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6677 /* Put the AND last so it can combine with more things. */
6678 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6680 /* Make sure to return the proper type. */
6681 inner = fold_convert_loc (loc, result_type, inner);
6683 return inner;
6685 return NULL_TREE;
6688 /* Check whether we are allowed to reorder operands arg0 and arg1,
6689 such that the evaluation of arg1 occurs before arg0. */
6691 static bool
6692 reorder_operands_p (const_tree arg0, const_tree arg1)
6694 if (! flag_evaluation_order)
6695 return true;
6696 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6697 return true;
6698 return ! TREE_SIDE_EFFECTS (arg0)
6699 && ! TREE_SIDE_EFFECTS (arg1);
6702 /* Test whether it is preferable two swap two operands, ARG0 and
6703 ARG1, for example because ARG0 is an integer constant and ARG1
6704 isn't. If REORDER is true, only recommend swapping if we can
6705 evaluate the operands in reverse order. */
6707 bool
6708 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6710 if (CONSTANT_CLASS_P (arg1))
6711 return 0;
6712 if (CONSTANT_CLASS_P (arg0))
6713 return 1;
6715 STRIP_NOPS (arg0);
6716 STRIP_NOPS (arg1);
6718 if (TREE_CONSTANT (arg1))
6719 return 0;
6720 if (TREE_CONSTANT (arg0))
6721 return 1;
6723 if (reorder && flag_evaluation_order
6724 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6725 return 0;
6727 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6728 for commutative and comparison operators. Ensuring a canonical
6729 form allows the optimizers to find additional redundancies without
6730 having to explicitly check for both orderings. */
6731 if (TREE_CODE (arg0) == SSA_NAME
6732 && TREE_CODE (arg1) == SSA_NAME
6733 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6734 return 1;
6736 /* Put SSA_NAMEs last. */
6737 if (TREE_CODE (arg1) == SSA_NAME)
6738 return 0;
6739 if (TREE_CODE (arg0) == SSA_NAME)
6740 return 1;
6742 /* Put variables last. */
6743 if (DECL_P (arg1))
6744 return 0;
6745 if (DECL_P (arg0))
6746 return 1;
6748 return 0;
6751 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6752 ARG0 is extended to a wider type. */
6754 static tree
6755 fold_widened_comparison (location_t loc, enum tree_code code,
6756 tree type, tree arg0, tree arg1)
6758 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6759 tree arg1_unw;
6760 tree shorter_type, outer_type;
6761 tree min, max;
6762 bool above, below;
6764 if (arg0_unw == arg0)
6765 return NULL_TREE;
6766 shorter_type = TREE_TYPE (arg0_unw);
6768 /* Disable this optimization if we're casting a function pointer
6769 type on targets that require function pointer canonicalization. */
6770 if (targetm.have_canonicalize_funcptr_for_compare ()
6771 && TREE_CODE (shorter_type) == POINTER_TYPE
6772 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6773 return NULL_TREE;
6775 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6776 return NULL_TREE;
6778 arg1_unw = get_unwidened (arg1, NULL_TREE);
6780 /* If possible, express the comparison in the shorter mode. */
6781 if ((code == EQ_EXPR || code == NE_EXPR
6782 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6783 && (TREE_TYPE (arg1_unw) == shorter_type
6784 || ((TYPE_PRECISION (shorter_type)
6785 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6786 && (TYPE_UNSIGNED (shorter_type)
6787 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6788 || (TREE_CODE (arg1_unw) == INTEGER_CST
6789 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6790 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6791 && int_fits_type_p (arg1_unw, shorter_type))))
6792 return fold_build2_loc (loc, code, type, arg0_unw,
6793 fold_convert_loc (loc, shorter_type, arg1_unw));
6795 if (TREE_CODE (arg1_unw) != INTEGER_CST
6796 || TREE_CODE (shorter_type) != INTEGER_TYPE
6797 || !int_fits_type_p (arg1_unw, shorter_type))
6798 return NULL_TREE;
6800 /* If we are comparing with the integer that does not fit into the range
6801 of the shorter type, the result is known. */
6802 outer_type = TREE_TYPE (arg1_unw);
6803 min = lower_bound_in_type (outer_type, shorter_type);
6804 max = upper_bound_in_type (outer_type, shorter_type);
6806 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6807 max, arg1_unw));
6808 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6809 arg1_unw, min));
6811 switch (code)
6813 case EQ_EXPR:
6814 if (above || below)
6815 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6816 break;
6818 case NE_EXPR:
6819 if (above || below)
6820 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6821 break;
6823 case LT_EXPR:
6824 case LE_EXPR:
6825 if (above)
6826 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6827 else if (below)
6828 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6830 case GT_EXPR:
6831 case GE_EXPR:
6832 if (above)
6833 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6834 else if (below)
6835 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6837 default:
6838 break;
6841 return NULL_TREE;
6844 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6845 ARG0 just the signedness is changed. */
6847 static tree
6848 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6849 tree arg0, tree arg1)
6851 tree arg0_inner;
6852 tree inner_type, outer_type;
6854 if (!CONVERT_EXPR_P (arg0))
6855 return NULL_TREE;
6857 outer_type = TREE_TYPE (arg0);
6858 arg0_inner = TREE_OPERAND (arg0, 0);
6859 inner_type = TREE_TYPE (arg0_inner);
6861 /* Disable this optimization if we're casting a function pointer
6862 type on targets that require function pointer canonicalization. */
6863 if (targetm.have_canonicalize_funcptr_for_compare ()
6864 && TREE_CODE (inner_type) == POINTER_TYPE
6865 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6866 return NULL_TREE;
6868 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6869 return NULL_TREE;
6871 if (TREE_CODE (arg1) != INTEGER_CST
6872 && !(CONVERT_EXPR_P (arg1)
6873 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6874 return NULL_TREE;
6876 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6877 && code != NE_EXPR
6878 && code != EQ_EXPR)
6879 return NULL_TREE;
6881 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6882 return NULL_TREE;
6884 if (TREE_CODE (arg1) == INTEGER_CST)
6885 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
6886 TREE_OVERFLOW (arg1));
6887 else
6888 arg1 = fold_convert_loc (loc, inner_type, arg1);
6890 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6894 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6895 means A >= Y && A != MAX, but in this case we know that
6896 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6898 static tree
6899 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6901 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6903 if (TREE_CODE (bound) == LT_EXPR)
6904 a = TREE_OPERAND (bound, 0);
6905 else if (TREE_CODE (bound) == GT_EXPR)
6906 a = TREE_OPERAND (bound, 1);
6907 else
6908 return NULL_TREE;
6910 typea = TREE_TYPE (a);
6911 if (!INTEGRAL_TYPE_P (typea)
6912 && !POINTER_TYPE_P (typea))
6913 return NULL_TREE;
6915 if (TREE_CODE (ineq) == LT_EXPR)
6917 a1 = TREE_OPERAND (ineq, 1);
6918 y = TREE_OPERAND (ineq, 0);
6920 else if (TREE_CODE (ineq) == GT_EXPR)
6922 a1 = TREE_OPERAND (ineq, 0);
6923 y = TREE_OPERAND (ineq, 1);
6925 else
6926 return NULL_TREE;
6928 if (TREE_TYPE (a1) != typea)
6929 return NULL_TREE;
6931 if (POINTER_TYPE_P (typea))
6933 /* Convert the pointer types into integer before taking the difference. */
6934 tree ta = fold_convert_loc (loc, ssizetype, a);
6935 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6936 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6938 else
6939 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6941 if (!diff || !integer_onep (diff))
6942 return NULL_TREE;
6944 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6947 /* Fold a sum or difference of at least one multiplication.
6948 Returns the folded tree or NULL if no simplification could be made. */
6950 static tree
6951 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6952 tree arg0, tree arg1)
6954 tree arg00, arg01, arg10, arg11;
6955 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6957 /* (A * C) +- (B * C) -> (A+-B) * C.
6958 (A * C) +- A -> A * (C+-1).
6959 We are most concerned about the case where C is a constant,
6960 but other combinations show up during loop reduction. Since
6961 it is not difficult, try all four possibilities. */
6963 if (TREE_CODE (arg0) == MULT_EXPR)
6965 arg00 = TREE_OPERAND (arg0, 0);
6966 arg01 = TREE_OPERAND (arg0, 1);
6968 else if (TREE_CODE (arg0) == INTEGER_CST)
6970 arg00 = build_one_cst (type);
6971 arg01 = arg0;
6973 else
6975 /* We cannot generate constant 1 for fract. */
6976 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6977 return NULL_TREE;
6978 arg00 = arg0;
6979 arg01 = build_one_cst (type);
6981 if (TREE_CODE (arg1) == MULT_EXPR)
6983 arg10 = TREE_OPERAND (arg1, 0);
6984 arg11 = TREE_OPERAND (arg1, 1);
6986 else if (TREE_CODE (arg1) == INTEGER_CST)
6988 arg10 = build_one_cst (type);
6989 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
6990 the purpose of this canonicalization. */
6991 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6992 && negate_expr_p (arg1)
6993 && code == PLUS_EXPR)
6995 arg11 = negate_expr (arg1);
6996 code = MINUS_EXPR;
6998 else
6999 arg11 = arg1;
7001 else
7003 /* We cannot generate constant 1 for fract. */
7004 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7005 return NULL_TREE;
7006 arg10 = arg1;
7007 arg11 = build_one_cst (type);
7009 same = NULL_TREE;
7011 if (operand_equal_p (arg01, arg11, 0))
7012 same = arg01, alt0 = arg00, alt1 = arg10;
7013 else if (operand_equal_p (arg00, arg10, 0))
7014 same = arg00, alt0 = arg01, alt1 = arg11;
7015 else if (operand_equal_p (arg00, arg11, 0))
7016 same = arg00, alt0 = arg01, alt1 = arg10;
7017 else if (operand_equal_p (arg01, arg10, 0))
7018 same = arg01, alt0 = arg00, alt1 = arg11;
7020 /* No identical multiplicands; see if we can find a common
7021 power-of-two factor in non-power-of-two multiplies. This
7022 can help in multi-dimensional array access. */
7023 else if (tree_fits_shwi_p (arg01)
7024 && tree_fits_shwi_p (arg11))
7026 HOST_WIDE_INT int01, int11, tmp;
7027 bool swap = false;
7028 tree maybe_same;
7029 int01 = tree_to_shwi (arg01);
7030 int11 = tree_to_shwi (arg11);
7032 /* Move min of absolute values to int11. */
7033 if (absu_hwi (int01) < absu_hwi (int11))
7035 tmp = int01, int01 = int11, int11 = tmp;
7036 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7037 maybe_same = arg01;
7038 swap = true;
7040 else
7041 maybe_same = arg11;
7043 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7044 /* The remainder should not be a constant, otherwise we
7045 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7046 increased the number of multiplications necessary. */
7047 && TREE_CODE (arg10) != INTEGER_CST)
7049 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7050 build_int_cst (TREE_TYPE (arg00),
7051 int01 / int11));
7052 alt1 = arg10;
7053 same = maybe_same;
7054 if (swap)
7055 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7059 if (same)
7060 return fold_build2_loc (loc, MULT_EXPR, type,
7061 fold_build2_loc (loc, code, type,
7062 fold_convert_loc (loc, type, alt0),
7063 fold_convert_loc (loc, type, alt1)),
7064 fold_convert_loc (loc, type, same));
7066 return NULL_TREE;
7069 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7070 specified by EXPR into the buffer PTR of length LEN bytes.
7071 Return the number of bytes placed in the buffer, or zero
7072 upon failure. */
7074 static int
7075 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7077 tree type = TREE_TYPE (expr);
7078 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7079 int byte, offset, word, words;
7080 unsigned char value;
7082 if ((off == -1 && total_bytes > len)
7083 || off >= total_bytes)
7084 return 0;
7085 if (off == -1)
7086 off = 0;
7087 words = total_bytes / UNITS_PER_WORD;
7089 for (byte = 0; byte < total_bytes; byte++)
7091 int bitpos = byte * BITS_PER_UNIT;
7092 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7093 number of bytes. */
7094 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7096 if (total_bytes > UNITS_PER_WORD)
7098 word = byte / UNITS_PER_WORD;
7099 if (WORDS_BIG_ENDIAN)
7100 word = (words - 1) - word;
7101 offset = word * UNITS_PER_WORD;
7102 if (BYTES_BIG_ENDIAN)
7103 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7104 else
7105 offset += byte % UNITS_PER_WORD;
7107 else
7108 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7109 if (offset >= off
7110 && offset - off < len)
7111 ptr[offset - off] = value;
7113 return MIN (len, total_bytes - off);
7117 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7118 specified by EXPR into the buffer PTR of length LEN bytes.
7119 Return the number of bytes placed in the buffer, or zero
7120 upon failure. */
7122 static int
7123 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7125 tree type = TREE_TYPE (expr);
7126 machine_mode mode = TYPE_MODE (type);
7127 int total_bytes = GET_MODE_SIZE (mode);
7128 FIXED_VALUE_TYPE value;
7129 tree i_value, i_type;
7131 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7132 return 0;
7134 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7136 if (NULL_TREE == i_type
7137 || TYPE_PRECISION (i_type) != total_bytes)
7138 return 0;
7140 value = TREE_FIXED_CST (expr);
7141 i_value = double_int_to_tree (i_type, value.data);
7143 return native_encode_int (i_value, ptr, len, off);
7147 /* Subroutine of native_encode_expr. Encode the REAL_CST
7148 specified by EXPR into the buffer PTR of length LEN bytes.
7149 Return the number of bytes placed in the buffer, or zero
7150 upon failure. */
7152 static int
7153 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7155 tree type = TREE_TYPE (expr);
7156 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7157 int byte, offset, word, words, bitpos;
7158 unsigned char value;
7160 /* There are always 32 bits in each long, no matter the size of
7161 the hosts long. We handle floating point representations with
7162 up to 192 bits. */
7163 long tmp[6];
7165 if ((off == -1 && total_bytes > len)
7166 || off >= total_bytes)
7167 return 0;
7168 if (off == -1)
7169 off = 0;
7170 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7172 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7174 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7175 bitpos += BITS_PER_UNIT)
7177 byte = (bitpos / BITS_PER_UNIT) & 3;
7178 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7180 if (UNITS_PER_WORD < 4)
7182 word = byte / UNITS_PER_WORD;
7183 if (WORDS_BIG_ENDIAN)
7184 word = (words - 1) - word;
7185 offset = word * UNITS_PER_WORD;
7186 if (BYTES_BIG_ENDIAN)
7187 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7188 else
7189 offset += byte % UNITS_PER_WORD;
7191 else
7192 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7193 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7194 if (offset >= off
7195 && offset - off < len)
7196 ptr[offset - off] = value;
7198 return MIN (len, total_bytes - off);
7201 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7202 specified by EXPR into the buffer PTR of length LEN bytes.
7203 Return the number of bytes placed in the buffer, or zero
7204 upon failure. */
7206 static int
7207 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7209 int rsize, isize;
7210 tree part;
7212 part = TREE_REALPART (expr);
7213 rsize = native_encode_expr (part, ptr, len, off);
7214 if (off == -1
7215 && rsize == 0)
7216 return 0;
7217 part = TREE_IMAGPART (expr);
7218 if (off != -1)
7219 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7220 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7221 if (off == -1
7222 && isize != rsize)
7223 return 0;
7224 return rsize + isize;
7228 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7229 specified by EXPR into the buffer PTR of length LEN bytes.
7230 Return the number of bytes placed in the buffer, or zero
7231 upon failure. */
7233 static int
7234 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7236 unsigned i, count;
7237 int size, offset;
7238 tree itype, elem;
7240 offset = 0;
7241 count = VECTOR_CST_NELTS (expr);
7242 itype = TREE_TYPE (TREE_TYPE (expr));
7243 size = GET_MODE_SIZE (TYPE_MODE (itype));
7244 for (i = 0; i < count; i++)
7246 if (off >= size)
7248 off -= size;
7249 continue;
7251 elem = VECTOR_CST_ELT (expr, i);
7252 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7253 if ((off == -1 && res != size)
7254 || res == 0)
7255 return 0;
7256 offset += res;
7257 if (offset >= len)
7258 return offset;
7259 if (off != -1)
7260 off = 0;
7262 return offset;
7266 /* Subroutine of native_encode_expr. Encode the STRING_CST
7267 specified by EXPR into the buffer PTR of length LEN bytes.
7268 Return the number of bytes placed in the buffer, or zero
7269 upon failure. */
7271 static int
7272 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7274 tree type = TREE_TYPE (expr);
7275 HOST_WIDE_INT total_bytes;
7277 if (TREE_CODE (type) != ARRAY_TYPE
7278 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7279 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7280 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7281 return 0;
7282 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7283 if ((off == -1 && total_bytes > len)
7284 || off >= total_bytes)
7285 return 0;
7286 if (off == -1)
7287 off = 0;
7288 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7290 int written = 0;
7291 if (off < TREE_STRING_LENGTH (expr))
7293 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7294 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7296 memset (ptr + written, 0,
7297 MIN (total_bytes - written, len - written));
7299 else
7300 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7301 return MIN (total_bytes - off, len);
7305 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7306 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7307 buffer PTR of length LEN bytes. If OFF is not -1 then start
7308 the encoding at byte offset OFF and encode at most LEN bytes.
7309 Return the number of bytes placed in the buffer, or zero upon failure. */
7312 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7314 switch (TREE_CODE (expr))
7316 case INTEGER_CST:
7317 return native_encode_int (expr, ptr, len, off);
7319 case REAL_CST:
7320 return native_encode_real (expr, ptr, len, off);
7322 case FIXED_CST:
7323 return native_encode_fixed (expr, ptr, len, off);
7325 case COMPLEX_CST:
7326 return native_encode_complex (expr, ptr, len, off);
7328 case VECTOR_CST:
7329 return native_encode_vector (expr, ptr, len, off);
7331 case STRING_CST:
7332 return native_encode_string (expr, ptr, len, off);
7334 default:
7335 return 0;
7340 /* Subroutine of native_interpret_expr. Interpret the contents of
7341 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7342 If the buffer cannot be interpreted, return NULL_TREE. */
7344 static tree
7345 native_interpret_int (tree type, const unsigned char *ptr, int len)
7347 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7349 if (total_bytes > len
7350 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7351 return NULL_TREE;
7353 wide_int result = wi::from_buffer (ptr, total_bytes);
7355 return wide_int_to_tree (type, result);
7359 /* Subroutine of native_interpret_expr. Interpret the contents of
7360 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7361 If the buffer cannot be interpreted, return NULL_TREE. */
7363 static tree
7364 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7366 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7367 double_int result;
7368 FIXED_VALUE_TYPE fixed_value;
7370 if (total_bytes > len
7371 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7372 return NULL_TREE;
7374 result = double_int::from_buffer (ptr, total_bytes);
7375 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7377 return build_fixed (type, fixed_value);
7381 /* Subroutine of native_interpret_expr. Interpret the contents of
7382 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7383 If the buffer cannot be interpreted, return NULL_TREE. */
7385 static tree
7386 native_interpret_real (tree type, const unsigned char *ptr, int len)
7388 machine_mode mode = TYPE_MODE (type);
7389 int total_bytes = GET_MODE_SIZE (mode);
7390 int byte, offset, word, words, bitpos;
7391 unsigned char value;
7392 /* There are always 32 bits in each long, no matter the size of
7393 the hosts long. We handle floating point representations with
7394 up to 192 bits. */
7395 REAL_VALUE_TYPE r;
7396 long tmp[6];
7398 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7399 if (total_bytes > len || total_bytes > 24)
7400 return NULL_TREE;
7401 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7403 memset (tmp, 0, sizeof (tmp));
7404 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7405 bitpos += BITS_PER_UNIT)
7407 byte = (bitpos / BITS_PER_UNIT) & 3;
7408 if (UNITS_PER_WORD < 4)
7410 word = byte / UNITS_PER_WORD;
7411 if (WORDS_BIG_ENDIAN)
7412 word = (words - 1) - word;
7413 offset = word * UNITS_PER_WORD;
7414 if (BYTES_BIG_ENDIAN)
7415 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7416 else
7417 offset += byte % UNITS_PER_WORD;
7419 else
7420 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7421 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7423 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7426 real_from_target (&r, tmp, mode);
7427 return build_real (type, r);
7431 /* Subroutine of native_interpret_expr. Interpret the contents of
7432 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7433 If the buffer cannot be interpreted, return NULL_TREE. */
7435 static tree
7436 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7438 tree etype, rpart, ipart;
7439 int size;
7441 etype = TREE_TYPE (type);
7442 size = GET_MODE_SIZE (TYPE_MODE (etype));
7443 if (size * 2 > len)
7444 return NULL_TREE;
7445 rpart = native_interpret_expr (etype, ptr, size);
7446 if (!rpart)
7447 return NULL_TREE;
7448 ipart = native_interpret_expr (etype, ptr+size, size);
7449 if (!ipart)
7450 return NULL_TREE;
7451 return build_complex (type, rpart, ipart);
7455 /* Subroutine of native_interpret_expr. Interpret the contents of
7456 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7457 If the buffer cannot be interpreted, return NULL_TREE. */
7459 static tree
7460 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7462 tree etype, elem;
7463 int i, size, count;
7464 tree *elements;
7466 etype = TREE_TYPE (type);
7467 size = GET_MODE_SIZE (TYPE_MODE (etype));
7468 count = TYPE_VECTOR_SUBPARTS (type);
7469 if (size * count > len)
7470 return NULL_TREE;
7472 elements = XALLOCAVEC (tree, count);
7473 for (i = count - 1; i >= 0; i--)
7475 elem = native_interpret_expr (etype, ptr+(i*size), size);
7476 if (!elem)
7477 return NULL_TREE;
7478 elements[i] = elem;
7480 return build_vector (type, elements);
7484 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7485 the buffer PTR of length LEN as a constant of type TYPE. For
7486 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7487 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7488 return NULL_TREE. */
7490 tree
7491 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7493 switch (TREE_CODE (type))
7495 case INTEGER_TYPE:
7496 case ENUMERAL_TYPE:
7497 case BOOLEAN_TYPE:
7498 case POINTER_TYPE:
7499 case REFERENCE_TYPE:
7500 return native_interpret_int (type, ptr, len);
7502 case REAL_TYPE:
7503 return native_interpret_real (type, ptr, len);
7505 case FIXED_POINT_TYPE:
7506 return native_interpret_fixed (type, ptr, len);
7508 case COMPLEX_TYPE:
7509 return native_interpret_complex (type, ptr, len);
7511 case VECTOR_TYPE:
7512 return native_interpret_vector (type, ptr, len);
7514 default:
7515 return NULL_TREE;
7519 /* Returns true if we can interpret the contents of a native encoding
7520 as TYPE. */
7522 static bool
7523 can_native_interpret_type_p (tree type)
7525 switch (TREE_CODE (type))
7527 case INTEGER_TYPE:
7528 case ENUMERAL_TYPE:
7529 case BOOLEAN_TYPE:
7530 case POINTER_TYPE:
7531 case REFERENCE_TYPE:
7532 case FIXED_POINT_TYPE:
7533 case REAL_TYPE:
7534 case COMPLEX_TYPE:
7535 case VECTOR_TYPE:
7536 return true;
7537 default:
7538 return false;
7542 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7543 TYPE at compile-time. If we're unable to perform the conversion
7544 return NULL_TREE. */
7546 static tree
7547 fold_view_convert_expr (tree type, tree expr)
7549 /* We support up to 512-bit values (for V8DFmode). */
7550 unsigned char buffer[64];
7551 int len;
7553 /* Check that the host and target are sane. */
7554 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7555 return NULL_TREE;
7557 len = native_encode_expr (expr, buffer, sizeof (buffer));
7558 if (len == 0)
7559 return NULL_TREE;
7561 return native_interpret_expr (type, buffer, len);
7564 /* Build an expression for the address of T. Folds away INDIRECT_REF
7565 to avoid confusing the gimplify process. */
7567 tree
7568 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7570 /* The size of the object is not relevant when talking about its address. */
7571 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7572 t = TREE_OPERAND (t, 0);
7574 if (TREE_CODE (t) == INDIRECT_REF)
7576 t = TREE_OPERAND (t, 0);
7578 if (TREE_TYPE (t) != ptrtype)
7579 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7581 else if (TREE_CODE (t) == MEM_REF
7582 && integer_zerop (TREE_OPERAND (t, 1)))
7583 return TREE_OPERAND (t, 0);
7584 else if (TREE_CODE (t) == MEM_REF
7585 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7586 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7587 TREE_OPERAND (t, 0),
7588 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7589 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7591 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7593 if (TREE_TYPE (t) != ptrtype)
7594 t = fold_convert_loc (loc, ptrtype, t);
7596 else
7597 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7599 return t;
7602 /* Build an expression for the address of T. */
7604 tree
7605 build_fold_addr_expr_loc (location_t loc, tree t)
7607 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7609 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7612 /* Fold a unary expression of code CODE and type TYPE with operand
7613 OP0. Return the folded expression if folding is successful.
7614 Otherwise, return NULL_TREE. */
7616 tree
7617 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7619 tree tem;
7620 tree arg0;
7621 enum tree_code_class kind = TREE_CODE_CLASS (code);
7623 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7624 && TREE_CODE_LENGTH (code) == 1);
7626 arg0 = op0;
7627 if (arg0)
7629 if (CONVERT_EXPR_CODE_P (code)
7630 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7632 /* Don't use STRIP_NOPS, because signedness of argument type
7633 matters. */
7634 STRIP_SIGN_NOPS (arg0);
7636 else
7638 /* Strip any conversions that don't change the mode. This
7639 is safe for every expression, except for a comparison
7640 expression because its signedness is derived from its
7641 operands.
7643 Note that this is done as an internal manipulation within
7644 the constant folder, in order to find the simplest
7645 representation of the arguments so that their form can be
7646 studied. In any cases, the appropriate type conversions
7647 should be put back in the tree that will get out of the
7648 constant folder. */
7649 STRIP_NOPS (arg0);
7652 if (CONSTANT_CLASS_P (arg0))
7654 tree tem = const_unop (code, type, arg0);
7655 if (tem)
7657 if (TREE_TYPE (tem) != type)
7658 tem = fold_convert_loc (loc, type, tem);
7659 return tem;
7664 tem = generic_simplify (loc, code, type, op0);
7665 if (tem)
7666 return tem;
7668 if (TREE_CODE_CLASS (code) == tcc_unary)
7670 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7671 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7672 fold_build1_loc (loc, code, type,
7673 fold_convert_loc (loc, TREE_TYPE (op0),
7674 TREE_OPERAND (arg0, 1))));
7675 else if (TREE_CODE (arg0) == COND_EXPR)
7677 tree arg01 = TREE_OPERAND (arg0, 1);
7678 tree arg02 = TREE_OPERAND (arg0, 2);
7679 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7680 arg01 = fold_build1_loc (loc, code, type,
7681 fold_convert_loc (loc,
7682 TREE_TYPE (op0), arg01));
7683 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7684 arg02 = fold_build1_loc (loc, code, type,
7685 fold_convert_loc (loc,
7686 TREE_TYPE (op0), arg02));
7687 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7688 arg01, arg02);
7690 /* If this was a conversion, and all we did was to move into
7691 inside the COND_EXPR, bring it back out. But leave it if
7692 it is a conversion from integer to integer and the
7693 result precision is no wider than a word since such a
7694 conversion is cheap and may be optimized away by combine,
7695 while it couldn't if it were outside the COND_EXPR. Then return
7696 so we don't get into an infinite recursion loop taking the
7697 conversion out and then back in. */
7699 if ((CONVERT_EXPR_CODE_P (code)
7700 || code == NON_LVALUE_EXPR)
7701 && TREE_CODE (tem) == COND_EXPR
7702 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7703 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7704 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7705 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7706 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7707 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7708 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7709 && (INTEGRAL_TYPE_P
7710 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7711 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7712 || flag_syntax_only))
7713 tem = build1_loc (loc, code, type,
7714 build3 (COND_EXPR,
7715 TREE_TYPE (TREE_OPERAND
7716 (TREE_OPERAND (tem, 1), 0)),
7717 TREE_OPERAND (tem, 0),
7718 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7719 TREE_OPERAND (TREE_OPERAND (tem, 2),
7720 0)));
7721 return tem;
7725 switch (code)
7727 case NON_LVALUE_EXPR:
7728 if (!maybe_lvalue_p (op0))
7729 return fold_convert_loc (loc, type, op0);
7730 return NULL_TREE;
7732 CASE_CONVERT:
7733 case FLOAT_EXPR:
7734 case FIX_TRUNC_EXPR:
7735 if (COMPARISON_CLASS_P (op0))
7737 /* If we have (type) (a CMP b) and type is an integral type, return
7738 new expression involving the new type. Canonicalize
7739 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7740 non-integral type.
7741 Do not fold the result as that would not simplify further, also
7742 folding again results in recursions. */
7743 if (TREE_CODE (type) == BOOLEAN_TYPE)
7744 return build2_loc (loc, TREE_CODE (op0), type,
7745 TREE_OPERAND (op0, 0),
7746 TREE_OPERAND (op0, 1));
7747 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7748 && TREE_CODE (type) != VECTOR_TYPE)
7749 return build3_loc (loc, COND_EXPR, type, op0,
7750 constant_boolean_node (true, type),
7751 constant_boolean_node (false, type));
7754 /* Handle (T *)&A.B.C for A being of type T and B and C
7755 living at offset zero. This occurs frequently in
7756 C++ upcasting and then accessing the base. */
7757 if (TREE_CODE (op0) == ADDR_EXPR
7758 && POINTER_TYPE_P (type)
7759 && handled_component_p (TREE_OPERAND (op0, 0)))
7761 HOST_WIDE_INT bitsize, bitpos;
7762 tree offset;
7763 machine_mode mode;
7764 int unsignedp, volatilep;
7765 tree base = TREE_OPERAND (op0, 0);
7766 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7767 &mode, &unsignedp, &volatilep, false);
7768 /* If the reference was to a (constant) zero offset, we can use
7769 the address of the base if it has the same base type
7770 as the result type and the pointer type is unqualified. */
7771 if (! offset && bitpos == 0
7772 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7773 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7774 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7775 return fold_convert_loc (loc, type,
7776 build_fold_addr_expr_loc (loc, base));
7779 if (TREE_CODE (op0) == MODIFY_EXPR
7780 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7781 /* Detect assigning a bitfield. */
7782 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7783 && DECL_BIT_FIELD
7784 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7786 /* Don't leave an assignment inside a conversion
7787 unless assigning a bitfield. */
7788 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7789 /* First do the assignment, then return converted constant. */
7790 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7791 TREE_NO_WARNING (tem) = 1;
7792 TREE_USED (tem) = 1;
7793 return tem;
7796 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7797 constants (if x has signed type, the sign bit cannot be set
7798 in c). This folds extension into the BIT_AND_EXPR.
7799 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7800 very likely don't have maximal range for their precision and this
7801 transformation effectively doesn't preserve non-maximal ranges. */
7802 if (TREE_CODE (type) == INTEGER_TYPE
7803 && TREE_CODE (op0) == BIT_AND_EXPR
7804 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7806 tree and_expr = op0;
7807 tree and0 = TREE_OPERAND (and_expr, 0);
7808 tree and1 = TREE_OPERAND (and_expr, 1);
7809 int change = 0;
7811 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7812 || (TYPE_PRECISION (type)
7813 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7814 change = 1;
7815 else if (TYPE_PRECISION (TREE_TYPE (and1))
7816 <= HOST_BITS_PER_WIDE_INT
7817 && tree_fits_uhwi_p (and1))
7819 unsigned HOST_WIDE_INT cst;
7821 cst = tree_to_uhwi (and1);
7822 cst &= HOST_WIDE_INT_M1U
7823 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7824 change = (cst == 0);
7825 #ifdef LOAD_EXTEND_OP
7826 if (change
7827 && !flag_syntax_only
7828 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7829 == ZERO_EXTEND))
7831 tree uns = unsigned_type_for (TREE_TYPE (and0));
7832 and0 = fold_convert_loc (loc, uns, and0);
7833 and1 = fold_convert_loc (loc, uns, and1);
7835 #endif
7837 if (change)
7839 tem = force_fit_type (type, wi::to_widest (and1), 0,
7840 TREE_OVERFLOW (and1));
7841 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7842 fold_convert_loc (loc, type, and0), tem);
7846 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7847 when one of the new casts will fold away. Conservatively we assume
7848 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7849 if (POINTER_TYPE_P (type)
7850 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7851 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7852 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7853 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7854 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7856 tree arg00 = TREE_OPERAND (arg0, 0);
7857 tree arg01 = TREE_OPERAND (arg0, 1);
7859 return fold_build_pointer_plus_loc
7860 (loc, fold_convert_loc (loc, type, arg00), arg01);
7863 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7864 of the same precision, and X is an integer type not narrower than
7865 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7866 if (INTEGRAL_TYPE_P (type)
7867 && TREE_CODE (op0) == BIT_NOT_EXPR
7868 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7869 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7870 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7872 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7873 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7874 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7875 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7876 fold_convert_loc (loc, type, tem));
7879 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7880 type of X and Y (integer types only). */
7881 if (INTEGRAL_TYPE_P (type)
7882 && TREE_CODE (op0) == MULT_EXPR
7883 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7884 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7886 /* Be careful not to introduce new overflows. */
7887 tree mult_type;
7888 if (TYPE_OVERFLOW_WRAPS (type))
7889 mult_type = type;
7890 else
7891 mult_type = unsigned_type_for (type);
7893 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7895 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7896 fold_convert_loc (loc, mult_type,
7897 TREE_OPERAND (op0, 0)),
7898 fold_convert_loc (loc, mult_type,
7899 TREE_OPERAND (op0, 1)));
7900 return fold_convert_loc (loc, type, tem);
7904 return NULL_TREE;
7906 case VIEW_CONVERT_EXPR:
7907 if (TREE_CODE (op0) == MEM_REF)
7908 return fold_build2_loc (loc, MEM_REF, type,
7909 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7911 return NULL_TREE;
7913 case NEGATE_EXPR:
7914 tem = fold_negate_expr (loc, arg0);
7915 if (tem)
7916 return fold_convert_loc (loc, type, tem);
7917 return NULL_TREE;
7919 case ABS_EXPR:
7920 /* Convert fabs((double)float) into (double)fabsf(float). */
7921 if (TREE_CODE (arg0) == NOP_EXPR
7922 && TREE_CODE (type) == REAL_TYPE)
7924 tree targ0 = strip_float_extensions (arg0);
7925 if (targ0 != arg0)
7926 return fold_convert_loc (loc, type,
7927 fold_build1_loc (loc, ABS_EXPR,
7928 TREE_TYPE (targ0),
7929 targ0));
7932 /* Strip sign ops from argument. */
7933 if (TREE_CODE (type) == REAL_TYPE)
7935 tem = fold_strip_sign_ops (arg0);
7936 if (tem)
7937 return fold_build1_loc (loc, ABS_EXPR, type,
7938 fold_convert_loc (loc, type, tem));
7940 return NULL_TREE;
7942 case CONJ_EXPR:
7943 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7944 return fold_convert_loc (loc, type, arg0);
7945 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7947 tree itype = TREE_TYPE (type);
7948 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
7949 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
7950 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
7951 negate_expr (ipart));
7953 if (TREE_CODE (arg0) == CONJ_EXPR)
7954 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
7955 return NULL_TREE;
7957 case BIT_NOT_EXPR:
7958 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7959 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7960 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7961 fold_convert_loc (loc, type,
7962 TREE_OPERAND (arg0, 0)))))
7963 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7964 fold_convert_loc (loc, type,
7965 TREE_OPERAND (arg0, 1)));
7966 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7967 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7968 fold_convert_loc (loc, type,
7969 TREE_OPERAND (arg0, 1)))))
7970 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7971 fold_convert_loc (loc, type,
7972 TREE_OPERAND (arg0, 0)), tem);
7974 return NULL_TREE;
7976 case TRUTH_NOT_EXPR:
7977 /* Note that the operand of this must be an int
7978 and its values must be 0 or 1.
7979 ("true" is a fixed value perhaps depending on the language,
7980 but we don't handle values other than 1 correctly yet.) */
7981 tem = fold_truth_not_expr (loc, arg0);
7982 if (!tem)
7983 return NULL_TREE;
7984 return fold_convert_loc (loc, type, tem);
7986 case REALPART_EXPR:
7987 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7988 return fold_convert_loc (loc, type, arg0);
7989 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7991 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7992 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
7993 fold_build1_loc (loc, REALPART_EXPR, itype,
7994 TREE_OPERAND (arg0, 0)),
7995 fold_build1_loc (loc, REALPART_EXPR, itype,
7996 TREE_OPERAND (arg0, 1)));
7997 return fold_convert_loc (loc, type, tem);
7999 if (TREE_CODE (arg0) == CONJ_EXPR)
8001 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8002 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8003 TREE_OPERAND (arg0, 0));
8004 return fold_convert_loc (loc, type, tem);
8006 if (TREE_CODE (arg0) == CALL_EXPR)
8008 tree fn = get_callee_fndecl (arg0);
8009 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8010 switch (DECL_FUNCTION_CODE (fn))
8012 CASE_FLT_FN (BUILT_IN_CEXPI):
8013 fn = mathfn_built_in (type, BUILT_IN_COS);
8014 if (fn)
8015 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8016 break;
8018 default:
8019 break;
8022 return NULL_TREE;
8024 case IMAGPART_EXPR:
8025 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8026 return build_zero_cst (type);
8027 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8029 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8030 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8031 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8032 TREE_OPERAND (arg0, 0)),
8033 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8034 TREE_OPERAND (arg0, 1)));
8035 return fold_convert_loc (loc, type, tem);
8037 if (TREE_CODE (arg0) == CONJ_EXPR)
8039 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8040 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8041 return fold_convert_loc (loc, type, negate_expr (tem));
8043 if (TREE_CODE (arg0) == CALL_EXPR)
8045 tree fn = get_callee_fndecl (arg0);
8046 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8047 switch (DECL_FUNCTION_CODE (fn))
8049 CASE_FLT_FN (BUILT_IN_CEXPI):
8050 fn = mathfn_built_in (type, BUILT_IN_SIN);
8051 if (fn)
8052 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8053 break;
8055 default:
8056 break;
8059 return NULL_TREE;
8061 case INDIRECT_REF:
8062 /* Fold *&X to X if X is an lvalue. */
8063 if (TREE_CODE (op0) == ADDR_EXPR)
8065 tree op00 = TREE_OPERAND (op0, 0);
8066 if ((TREE_CODE (op00) == VAR_DECL
8067 || TREE_CODE (op00) == PARM_DECL
8068 || TREE_CODE (op00) == RESULT_DECL)
8069 && !TREE_READONLY (op00))
8070 return op00;
8072 return NULL_TREE;
8074 default:
8075 return NULL_TREE;
8076 } /* switch (code) */
8080 /* If the operation was a conversion do _not_ mark a resulting constant
8081 with TREE_OVERFLOW if the original constant was not. These conversions
8082 have implementation defined behavior and retaining the TREE_OVERFLOW
8083 flag here would confuse later passes such as VRP. */
8084 tree
8085 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8086 tree type, tree op0)
8088 tree res = fold_unary_loc (loc, code, type, op0);
8089 if (res
8090 && TREE_CODE (res) == INTEGER_CST
8091 && TREE_CODE (op0) == INTEGER_CST
8092 && CONVERT_EXPR_CODE_P (code))
8093 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8095 return res;
8098 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8099 operands OP0 and OP1. LOC is the location of the resulting expression.
8100 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8101 Return the folded expression if folding is successful. Otherwise,
8102 return NULL_TREE. */
8103 static tree
8104 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8105 tree arg0, tree arg1, tree op0, tree op1)
8107 tree tem;
8109 /* We only do these simplifications if we are optimizing. */
8110 if (!optimize)
8111 return NULL_TREE;
8113 /* Check for things like (A || B) && (A || C). We can convert this
8114 to A || (B && C). Note that either operator can be any of the four
8115 truth and/or operations and the transformation will still be
8116 valid. Also note that we only care about order for the
8117 ANDIF and ORIF operators. If B contains side effects, this
8118 might change the truth-value of A. */
8119 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8120 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8121 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8122 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8123 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8124 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8126 tree a00 = TREE_OPERAND (arg0, 0);
8127 tree a01 = TREE_OPERAND (arg0, 1);
8128 tree a10 = TREE_OPERAND (arg1, 0);
8129 tree a11 = TREE_OPERAND (arg1, 1);
8130 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8131 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8132 && (code == TRUTH_AND_EXPR
8133 || code == TRUTH_OR_EXPR));
8135 if (operand_equal_p (a00, a10, 0))
8136 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8137 fold_build2_loc (loc, code, type, a01, a11));
8138 else if (commutative && operand_equal_p (a00, a11, 0))
8139 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8140 fold_build2_loc (loc, code, type, a01, a10));
8141 else if (commutative && operand_equal_p (a01, a10, 0))
8142 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8143 fold_build2_loc (loc, code, type, a00, a11));
8145 /* This case if tricky because we must either have commutative
8146 operators or else A10 must not have side-effects. */
8148 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8149 && operand_equal_p (a01, a11, 0))
8150 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8151 fold_build2_loc (loc, code, type, a00, a10),
8152 a01);
8155 /* See if we can build a range comparison. */
8156 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8157 return tem;
8159 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8160 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8162 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8163 if (tem)
8164 return fold_build2_loc (loc, code, type, tem, arg1);
8167 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8168 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8170 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8171 if (tem)
8172 return fold_build2_loc (loc, code, type, arg0, tem);
8175 /* Check for the possibility of merging component references. If our
8176 lhs is another similar operation, try to merge its rhs with our
8177 rhs. Then try to merge our lhs and rhs. */
8178 if (TREE_CODE (arg0) == code
8179 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8180 TREE_OPERAND (arg0, 1), arg1)))
8181 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8183 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8184 return tem;
8186 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8187 && (code == TRUTH_AND_EXPR
8188 || code == TRUTH_ANDIF_EXPR
8189 || code == TRUTH_OR_EXPR
8190 || code == TRUTH_ORIF_EXPR))
8192 enum tree_code ncode, icode;
8194 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8195 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8196 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8198 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8199 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8200 We don't want to pack more than two leafs to a non-IF AND/OR
8201 expression.
8202 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8203 equal to IF-CODE, then we don't want to add right-hand operand.
8204 If the inner right-hand side of left-hand operand has
8205 side-effects, or isn't simple, then we can't add to it,
8206 as otherwise we might destroy if-sequence. */
8207 if (TREE_CODE (arg0) == icode
8208 && simple_operand_p_2 (arg1)
8209 /* Needed for sequence points to handle trappings, and
8210 side-effects. */
8211 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8213 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8214 arg1);
8215 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8216 tem);
8218 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8219 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8220 else if (TREE_CODE (arg1) == icode
8221 && simple_operand_p_2 (arg0)
8222 /* Needed for sequence points to handle trappings, and
8223 side-effects. */
8224 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8226 tem = fold_build2_loc (loc, ncode, type,
8227 arg0, TREE_OPERAND (arg1, 0));
8228 return fold_build2_loc (loc, icode, type, tem,
8229 TREE_OPERAND (arg1, 1));
8231 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8232 into (A OR B).
8233 For sequence point consistancy, we need to check for trapping,
8234 and side-effects. */
8235 else if (code == icode && simple_operand_p_2 (arg0)
8236 && simple_operand_p_2 (arg1))
8237 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8240 return NULL_TREE;
8243 /* Fold a binary expression of code CODE and type TYPE with operands
8244 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8245 Return the folded expression if folding is successful. Otherwise,
8246 return NULL_TREE. */
8248 static tree
8249 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8251 enum tree_code compl_code;
8253 if (code == MIN_EXPR)
8254 compl_code = MAX_EXPR;
8255 else if (code == MAX_EXPR)
8256 compl_code = MIN_EXPR;
8257 else
8258 gcc_unreachable ();
8260 /* MIN (MAX (a, b), b) == b. */
8261 if (TREE_CODE (op0) == compl_code
8262 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8263 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8265 /* MIN (MAX (b, a), b) == b. */
8266 if (TREE_CODE (op0) == compl_code
8267 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8268 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8269 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8271 /* MIN (a, MAX (a, b)) == a. */
8272 if (TREE_CODE (op1) == compl_code
8273 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8274 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8275 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8277 /* MIN (a, MAX (b, a)) == a. */
8278 if (TREE_CODE (op1) == compl_code
8279 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8280 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8281 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8283 return NULL_TREE;
8286 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8287 by changing CODE to reduce the magnitude of constants involved in
8288 ARG0 of the comparison.
8289 Returns a canonicalized comparison tree if a simplification was
8290 possible, otherwise returns NULL_TREE.
8291 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8292 valid if signed overflow is undefined. */
8294 static tree
8295 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8296 tree arg0, tree arg1,
8297 bool *strict_overflow_p)
8299 enum tree_code code0 = TREE_CODE (arg0);
8300 tree t, cst0 = NULL_TREE;
8301 int sgn0;
8302 bool swap = false;
8304 /* Match A +- CST code arg1 and CST code arg1. We can change the
8305 first form only if overflow is undefined. */
8306 if (!(((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8307 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8308 /* In principle pointers also have undefined overflow behavior,
8309 but that causes problems elsewhere. */
8310 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8311 && (code0 == MINUS_EXPR
8312 || code0 == PLUS_EXPR)
8313 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8314 || code0 == INTEGER_CST))
8315 return NULL_TREE;
8317 /* Identify the constant in arg0 and its sign. */
8318 if (code0 == INTEGER_CST)
8319 cst0 = arg0;
8320 else
8321 cst0 = TREE_OPERAND (arg0, 1);
8322 sgn0 = tree_int_cst_sgn (cst0);
8324 /* Overflowed constants and zero will cause problems. */
8325 if (integer_zerop (cst0)
8326 || TREE_OVERFLOW (cst0))
8327 return NULL_TREE;
8329 /* See if we can reduce the magnitude of the constant in
8330 arg0 by changing the comparison code. */
8331 if (code0 == INTEGER_CST)
8333 /* CST <= arg1 -> CST-1 < arg1. */
8334 if (code == LE_EXPR && sgn0 == 1)
8335 code = LT_EXPR;
8336 /* -CST < arg1 -> -CST-1 <= arg1. */
8337 else if (code == LT_EXPR && sgn0 == -1)
8338 code = LE_EXPR;
8339 /* CST > arg1 -> CST-1 >= arg1. */
8340 else if (code == GT_EXPR && sgn0 == 1)
8341 code = GE_EXPR;
8342 /* -CST >= arg1 -> -CST-1 > arg1. */
8343 else if (code == GE_EXPR && sgn0 == -1)
8344 code = GT_EXPR;
8345 else
8346 return NULL_TREE;
8347 /* arg1 code' CST' might be more canonical. */
8348 swap = true;
8350 else
8352 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8353 if (code == LT_EXPR
8354 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8355 code = LE_EXPR;
8356 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8357 else if (code == GT_EXPR
8358 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8359 code = GE_EXPR;
8360 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8361 else if (code == LE_EXPR
8362 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8363 code = LT_EXPR;
8364 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8365 else if (code == GE_EXPR
8366 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8367 code = GT_EXPR;
8368 else
8369 return NULL_TREE;
8370 *strict_overflow_p = true;
8373 /* Now build the constant reduced in magnitude. But not if that
8374 would produce one outside of its types range. */
8375 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8376 && ((sgn0 == 1
8377 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8378 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8379 || (sgn0 == -1
8380 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8381 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8382 /* We cannot swap the comparison here as that would cause us to
8383 endlessly recurse. */
8384 return NULL_TREE;
8386 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8387 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8388 if (code0 != INTEGER_CST)
8389 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8390 t = fold_convert (TREE_TYPE (arg1), t);
8392 /* If swapping might yield to a more canonical form, do so. */
8393 if (swap)
8394 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8395 else
8396 return fold_build2_loc (loc, code, type, t, arg1);
8399 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8400 overflow further. Try to decrease the magnitude of constants involved
8401 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8402 and put sole constants at the second argument position.
8403 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8405 static tree
8406 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8407 tree arg0, tree arg1)
8409 tree t;
8410 bool strict_overflow_p;
8411 const char * const warnmsg = G_("assuming signed overflow does not occur "
8412 "when reducing constant in comparison");
8414 /* Try canonicalization by simplifying arg0. */
8415 strict_overflow_p = false;
8416 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8417 &strict_overflow_p);
8418 if (t)
8420 if (strict_overflow_p)
8421 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8422 return t;
8425 /* Try canonicalization by simplifying arg1 using the swapped
8426 comparison. */
8427 code = swap_tree_comparison (code);
8428 strict_overflow_p = false;
8429 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8430 &strict_overflow_p);
8431 if (t && strict_overflow_p)
8432 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8433 return t;
8436 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8437 space. This is used to avoid issuing overflow warnings for
8438 expressions like &p->x which can not wrap. */
8440 static bool
8441 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8443 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8444 return true;
8446 if (bitpos < 0)
8447 return true;
8449 wide_int wi_offset;
8450 int precision = TYPE_PRECISION (TREE_TYPE (base));
8451 if (offset == NULL_TREE)
8452 wi_offset = wi::zero (precision);
8453 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8454 return true;
8455 else
8456 wi_offset = offset;
8458 bool overflow;
8459 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8460 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8461 if (overflow)
8462 return true;
8464 if (!wi::fits_uhwi_p (total))
8465 return true;
8467 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8468 if (size <= 0)
8469 return true;
8471 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8472 array. */
8473 if (TREE_CODE (base) == ADDR_EXPR)
8475 HOST_WIDE_INT base_size;
8477 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8478 if (base_size > 0 && size < base_size)
8479 size = base_size;
8482 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8485 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8486 kind INTEGER_CST. This makes sure to properly sign-extend the
8487 constant. */
8489 static HOST_WIDE_INT
8490 size_low_cst (const_tree t)
8492 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8493 int prec = TYPE_PRECISION (TREE_TYPE (t));
8494 if (prec < HOST_BITS_PER_WIDE_INT)
8495 return sext_hwi (w, prec);
8496 return w;
8499 /* Subroutine of fold_binary. This routine performs all of the
8500 transformations that are common to the equality/inequality
8501 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8502 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8503 fold_binary should call fold_binary. Fold a comparison with
8504 tree code CODE and type TYPE with operands OP0 and OP1. Return
8505 the folded comparison or NULL_TREE. */
8507 static tree
8508 fold_comparison (location_t loc, enum tree_code code, tree type,
8509 tree op0, tree op1)
8511 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8512 tree arg0, arg1, tem;
8514 arg0 = op0;
8515 arg1 = op1;
8517 STRIP_SIGN_NOPS (arg0);
8518 STRIP_SIGN_NOPS (arg1);
8520 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8521 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8522 && (equality_code
8523 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8524 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8525 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8526 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8527 && TREE_CODE (arg1) == INTEGER_CST
8528 && !TREE_OVERFLOW (arg1))
8530 const enum tree_code
8531 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8532 tree const1 = TREE_OPERAND (arg0, 1);
8533 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8534 tree variable = TREE_OPERAND (arg0, 0);
8535 tree new_const = int_const_binop (reverse_op, const2, const1);
8537 /* If the constant operation overflowed this can be
8538 simplified as a comparison against INT_MAX/INT_MIN. */
8539 if (TREE_OVERFLOW (new_const)
8540 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8542 int const1_sgn = tree_int_cst_sgn (const1);
8543 enum tree_code code2 = code;
8545 /* Get the sign of the constant on the lhs if the
8546 operation were VARIABLE + CONST1. */
8547 if (TREE_CODE (arg0) == MINUS_EXPR)
8548 const1_sgn = -const1_sgn;
8550 /* The sign of the constant determines if we overflowed
8551 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8552 Canonicalize to the INT_MIN overflow by swapping the comparison
8553 if necessary. */
8554 if (const1_sgn == -1)
8555 code2 = swap_tree_comparison (code);
8557 /* We now can look at the canonicalized case
8558 VARIABLE + 1 CODE2 INT_MIN
8559 and decide on the result. */
8560 switch (code2)
8562 case EQ_EXPR:
8563 case LT_EXPR:
8564 case LE_EXPR:
8565 return
8566 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8568 case NE_EXPR:
8569 case GE_EXPR:
8570 case GT_EXPR:
8571 return
8572 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8574 default:
8575 gcc_unreachable ();
8578 else
8580 if (!equality_code)
8581 fold_overflow_warning ("assuming signed overflow does not occur "
8582 "when changing X +- C1 cmp C2 to "
8583 "X cmp C2 -+ C1",
8584 WARN_STRICT_OVERFLOW_COMPARISON);
8585 return fold_build2_loc (loc, code, type, variable, new_const);
8589 /* For comparisons of pointers we can decompose it to a compile time
8590 comparison of the base objects and the offsets into the object.
8591 This requires at least one operand being an ADDR_EXPR or a
8592 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8593 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8594 && (TREE_CODE (arg0) == ADDR_EXPR
8595 || TREE_CODE (arg1) == ADDR_EXPR
8596 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8597 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8599 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8600 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8601 machine_mode mode;
8602 int volatilep, unsignedp;
8603 bool indirect_base0 = false, indirect_base1 = false;
8605 /* Get base and offset for the access. Strip ADDR_EXPR for
8606 get_inner_reference, but put it back by stripping INDIRECT_REF
8607 off the base object if possible. indirect_baseN will be true
8608 if baseN is not an address but refers to the object itself. */
8609 base0 = arg0;
8610 if (TREE_CODE (arg0) == ADDR_EXPR)
8612 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8613 &bitsize, &bitpos0, &offset0, &mode,
8614 &unsignedp, &volatilep, false);
8615 if (TREE_CODE (base0) == INDIRECT_REF)
8616 base0 = TREE_OPERAND (base0, 0);
8617 else
8618 indirect_base0 = true;
8620 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8622 base0 = TREE_OPERAND (arg0, 0);
8623 STRIP_SIGN_NOPS (base0);
8624 if (TREE_CODE (base0) == ADDR_EXPR)
8626 base0 = TREE_OPERAND (base0, 0);
8627 indirect_base0 = true;
8629 offset0 = TREE_OPERAND (arg0, 1);
8630 if (tree_fits_shwi_p (offset0))
8632 HOST_WIDE_INT off = size_low_cst (offset0);
8633 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8634 * BITS_PER_UNIT)
8635 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8637 bitpos0 = off * BITS_PER_UNIT;
8638 offset0 = NULL_TREE;
8643 base1 = arg1;
8644 if (TREE_CODE (arg1) == ADDR_EXPR)
8646 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8647 &bitsize, &bitpos1, &offset1, &mode,
8648 &unsignedp, &volatilep, false);
8649 if (TREE_CODE (base1) == INDIRECT_REF)
8650 base1 = TREE_OPERAND (base1, 0);
8651 else
8652 indirect_base1 = true;
8654 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8656 base1 = TREE_OPERAND (arg1, 0);
8657 STRIP_SIGN_NOPS (base1);
8658 if (TREE_CODE (base1) == ADDR_EXPR)
8660 base1 = TREE_OPERAND (base1, 0);
8661 indirect_base1 = true;
8663 offset1 = TREE_OPERAND (arg1, 1);
8664 if (tree_fits_shwi_p (offset1))
8666 HOST_WIDE_INT off = size_low_cst (offset1);
8667 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8668 * BITS_PER_UNIT)
8669 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8671 bitpos1 = off * BITS_PER_UNIT;
8672 offset1 = NULL_TREE;
8677 /* A local variable can never be pointed to by
8678 the default SSA name of an incoming parameter. */
8679 if ((TREE_CODE (arg0) == ADDR_EXPR
8680 && indirect_base0
8681 && TREE_CODE (base0) == VAR_DECL
8682 && auto_var_in_fn_p (base0, current_function_decl)
8683 && !indirect_base1
8684 && TREE_CODE (base1) == SSA_NAME
8685 && SSA_NAME_IS_DEFAULT_DEF (base1)
8686 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8687 || (TREE_CODE (arg1) == ADDR_EXPR
8688 && indirect_base1
8689 && TREE_CODE (base1) == VAR_DECL
8690 && auto_var_in_fn_p (base1, current_function_decl)
8691 && !indirect_base0
8692 && TREE_CODE (base0) == SSA_NAME
8693 && SSA_NAME_IS_DEFAULT_DEF (base0)
8694 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8696 if (code == NE_EXPR)
8697 return constant_boolean_node (1, type);
8698 else if (code == EQ_EXPR)
8699 return constant_boolean_node (0, type);
8701 /* If we have equivalent bases we might be able to simplify. */
8702 else if (indirect_base0 == indirect_base1
8703 && operand_equal_p (base0, base1, 0))
8705 /* We can fold this expression to a constant if the non-constant
8706 offset parts are equal. */
8707 if ((offset0 == offset1
8708 || (offset0 && offset1
8709 && operand_equal_p (offset0, offset1, 0)))
8710 && (code == EQ_EXPR
8711 || code == NE_EXPR
8712 || (indirect_base0 && DECL_P (base0))
8713 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8716 if (!equality_code
8717 && bitpos0 != bitpos1
8718 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8719 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8720 fold_overflow_warning (("assuming pointer wraparound does not "
8721 "occur when comparing P +- C1 with "
8722 "P +- C2"),
8723 WARN_STRICT_OVERFLOW_CONDITIONAL);
8725 switch (code)
8727 case EQ_EXPR:
8728 return constant_boolean_node (bitpos0 == bitpos1, type);
8729 case NE_EXPR:
8730 return constant_boolean_node (bitpos0 != bitpos1, type);
8731 case LT_EXPR:
8732 return constant_boolean_node (bitpos0 < bitpos1, type);
8733 case LE_EXPR:
8734 return constant_boolean_node (bitpos0 <= bitpos1, type);
8735 case GE_EXPR:
8736 return constant_boolean_node (bitpos0 >= bitpos1, type);
8737 case GT_EXPR:
8738 return constant_boolean_node (bitpos0 > bitpos1, type);
8739 default:;
8742 /* We can simplify the comparison to a comparison of the variable
8743 offset parts if the constant offset parts are equal.
8744 Be careful to use signed sizetype here because otherwise we
8745 mess with array offsets in the wrong way. This is possible
8746 because pointer arithmetic is restricted to retain within an
8747 object and overflow on pointer differences is undefined as of
8748 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8749 else if (bitpos0 == bitpos1
8750 && (equality_code
8751 || (indirect_base0 && DECL_P (base0))
8752 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8754 /* By converting to signed sizetype we cover middle-end pointer
8755 arithmetic which operates on unsigned pointer types of size
8756 type size and ARRAY_REF offsets which are properly sign or
8757 zero extended from their type in case it is narrower than
8758 sizetype. */
8759 if (offset0 == NULL_TREE)
8760 offset0 = build_int_cst (ssizetype, 0);
8761 else
8762 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8763 if (offset1 == NULL_TREE)
8764 offset1 = build_int_cst (ssizetype, 0);
8765 else
8766 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8768 if (!equality_code
8769 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8770 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8771 fold_overflow_warning (("assuming pointer wraparound does not "
8772 "occur when comparing P +- C1 with "
8773 "P +- C2"),
8774 WARN_STRICT_OVERFLOW_COMPARISON);
8776 return fold_build2_loc (loc, code, type, offset0, offset1);
8779 /* For non-equal bases we can simplify if they are addresses
8780 declarations with different addresses. */
8781 else if (indirect_base0 && indirect_base1
8782 /* We know that !operand_equal_p (base0, base1, 0)
8783 because the if condition was false. But make
8784 sure two decls are not the same. */
8785 && base0 != base1
8786 && TREE_CODE (arg0) == ADDR_EXPR
8787 && TREE_CODE (arg1) == ADDR_EXPR
8788 && DECL_P (base0)
8789 && DECL_P (base1)
8790 /* Watch for aliases. */
8791 && (!decl_in_symtab_p (base0)
8792 || !decl_in_symtab_p (base1)
8793 || !symtab_node::get_create (base0)->equal_address_to
8794 (symtab_node::get_create (base1))))
8796 if (code == EQ_EXPR)
8797 return omit_two_operands_loc (loc, type, boolean_false_node,
8798 arg0, arg1);
8799 else if (code == NE_EXPR)
8800 return omit_two_operands_loc (loc, type, boolean_true_node,
8801 arg0, arg1);
8803 /* For equal offsets we can simplify to a comparison of the
8804 base addresses. */
8805 else if (bitpos0 == bitpos1
8806 && (indirect_base0
8807 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8808 && (indirect_base1
8809 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8810 && ((offset0 == offset1)
8811 || (offset0 && offset1
8812 && operand_equal_p (offset0, offset1, 0))))
8814 if (indirect_base0)
8815 base0 = build_fold_addr_expr_loc (loc, base0);
8816 if (indirect_base1)
8817 base1 = build_fold_addr_expr_loc (loc, base1);
8818 return fold_build2_loc (loc, code, type, base0, base1);
8822 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8823 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8824 the resulting offset is smaller in absolute value than the
8825 original one and has the same sign. */
8826 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8827 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8828 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8829 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8830 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8831 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8832 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8833 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8835 tree const1 = TREE_OPERAND (arg0, 1);
8836 tree const2 = TREE_OPERAND (arg1, 1);
8837 tree variable1 = TREE_OPERAND (arg0, 0);
8838 tree variable2 = TREE_OPERAND (arg1, 0);
8839 tree cst;
8840 const char * const warnmsg = G_("assuming signed overflow does not "
8841 "occur when combining constants around "
8842 "a comparison");
8844 /* Put the constant on the side where it doesn't overflow and is
8845 of lower absolute value and of same sign than before. */
8846 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8847 ? MINUS_EXPR : PLUS_EXPR,
8848 const2, const1);
8849 if (!TREE_OVERFLOW (cst)
8850 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8851 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8853 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8854 return fold_build2_loc (loc, code, type,
8855 variable1,
8856 fold_build2_loc (loc, TREE_CODE (arg1),
8857 TREE_TYPE (arg1),
8858 variable2, cst));
8861 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8862 ? MINUS_EXPR : PLUS_EXPR,
8863 const1, const2);
8864 if (!TREE_OVERFLOW (cst)
8865 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8866 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8868 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8869 return fold_build2_loc (loc, code, type,
8870 fold_build2_loc (loc, TREE_CODE (arg0),
8871 TREE_TYPE (arg0),
8872 variable1, cst),
8873 variable2);
8877 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8878 if (tem)
8879 return tem;
8881 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8882 && CONVERT_EXPR_P (arg0))
8884 /* If we are widening one operand of an integer comparison,
8885 see if the other operand is similarly being widened. Perhaps we
8886 can do the comparison in the narrower type. */
8887 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
8888 if (tem)
8889 return tem;
8891 /* Or if we are changing signedness. */
8892 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
8893 if (tem)
8894 return tem;
8897 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8898 constant, we can simplify it. */
8899 if (TREE_CODE (arg1) == INTEGER_CST
8900 && (TREE_CODE (arg0) == MIN_EXPR
8901 || TREE_CODE (arg0) == MAX_EXPR)
8902 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8904 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8905 if (tem)
8906 return tem;
8909 /* If we are comparing an expression that just has comparisons
8910 of two integer values, arithmetic expressions of those comparisons,
8911 and constants, we can simplify it. There are only three cases
8912 to check: the two values can either be equal, the first can be
8913 greater, or the second can be greater. Fold the expression for
8914 those three values. Since each value must be 0 or 1, we have
8915 eight possibilities, each of which corresponds to the constant 0
8916 or 1 or one of the six possible comparisons.
8918 This handles common cases like (a > b) == 0 but also handles
8919 expressions like ((x > y) - (y > x)) > 0, which supposedly
8920 occur in macroized code. */
8922 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8924 tree cval1 = 0, cval2 = 0;
8925 int save_p = 0;
8927 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8928 /* Don't handle degenerate cases here; they should already
8929 have been handled anyway. */
8930 && cval1 != 0 && cval2 != 0
8931 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8932 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8933 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8934 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8935 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8936 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8937 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8939 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8940 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8942 /* We can't just pass T to eval_subst in case cval1 or cval2
8943 was the same as ARG1. */
8945 tree high_result
8946 = fold_build2_loc (loc, code, type,
8947 eval_subst (loc, arg0, cval1, maxval,
8948 cval2, minval),
8949 arg1);
8950 tree equal_result
8951 = fold_build2_loc (loc, code, type,
8952 eval_subst (loc, arg0, cval1, maxval,
8953 cval2, maxval),
8954 arg1);
8955 tree low_result
8956 = fold_build2_loc (loc, code, type,
8957 eval_subst (loc, arg0, cval1, minval,
8958 cval2, maxval),
8959 arg1);
8961 /* All three of these results should be 0 or 1. Confirm they are.
8962 Then use those values to select the proper code to use. */
8964 if (TREE_CODE (high_result) == INTEGER_CST
8965 && TREE_CODE (equal_result) == INTEGER_CST
8966 && TREE_CODE (low_result) == INTEGER_CST)
8968 /* Make a 3-bit mask with the high-order bit being the
8969 value for `>', the next for '=', and the low for '<'. */
8970 switch ((integer_onep (high_result) * 4)
8971 + (integer_onep (equal_result) * 2)
8972 + integer_onep (low_result))
8974 case 0:
8975 /* Always false. */
8976 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8977 case 1:
8978 code = LT_EXPR;
8979 break;
8980 case 2:
8981 code = EQ_EXPR;
8982 break;
8983 case 3:
8984 code = LE_EXPR;
8985 break;
8986 case 4:
8987 code = GT_EXPR;
8988 break;
8989 case 5:
8990 code = NE_EXPR;
8991 break;
8992 case 6:
8993 code = GE_EXPR;
8994 break;
8995 case 7:
8996 /* Always true. */
8997 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9000 if (save_p)
9002 tem = save_expr (build2 (code, type, cval1, cval2));
9003 SET_EXPR_LOCATION (tem, loc);
9004 return tem;
9006 return fold_build2_loc (loc, code, type, cval1, cval2);
9011 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9012 into a single range test. */
9013 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9014 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9015 && TREE_CODE (arg1) == INTEGER_CST
9016 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9017 && !integer_zerop (TREE_OPERAND (arg0, 1))
9018 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9019 && !TREE_OVERFLOW (arg1))
9021 tem = fold_div_compare (loc, code, type, arg0, arg1);
9022 if (tem != NULL_TREE)
9023 return tem;
9026 return NULL_TREE;
9030 /* Subroutine of fold_binary. Optimize complex multiplications of the
9031 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9032 argument EXPR represents the expression "z" of type TYPE. */
9034 static tree
9035 fold_mult_zconjz (location_t loc, tree type, tree expr)
9037 tree itype = TREE_TYPE (type);
9038 tree rpart, ipart, tem;
9040 if (TREE_CODE (expr) == COMPLEX_EXPR)
9042 rpart = TREE_OPERAND (expr, 0);
9043 ipart = TREE_OPERAND (expr, 1);
9045 else if (TREE_CODE (expr) == COMPLEX_CST)
9047 rpart = TREE_REALPART (expr);
9048 ipart = TREE_IMAGPART (expr);
9050 else
9052 expr = save_expr (expr);
9053 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9054 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9057 rpart = save_expr (rpart);
9058 ipart = save_expr (ipart);
9059 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9060 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9061 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9062 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9063 build_zero_cst (itype));
9067 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9068 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9070 static bool
9071 vec_cst_ctor_to_array (tree arg, tree *elts)
9073 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9075 if (TREE_CODE (arg) == VECTOR_CST)
9077 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9078 elts[i] = VECTOR_CST_ELT (arg, i);
9080 else if (TREE_CODE (arg) == CONSTRUCTOR)
9082 constructor_elt *elt;
9084 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9085 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9086 return false;
9087 else
9088 elts[i] = elt->value;
9090 else
9091 return false;
9092 for (; i < nelts; i++)
9093 elts[i]
9094 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9095 return true;
9098 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9099 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9100 NULL_TREE otherwise. */
9102 static tree
9103 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9105 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9106 tree *elts;
9107 bool need_ctor = false;
9109 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9110 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9111 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9112 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9113 return NULL_TREE;
9115 elts = XALLOCAVEC (tree, nelts * 3);
9116 if (!vec_cst_ctor_to_array (arg0, elts)
9117 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9118 return NULL_TREE;
9120 for (i = 0; i < nelts; i++)
9122 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9123 need_ctor = true;
9124 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9127 if (need_ctor)
9129 vec<constructor_elt, va_gc> *v;
9130 vec_alloc (v, nelts);
9131 for (i = 0; i < nelts; i++)
9132 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9133 return build_constructor (type, v);
9135 else
9136 return build_vector (type, &elts[2 * nelts]);
9139 /* Try to fold a pointer difference of type TYPE two address expressions of
9140 array references AREF0 and AREF1 using location LOC. Return a
9141 simplified expression for the difference or NULL_TREE. */
9143 static tree
9144 fold_addr_of_array_ref_difference (location_t loc, tree type,
9145 tree aref0, tree aref1)
9147 tree base0 = TREE_OPERAND (aref0, 0);
9148 tree base1 = TREE_OPERAND (aref1, 0);
9149 tree base_offset = build_int_cst (type, 0);
9151 /* If the bases are array references as well, recurse. If the bases
9152 are pointer indirections compute the difference of the pointers.
9153 If the bases are equal, we are set. */
9154 if ((TREE_CODE (base0) == ARRAY_REF
9155 && TREE_CODE (base1) == ARRAY_REF
9156 && (base_offset
9157 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9158 || (INDIRECT_REF_P (base0)
9159 && INDIRECT_REF_P (base1)
9160 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9161 TREE_OPERAND (base0, 0),
9162 TREE_OPERAND (base1, 0))))
9163 || operand_equal_p (base0, base1, 0))
9165 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9166 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9167 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9168 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9169 return fold_build2_loc (loc, PLUS_EXPR, type,
9170 base_offset,
9171 fold_build2_loc (loc, MULT_EXPR, type,
9172 diff, esz));
9174 return NULL_TREE;
9177 /* If the real or vector real constant CST of type TYPE has an exact
9178 inverse, return it, else return NULL. */
9180 tree
9181 exact_inverse (tree type, tree cst)
9183 REAL_VALUE_TYPE r;
9184 tree unit_type, *elts;
9185 machine_mode mode;
9186 unsigned vec_nelts, i;
9188 switch (TREE_CODE (cst))
9190 case REAL_CST:
9191 r = TREE_REAL_CST (cst);
9193 if (exact_real_inverse (TYPE_MODE (type), &r))
9194 return build_real (type, r);
9196 return NULL_TREE;
9198 case VECTOR_CST:
9199 vec_nelts = VECTOR_CST_NELTS (cst);
9200 elts = XALLOCAVEC (tree, vec_nelts);
9201 unit_type = TREE_TYPE (type);
9202 mode = TYPE_MODE (unit_type);
9204 for (i = 0; i < vec_nelts; i++)
9206 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9207 if (!exact_real_inverse (mode, &r))
9208 return NULL_TREE;
9209 elts[i] = build_real (unit_type, r);
9212 return build_vector (type, elts);
9214 default:
9215 return NULL_TREE;
9219 /* Mask out the tz least significant bits of X of type TYPE where
9220 tz is the number of trailing zeroes in Y. */
9221 static wide_int
9222 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9224 int tz = wi::ctz (y);
9225 if (tz > 0)
9226 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9227 return x;
9230 /* Return true when T is an address and is known to be nonzero.
9231 For floating point we further ensure that T is not denormal.
9232 Similar logic is present in nonzero_address in rtlanal.h.
9234 If the return value is based on the assumption that signed overflow
9235 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9236 change *STRICT_OVERFLOW_P. */
9238 static bool
9239 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9241 tree type = TREE_TYPE (t);
9242 enum tree_code code;
9244 /* Doing something useful for floating point would need more work. */
9245 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9246 return false;
9248 code = TREE_CODE (t);
9249 switch (TREE_CODE_CLASS (code))
9251 case tcc_unary:
9252 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9253 strict_overflow_p);
9254 case tcc_binary:
9255 case tcc_comparison:
9256 return tree_binary_nonzero_warnv_p (code, type,
9257 TREE_OPERAND (t, 0),
9258 TREE_OPERAND (t, 1),
9259 strict_overflow_p);
9260 case tcc_constant:
9261 case tcc_declaration:
9262 case tcc_reference:
9263 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9265 default:
9266 break;
9269 switch (code)
9271 case TRUTH_NOT_EXPR:
9272 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9273 strict_overflow_p);
9275 case TRUTH_AND_EXPR:
9276 case TRUTH_OR_EXPR:
9277 case TRUTH_XOR_EXPR:
9278 return tree_binary_nonzero_warnv_p (code, type,
9279 TREE_OPERAND (t, 0),
9280 TREE_OPERAND (t, 1),
9281 strict_overflow_p);
9283 case COND_EXPR:
9284 case CONSTRUCTOR:
9285 case OBJ_TYPE_REF:
9286 case ASSERT_EXPR:
9287 case ADDR_EXPR:
9288 case WITH_SIZE_EXPR:
9289 case SSA_NAME:
9290 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9292 case COMPOUND_EXPR:
9293 case MODIFY_EXPR:
9294 case BIND_EXPR:
9295 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9296 strict_overflow_p);
9298 case SAVE_EXPR:
9299 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9300 strict_overflow_p);
9302 case CALL_EXPR:
9304 tree fndecl = get_callee_fndecl (t);
9305 if (!fndecl) return false;
9306 if (flag_delete_null_pointer_checks && !flag_check_new
9307 && DECL_IS_OPERATOR_NEW (fndecl)
9308 && !TREE_NOTHROW (fndecl))
9309 return true;
9310 if (flag_delete_null_pointer_checks
9311 && lookup_attribute ("returns_nonnull",
9312 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9313 return true;
9314 return alloca_call_p (t);
9317 default:
9318 break;
9320 return false;
9323 /* Return true when T is an address and is known to be nonzero.
9324 Handle warnings about undefined signed overflow. */
9326 static bool
9327 tree_expr_nonzero_p (tree t)
9329 bool ret, strict_overflow_p;
9331 strict_overflow_p = false;
9332 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9333 if (strict_overflow_p)
9334 fold_overflow_warning (("assuming signed overflow does not occur when "
9335 "determining that expression is always "
9336 "non-zero"),
9337 WARN_STRICT_OVERFLOW_MISC);
9338 return ret;
9341 /* Fold a binary expression of code CODE and type TYPE with operands
9342 OP0 and OP1. LOC is the location of the resulting expression.
9343 Return the folded expression if folding is successful. Otherwise,
9344 return NULL_TREE. */
9346 tree
9347 fold_binary_loc (location_t loc,
9348 enum tree_code code, tree type, tree op0, tree op1)
9350 enum tree_code_class kind = TREE_CODE_CLASS (code);
9351 tree arg0, arg1, tem;
9352 tree t1 = NULL_TREE;
9353 bool strict_overflow_p;
9354 unsigned int prec;
9356 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9357 && TREE_CODE_LENGTH (code) == 2
9358 && op0 != NULL_TREE
9359 && op1 != NULL_TREE);
9361 arg0 = op0;
9362 arg1 = op1;
9364 /* Strip any conversions that don't change the mode. This is
9365 safe for every expression, except for a comparison expression
9366 because its signedness is derived from its operands. So, in
9367 the latter case, only strip conversions that don't change the
9368 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9369 preserved.
9371 Note that this is done as an internal manipulation within the
9372 constant folder, in order to find the simplest representation
9373 of the arguments so that their form can be studied. In any
9374 cases, the appropriate type conversions should be put back in
9375 the tree that will get out of the constant folder. */
9377 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9379 STRIP_SIGN_NOPS (arg0);
9380 STRIP_SIGN_NOPS (arg1);
9382 else
9384 STRIP_NOPS (arg0);
9385 STRIP_NOPS (arg1);
9388 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9389 constant but we can't do arithmetic on them. */
9390 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9392 tem = const_binop (code, type, arg0, arg1);
9393 if (tem != NULL_TREE)
9395 if (TREE_TYPE (tem) != type)
9396 tem = fold_convert_loc (loc, type, tem);
9397 return tem;
9401 /* If this is a commutative operation, and ARG0 is a constant, move it
9402 to ARG1 to reduce the number of tests below. */
9403 if (commutative_tree_code (code)
9404 && tree_swap_operands_p (arg0, arg1, true))
9405 return fold_build2_loc (loc, code, type, op1, op0);
9407 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9408 to ARG1 to reduce the number of tests below. */
9409 if (kind == tcc_comparison
9410 && tree_swap_operands_p (arg0, arg1, true))
9411 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9413 tem = generic_simplify (loc, code, type, op0, op1);
9414 if (tem)
9415 return tem;
9417 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9419 First check for cases where an arithmetic operation is applied to a
9420 compound, conditional, or comparison operation. Push the arithmetic
9421 operation inside the compound or conditional to see if any folding
9422 can then be done. Convert comparison to conditional for this purpose.
9423 The also optimizes non-constant cases that used to be done in
9424 expand_expr.
9426 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9427 one of the operands is a comparison and the other is a comparison, a
9428 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9429 code below would make the expression more complex. Change it to a
9430 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9431 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9433 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9434 || code == EQ_EXPR || code == NE_EXPR)
9435 && TREE_CODE (type) != VECTOR_TYPE
9436 && ((truth_value_p (TREE_CODE (arg0))
9437 && (truth_value_p (TREE_CODE (arg1))
9438 || (TREE_CODE (arg1) == BIT_AND_EXPR
9439 && integer_onep (TREE_OPERAND (arg1, 1)))))
9440 || (truth_value_p (TREE_CODE (arg1))
9441 && (truth_value_p (TREE_CODE (arg0))
9442 || (TREE_CODE (arg0) == BIT_AND_EXPR
9443 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9445 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9446 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9447 : TRUTH_XOR_EXPR,
9448 boolean_type_node,
9449 fold_convert_loc (loc, boolean_type_node, arg0),
9450 fold_convert_loc (loc, boolean_type_node, arg1));
9452 if (code == EQ_EXPR)
9453 tem = invert_truthvalue_loc (loc, tem);
9455 return fold_convert_loc (loc, type, tem);
9458 if (TREE_CODE_CLASS (code) == tcc_binary
9459 || TREE_CODE_CLASS (code) == tcc_comparison)
9461 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9463 tem = fold_build2_loc (loc, code, type,
9464 fold_convert_loc (loc, TREE_TYPE (op0),
9465 TREE_OPERAND (arg0, 1)), op1);
9466 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9467 tem);
9469 if (TREE_CODE (arg1) == COMPOUND_EXPR
9470 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9472 tem = fold_build2_loc (loc, code, type, op0,
9473 fold_convert_loc (loc, TREE_TYPE (op1),
9474 TREE_OPERAND (arg1, 1)));
9475 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9476 tem);
9479 if (TREE_CODE (arg0) == COND_EXPR
9480 || TREE_CODE (arg0) == VEC_COND_EXPR
9481 || COMPARISON_CLASS_P (arg0))
9483 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9484 arg0, arg1,
9485 /*cond_first_p=*/1);
9486 if (tem != NULL_TREE)
9487 return tem;
9490 if (TREE_CODE (arg1) == COND_EXPR
9491 || TREE_CODE (arg1) == VEC_COND_EXPR
9492 || COMPARISON_CLASS_P (arg1))
9494 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9495 arg1, arg0,
9496 /*cond_first_p=*/0);
9497 if (tem != NULL_TREE)
9498 return tem;
9502 switch (code)
9504 case MEM_REF:
9505 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9506 if (TREE_CODE (arg0) == ADDR_EXPR
9507 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9509 tree iref = TREE_OPERAND (arg0, 0);
9510 return fold_build2 (MEM_REF, type,
9511 TREE_OPERAND (iref, 0),
9512 int_const_binop (PLUS_EXPR, arg1,
9513 TREE_OPERAND (iref, 1)));
9516 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9517 if (TREE_CODE (arg0) == ADDR_EXPR
9518 && handled_component_p (TREE_OPERAND (arg0, 0)))
9520 tree base;
9521 HOST_WIDE_INT coffset;
9522 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9523 &coffset);
9524 if (!base)
9525 return NULL_TREE;
9526 return fold_build2 (MEM_REF, type,
9527 build_fold_addr_expr (base),
9528 int_const_binop (PLUS_EXPR, arg1,
9529 size_int (coffset)));
9532 return NULL_TREE;
9534 case POINTER_PLUS_EXPR:
9535 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9536 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9537 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9538 return fold_convert_loc (loc, type,
9539 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9540 fold_convert_loc (loc, sizetype,
9541 arg1),
9542 fold_convert_loc (loc, sizetype,
9543 arg0)));
9545 return NULL_TREE;
9547 case PLUS_EXPR:
9548 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9550 /* X + (X / CST) * -CST is X % CST. */
9551 if (TREE_CODE (arg1) == MULT_EXPR
9552 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9553 && operand_equal_p (arg0,
9554 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9556 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9557 tree cst1 = TREE_OPERAND (arg1, 1);
9558 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9559 cst1, cst0);
9560 if (sum && integer_zerop (sum))
9561 return fold_convert_loc (loc, type,
9562 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9563 TREE_TYPE (arg0), arg0,
9564 cst0));
9568 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9569 one. Make sure the type is not saturating and has the signedness of
9570 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9571 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9572 if ((TREE_CODE (arg0) == MULT_EXPR
9573 || TREE_CODE (arg1) == MULT_EXPR)
9574 && !TYPE_SATURATING (type)
9575 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9576 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9577 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9579 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9580 if (tem)
9581 return tem;
9584 if (! FLOAT_TYPE_P (type))
9586 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9587 with a constant, and the two constants have no bits in common,
9588 we should treat this as a BIT_IOR_EXPR since this may produce more
9589 simplifications. */
9590 if (TREE_CODE (arg0) == BIT_AND_EXPR
9591 && TREE_CODE (arg1) == BIT_AND_EXPR
9592 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9593 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9594 && wi::bit_and (TREE_OPERAND (arg0, 1),
9595 TREE_OPERAND (arg1, 1)) == 0)
9597 code = BIT_IOR_EXPR;
9598 goto bit_ior;
9601 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9602 (plus (plus (mult) (mult)) (foo)) so that we can
9603 take advantage of the factoring cases below. */
9604 if (ANY_INTEGRAL_TYPE_P (type)
9605 && TYPE_OVERFLOW_WRAPS (type)
9606 && (((TREE_CODE (arg0) == PLUS_EXPR
9607 || TREE_CODE (arg0) == MINUS_EXPR)
9608 && TREE_CODE (arg1) == MULT_EXPR)
9609 || ((TREE_CODE (arg1) == PLUS_EXPR
9610 || TREE_CODE (arg1) == MINUS_EXPR)
9611 && TREE_CODE (arg0) == MULT_EXPR)))
9613 tree parg0, parg1, parg, marg;
9614 enum tree_code pcode;
9616 if (TREE_CODE (arg1) == MULT_EXPR)
9617 parg = arg0, marg = arg1;
9618 else
9619 parg = arg1, marg = arg0;
9620 pcode = TREE_CODE (parg);
9621 parg0 = TREE_OPERAND (parg, 0);
9622 parg1 = TREE_OPERAND (parg, 1);
9623 STRIP_NOPS (parg0);
9624 STRIP_NOPS (parg1);
9626 if (TREE_CODE (parg0) == MULT_EXPR
9627 && TREE_CODE (parg1) != MULT_EXPR)
9628 return fold_build2_loc (loc, pcode, type,
9629 fold_build2_loc (loc, PLUS_EXPR, type,
9630 fold_convert_loc (loc, type,
9631 parg0),
9632 fold_convert_loc (loc, type,
9633 marg)),
9634 fold_convert_loc (loc, type, parg1));
9635 if (TREE_CODE (parg0) != MULT_EXPR
9636 && TREE_CODE (parg1) == MULT_EXPR)
9637 return
9638 fold_build2_loc (loc, PLUS_EXPR, type,
9639 fold_convert_loc (loc, type, parg0),
9640 fold_build2_loc (loc, pcode, type,
9641 fold_convert_loc (loc, type, marg),
9642 fold_convert_loc (loc, type,
9643 parg1)));
9646 else
9648 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9649 to __complex__ ( x, y ). This is not the same for SNaNs or
9650 if signed zeros are involved. */
9651 if (!HONOR_SNANS (element_mode (arg0))
9652 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9653 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9655 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9656 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9657 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9658 bool arg0rz = false, arg0iz = false;
9659 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9660 || (arg0i && (arg0iz = real_zerop (arg0i))))
9662 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9663 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9664 if (arg0rz && arg1i && real_zerop (arg1i))
9666 tree rp = arg1r ? arg1r
9667 : build1 (REALPART_EXPR, rtype, arg1);
9668 tree ip = arg0i ? arg0i
9669 : build1 (IMAGPART_EXPR, rtype, arg0);
9670 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9672 else if (arg0iz && arg1r && real_zerop (arg1r))
9674 tree rp = arg0r ? arg0r
9675 : build1 (REALPART_EXPR, rtype, arg0);
9676 tree ip = arg1i ? arg1i
9677 : build1 (IMAGPART_EXPR, rtype, arg1);
9678 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9683 if (flag_unsafe_math_optimizations
9684 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9685 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9686 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9687 return tem;
9689 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9690 We associate floats only if the user has specified
9691 -fassociative-math. */
9692 if (flag_associative_math
9693 && TREE_CODE (arg1) == PLUS_EXPR
9694 && TREE_CODE (arg0) != MULT_EXPR)
9696 tree tree10 = TREE_OPERAND (arg1, 0);
9697 tree tree11 = TREE_OPERAND (arg1, 1);
9698 if (TREE_CODE (tree11) == MULT_EXPR
9699 && TREE_CODE (tree10) == MULT_EXPR)
9701 tree tree0;
9702 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9703 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9706 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9707 We associate floats only if the user has specified
9708 -fassociative-math. */
9709 if (flag_associative_math
9710 && TREE_CODE (arg0) == PLUS_EXPR
9711 && TREE_CODE (arg1) != MULT_EXPR)
9713 tree tree00 = TREE_OPERAND (arg0, 0);
9714 tree tree01 = TREE_OPERAND (arg0, 1);
9715 if (TREE_CODE (tree01) == MULT_EXPR
9716 && TREE_CODE (tree00) == MULT_EXPR)
9718 tree tree0;
9719 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9720 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9725 bit_rotate:
9726 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9727 is a rotate of A by C1 bits. */
9728 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9729 is a rotate of A by B bits. */
9731 enum tree_code code0, code1;
9732 tree rtype;
9733 code0 = TREE_CODE (arg0);
9734 code1 = TREE_CODE (arg1);
9735 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9736 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9737 && operand_equal_p (TREE_OPERAND (arg0, 0),
9738 TREE_OPERAND (arg1, 0), 0)
9739 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9740 TYPE_UNSIGNED (rtype))
9741 /* Only create rotates in complete modes. Other cases are not
9742 expanded properly. */
9743 && (element_precision (rtype)
9744 == element_precision (TYPE_MODE (rtype))))
9746 tree tree01, tree11;
9747 enum tree_code code01, code11;
9749 tree01 = TREE_OPERAND (arg0, 1);
9750 tree11 = TREE_OPERAND (arg1, 1);
9751 STRIP_NOPS (tree01);
9752 STRIP_NOPS (tree11);
9753 code01 = TREE_CODE (tree01);
9754 code11 = TREE_CODE (tree11);
9755 if (code01 == INTEGER_CST
9756 && code11 == INTEGER_CST
9757 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9758 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9760 tem = build2_loc (loc, LROTATE_EXPR,
9761 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9762 TREE_OPERAND (arg0, 0),
9763 code0 == LSHIFT_EXPR
9764 ? TREE_OPERAND (arg0, 1)
9765 : TREE_OPERAND (arg1, 1));
9766 return fold_convert_loc (loc, type, tem);
9768 else if (code11 == MINUS_EXPR)
9770 tree tree110, tree111;
9771 tree110 = TREE_OPERAND (tree11, 0);
9772 tree111 = TREE_OPERAND (tree11, 1);
9773 STRIP_NOPS (tree110);
9774 STRIP_NOPS (tree111);
9775 if (TREE_CODE (tree110) == INTEGER_CST
9776 && 0 == compare_tree_int (tree110,
9777 element_precision
9778 (TREE_TYPE (TREE_OPERAND
9779 (arg0, 0))))
9780 && operand_equal_p (tree01, tree111, 0))
9781 return
9782 fold_convert_loc (loc, type,
9783 build2 ((code0 == LSHIFT_EXPR
9784 ? LROTATE_EXPR
9785 : RROTATE_EXPR),
9786 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9787 TREE_OPERAND (arg0, 0),
9788 TREE_OPERAND (arg0, 1)));
9790 else if (code01 == MINUS_EXPR)
9792 tree tree010, tree011;
9793 tree010 = TREE_OPERAND (tree01, 0);
9794 tree011 = TREE_OPERAND (tree01, 1);
9795 STRIP_NOPS (tree010);
9796 STRIP_NOPS (tree011);
9797 if (TREE_CODE (tree010) == INTEGER_CST
9798 && 0 == compare_tree_int (tree010,
9799 element_precision
9800 (TREE_TYPE (TREE_OPERAND
9801 (arg0, 0))))
9802 && operand_equal_p (tree11, tree011, 0))
9803 return fold_convert_loc
9804 (loc, type,
9805 build2 ((code0 != LSHIFT_EXPR
9806 ? LROTATE_EXPR
9807 : RROTATE_EXPR),
9808 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9809 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9814 associate:
9815 /* In most languages, can't associate operations on floats through
9816 parentheses. Rather than remember where the parentheses were, we
9817 don't associate floats at all, unless the user has specified
9818 -fassociative-math.
9819 And, we need to make sure type is not saturating. */
9821 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9822 && !TYPE_SATURATING (type))
9824 tree var0, con0, lit0, minus_lit0;
9825 tree var1, con1, lit1, minus_lit1;
9826 tree atype = type;
9827 bool ok = true;
9829 /* Split both trees into variables, constants, and literals. Then
9830 associate each group together, the constants with literals,
9831 then the result with variables. This increases the chances of
9832 literals being recombined later and of generating relocatable
9833 expressions for the sum of a constant and literal. */
9834 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9835 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9836 code == MINUS_EXPR);
9838 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9839 if (code == MINUS_EXPR)
9840 code = PLUS_EXPR;
9842 /* With undefined overflow prefer doing association in a type
9843 which wraps on overflow, if that is one of the operand types. */
9844 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9845 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9847 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9848 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9849 atype = TREE_TYPE (arg0);
9850 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9851 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9852 atype = TREE_TYPE (arg1);
9853 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9856 /* With undefined overflow we can only associate constants with one
9857 variable, and constants whose association doesn't overflow. */
9858 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9859 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9861 if (var0 && var1)
9863 tree tmp0 = var0;
9864 tree tmp1 = var1;
9866 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9867 tmp0 = TREE_OPERAND (tmp0, 0);
9868 if (CONVERT_EXPR_P (tmp0)
9869 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9870 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9871 <= TYPE_PRECISION (atype)))
9872 tmp0 = TREE_OPERAND (tmp0, 0);
9873 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9874 tmp1 = TREE_OPERAND (tmp1, 0);
9875 if (CONVERT_EXPR_P (tmp1)
9876 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9877 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9878 <= TYPE_PRECISION (atype)))
9879 tmp1 = TREE_OPERAND (tmp1, 0);
9880 /* The only case we can still associate with two variables
9881 is if they are the same, modulo negation and bit-pattern
9882 preserving conversions. */
9883 if (!operand_equal_p (tmp0, tmp1, 0))
9884 ok = false;
9888 /* Only do something if we found more than two objects. Otherwise,
9889 nothing has changed and we risk infinite recursion. */
9890 if (ok
9891 && (2 < ((var0 != 0) + (var1 != 0)
9892 + (con0 != 0) + (con1 != 0)
9893 + (lit0 != 0) + (lit1 != 0)
9894 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9896 bool any_overflows = false;
9897 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9898 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9899 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9900 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9901 var0 = associate_trees (loc, var0, var1, code, atype);
9902 con0 = associate_trees (loc, con0, con1, code, atype);
9903 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9904 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9905 code, atype);
9907 /* Preserve the MINUS_EXPR if the negative part of the literal is
9908 greater than the positive part. Otherwise, the multiplicative
9909 folding code (i.e extract_muldiv) may be fooled in case
9910 unsigned constants are subtracted, like in the following
9911 example: ((X*2 + 4) - 8U)/2. */
9912 if (minus_lit0 && lit0)
9914 if (TREE_CODE (lit0) == INTEGER_CST
9915 && TREE_CODE (minus_lit0) == INTEGER_CST
9916 && tree_int_cst_lt (lit0, minus_lit0))
9918 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9919 MINUS_EXPR, atype);
9920 lit0 = 0;
9922 else
9924 lit0 = associate_trees (loc, lit0, minus_lit0,
9925 MINUS_EXPR, atype);
9926 minus_lit0 = 0;
9930 /* Don't introduce overflows through reassociation. */
9931 if (!any_overflows
9932 && ((lit0 && TREE_OVERFLOW_P (lit0))
9933 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9934 return NULL_TREE;
9936 if (minus_lit0)
9938 if (con0 == 0)
9939 return
9940 fold_convert_loc (loc, type,
9941 associate_trees (loc, var0, minus_lit0,
9942 MINUS_EXPR, atype));
9943 else
9945 con0 = associate_trees (loc, con0, minus_lit0,
9946 MINUS_EXPR, atype);
9947 return
9948 fold_convert_loc (loc, type,
9949 associate_trees (loc, var0, con0,
9950 PLUS_EXPR, atype));
9954 con0 = associate_trees (loc, con0, lit0, code, atype);
9955 return
9956 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9957 code, atype));
9961 return NULL_TREE;
9963 case MINUS_EXPR:
9964 /* Pointer simplifications for subtraction, simple reassociations. */
9965 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9967 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9968 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9969 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9971 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9972 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
9973 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
9974 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
9975 return fold_build2_loc (loc, PLUS_EXPR, type,
9976 fold_build2_loc (loc, MINUS_EXPR, type,
9977 arg00, arg10),
9978 fold_build2_loc (loc, MINUS_EXPR, type,
9979 arg01, arg11));
9981 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9982 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9984 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9985 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
9986 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
9987 fold_convert_loc (loc, type, arg1));
9988 if (tmp)
9989 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
9991 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
9992 simplifies. */
9993 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9995 tree arg10 = fold_convert_loc (loc, type,
9996 TREE_OPERAND (arg1, 0));
9997 tree arg11 = fold_convert_loc (loc, type,
9998 TREE_OPERAND (arg1, 1));
9999 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10000 fold_convert_loc (loc, type, arg0),
10001 arg10);
10002 if (tmp)
10003 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10006 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10007 if (TREE_CODE (arg0) == NEGATE_EXPR
10008 && negate_expr_p (arg1)
10009 && reorder_operands_p (arg0, arg1))
10010 return fold_build2_loc (loc, MINUS_EXPR, type,
10011 fold_convert_loc (loc, type,
10012 negate_expr (arg1)),
10013 fold_convert_loc (loc, type,
10014 TREE_OPERAND (arg0, 0)));
10016 if (! FLOAT_TYPE_P (type))
10018 /* Fold A - (A & B) into ~B & A. */
10019 if (!TREE_SIDE_EFFECTS (arg0)
10020 && TREE_CODE (arg1) == BIT_AND_EXPR)
10022 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10024 tree arg10 = fold_convert_loc (loc, type,
10025 TREE_OPERAND (arg1, 0));
10026 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10027 fold_build1_loc (loc, BIT_NOT_EXPR,
10028 type, arg10),
10029 fold_convert_loc (loc, type, arg0));
10031 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10033 tree arg11 = fold_convert_loc (loc,
10034 type, TREE_OPERAND (arg1, 1));
10035 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10036 fold_build1_loc (loc, BIT_NOT_EXPR,
10037 type, arg11),
10038 fold_convert_loc (loc, type, arg0));
10042 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10043 any power of 2 minus 1. */
10044 if (TREE_CODE (arg0) == BIT_AND_EXPR
10045 && TREE_CODE (arg1) == BIT_AND_EXPR
10046 && operand_equal_p (TREE_OPERAND (arg0, 0),
10047 TREE_OPERAND (arg1, 0), 0))
10049 tree mask0 = TREE_OPERAND (arg0, 1);
10050 tree mask1 = TREE_OPERAND (arg1, 1);
10051 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10053 if (operand_equal_p (tem, mask1, 0))
10055 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10056 TREE_OPERAND (arg0, 0), mask1);
10057 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10062 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10063 __complex__ ( x, -y ). This is not the same for SNaNs or if
10064 signed zeros are involved. */
10065 if (!HONOR_SNANS (element_mode (arg0))
10066 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10067 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10069 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10070 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10071 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10072 bool arg0rz = false, arg0iz = false;
10073 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10074 || (arg0i && (arg0iz = real_zerop (arg0i))))
10076 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10077 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10078 if (arg0rz && arg1i && real_zerop (arg1i))
10080 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10081 arg1r ? arg1r
10082 : build1 (REALPART_EXPR, rtype, arg1));
10083 tree ip = arg0i ? arg0i
10084 : build1 (IMAGPART_EXPR, rtype, arg0);
10085 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10087 else if (arg0iz && arg1r && real_zerop (arg1r))
10089 tree rp = arg0r ? arg0r
10090 : build1 (REALPART_EXPR, rtype, arg0);
10091 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10092 arg1i ? arg1i
10093 : build1 (IMAGPART_EXPR, rtype, arg1));
10094 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10099 /* A - B -> A + (-B) if B is easily negatable. */
10100 if (negate_expr_p (arg1)
10101 && !TYPE_OVERFLOW_SANITIZED (type)
10102 && ((FLOAT_TYPE_P (type)
10103 /* Avoid this transformation if B is a positive REAL_CST. */
10104 && (TREE_CODE (arg1) != REAL_CST
10105 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10106 || INTEGRAL_TYPE_P (type)))
10107 return fold_build2_loc (loc, PLUS_EXPR, type,
10108 fold_convert_loc (loc, type, arg0),
10109 fold_convert_loc (loc, type,
10110 negate_expr (arg1)));
10112 /* Fold &a[i] - &a[j] to i-j. */
10113 if (TREE_CODE (arg0) == ADDR_EXPR
10114 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10115 && TREE_CODE (arg1) == ADDR_EXPR
10116 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10118 tree tem = fold_addr_of_array_ref_difference (loc, type,
10119 TREE_OPERAND (arg0, 0),
10120 TREE_OPERAND (arg1, 0));
10121 if (tem)
10122 return tem;
10125 if (FLOAT_TYPE_P (type)
10126 && flag_unsafe_math_optimizations
10127 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10128 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10129 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10130 return tem;
10132 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10133 one. Make sure the type is not saturating and has the signedness of
10134 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10135 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10136 if ((TREE_CODE (arg0) == MULT_EXPR
10137 || TREE_CODE (arg1) == MULT_EXPR)
10138 && !TYPE_SATURATING (type)
10139 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10140 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10141 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10143 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10144 if (tem)
10145 return tem;
10148 goto associate;
10150 case MULT_EXPR:
10151 /* (-A) * (-B) -> A * B */
10152 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10153 return fold_build2_loc (loc, MULT_EXPR, type,
10154 fold_convert_loc (loc, type,
10155 TREE_OPERAND (arg0, 0)),
10156 fold_convert_loc (loc, type,
10157 negate_expr (arg1)));
10158 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10159 return fold_build2_loc (loc, MULT_EXPR, type,
10160 fold_convert_loc (loc, type,
10161 negate_expr (arg0)),
10162 fold_convert_loc (loc, type,
10163 TREE_OPERAND (arg1, 0)));
10165 if (! FLOAT_TYPE_P (type))
10167 /* Transform x * -C into -x * C if x is easily negatable. */
10168 if (TREE_CODE (arg1) == INTEGER_CST
10169 && tree_int_cst_sgn (arg1) == -1
10170 && negate_expr_p (arg0)
10171 && (tem = negate_expr (arg1)) != arg1
10172 && !TREE_OVERFLOW (tem))
10173 return fold_build2_loc (loc, MULT_EXPR, type,
10174 fold_convert_loc (loc, type,
10175 negate_expr (arg0)),
10176 tem);
10178 /* (a * (1 << b)) is (a << b) */
10179 if (TREE_CODE (arg1) == LSHIFT_EXPR
10180 && integer_onep (TREE_OPERAND (arg1, 0)))
10181 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10182 TREE_OPERAND (arg1, 1));
10183 if (TREE_CODE (arg0) == LSHIFT_EXPR
10184 && integer_onep (TREE_OPERAND (arg0, 0)))
10185 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10186 TREE_OPERAND (arg0, 1));
10188 /* (A + A) * C -> A * 2 * C */
10189 if (TREE_CODE (arg0) == PLUS_EXPR
10190 && TREE_CODE (arg1) == INTEGER_CST
10191 && operand_equal_p (TREE_OPERAND (arg0, 0),
10192 TREE_OPERAND (arg0, 1), 0))
10193 return fold_build2_loc (loc, MULT_EXPR, type,
10194 omit_one_operand_loc (loc, type,
10195 TREE_OPERAND (arg0, 0),
10196 TREE_OPERAND (arg0, 1)),
10197 fold_build2_loc (loc, MULT_EXPR, type,
10198 build_int_cst (type, 2) , arg1));
10200 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10201 sign-changing only. */
10202 if (TREE_CODE (arg1) == INTEGER_CST
10203 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10204 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10205 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10207 strict_overflow_p = false;
10208 if (TREE_CODE (arg1) == INTEGER_CST
10209 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10210 &strict_overflow_p)))
10212 if (strict_overflow_p)
10213 fold_overflow_warning (("assuming signed overflow does not "
10214 "occur when simplifying "
10215 "multiplication"),
10216 WARN_STRICT_OVERFLOW_MISC);
10217 return fold_convert_loc (loc, type, tem);
10220 /* Optimize z * conj(z) for integer complex numbers. */
10221 if (TREE_CODE (arg0) == CONJ_EXPR
10222 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10223 return fold_mult_zconjz (loc, type, arg1);
10224 if (TREE_CODE (arg1) == CONJ_EXPR
10225 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10226 return fold_mult_zconjz (loc, type, arg0);
10228 else
10230 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10231 the result for floating point types due to rounding so it is applied
10232 only if -fassociative-math was specify. */
10233 if (flag_associative_math
10234 && TREE_CODE (arg0) == RDIV_EXPR
10235 && TREE_CODE (arg1) == REAL_CST
10236 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10238 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10239 arg1);
10240 if (tem)
10241 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10242 TREE_OPERAND (arg0, 1));
10245 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10246 if (operand_equal_p (arg0, arg1, 0))
10248 tree tem = fold_strip_sign_ops (arg0);
10249 if (tem != NULL_TREE)
10251 tem = fold_convert_loc (loc, type, tem);
10252 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10256 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10257 This is not the same for NaNs or if signed zeros are
10258 involved. */
10259 if (!HONOR_NANS (arg0)
10260 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10261 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10262 && TREE_CODE (arg1) == COMPLEX_CST
10263 && real_zerop (TREE_REALPART (arg1)))
10265 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10266 if (real_onep (TREE_IMAGPART (arg1)))
10267 return
10268 fold_build2_loc (loc, COMPLEX_EXPR, type,
10269 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10270 rtype, arg0)),
10271 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10272 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10273 return
10274 fold_build2_loc (loc, COMPLEX_EXPR, type,
10275 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10276 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10277 rtype, arg0)));
10280 /* Optimize z * conj(z) for floating point complex numbers.
10281 Guarded by flag_unsafe_math_optimizations as non-finite
10282 imaginary components don't produce scalar results. */
10283 if (flag_unsafe_math_optimizations
10284 && TREE_CODE (arg0) == CONJ_EXPR
10285 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10286 return fold_mult_zconjz (loc, type, arg1);
10287 if (flag_unsafe_math_optimizations
10288 && TREE_CODE (arg1) == CONJ_EXPR
10289 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10290 return fold_mult_zconjz (loc, type, arg0);
10292 if (flag_unsafe_math_optimizations)
10294 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10295 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10297 /* Optimizations of root(...)*root(...). */
10298 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10300 tree rootfn, arg;
10301 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10302 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10304 /* Optimize sqrt(x)*sqrt(x) as x. */
10305 if (BUILTIN_SQRT_P (fcode0)
10306 && operand_equal_p (arg00, arg10, 0)
10307 && ! HONOR_SNANS (element_mode (type)))
10308 return arg00;
10310 /* Optimize root(x)*root(y) as root(x*y). */
10311 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10312 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10313 return build_call_expr_loc (loc, rootfn, 1, arg);
10316 /* Optimize expN(x)*expN(y) as expN(x+y). */
10317 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10319 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10320 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10321 CALL_EXPR_ARG (arg0, 0),
10322 CALL_EXPR_ARG (arg1, 0));
10323 return build_call_expr_loc (loc, expfn, 1, arg);
10326 /* Optimizations of pow(...)*pow(...). */
10327 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10328 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10329 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10331 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10332 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10333 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10334 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10336 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10337 if (operand_equal_p (arg01, arg11, 0))
10339 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10340 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10341 arg00, arg10);
10342 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10345 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10346 if (operand_equal_p (arg00, arg10, 0))
10348 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10349 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10350 arg01, arg11);
10351 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10355 /* Optimize tan(x)*cos(x) as sin(x). */
10356 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10357 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10358 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10359 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10360 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10361 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10362 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10363 CALL_EXPR_ARG (arg1, 0), 0))
10365 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10367 if (sinfn != NULL_TREE)
10368 return build_call_expr_loc (loc, sinfn, 1,
10369 CALL_EXPR_ARG (arg0, 0));
10372 /* Optimize x*pow(x,c) as pow(x,c+1). */
10373 if (fcode1 == BUILT_IN_POW
10374 || fcode1 == BUILT_IN_POWF
10375 || fcode1 == BUILT_IN_POWL)
10377 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10378 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10379 if (TREE_CODE (arg11) == REAL_CST
10380 && !TREE_OVERFLOW (arg11)
10381 && operand_equal_p (arg0, arg10, 0))
10383 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10384 REAL_VALUE_TYPE c;
10385 tree arg;
10387 c = TREE_REAL_CST (arg11);
10388 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10389 arg = build_real (type, c);
10390 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10394 /* Optimize pow(x,c)*x as pow(x,c+1). */
10395 if (fcode0 == BUILT_IN_POW
10396 || fcode0 == BUILT_IN_POWF
10397 || fcode0 == BUILT_IN_POWL)
10399 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10400 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10401 if (TREE_CODE (arg01) == REAL_CST
10402 && !TREE_OVERFLOW (arg01)
10403 && operand_equal_p (arg1, arg00, 0))
10405 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10406 REAL_VALUE_TYPE c;
10407 tree arg;
10409 c = TREE_REAL_CST (arg01);
10410 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10411 arg = build_real (type, c);
10412 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10416 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10417 if (!in_gimple_form
10418 && optimize
10419 && operand_equal_p (arg0, arg1, 0))
10421 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10423 if (powfn)
10425 tree arg = build_real (type, dconst2);
10426 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10431 goto associate;
10433 case BIT_IOR_EXPR:
10434 bit_ior:
10435 /* Canonicalize (X & C1) | C2. */
10436 if (TREE_CODE (arg0) == BIT_AND_EXPR
10437 && TREE_CODE (arg1) == INTEGER_CST
10438 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10440 int width = TYPE_PRECISION (type), w;
10441 wide_int c1 = TREE_OPERAND (arg0, 1);
10442 wide_int c2 = arg1;
10444 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10445 if ((c1 & c2) == c1)
10446 return omit_one_operand_loc (loc, type, arg1,
10447 TREE_OPERAND (arg0, 0));
10449 wide_int msk = wi::mask (width, false,
10450 TYPE_PRECISION (TREE_TYPE (arg1)));
10452 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10453 if (msk.and_not (c1 | c2) == 0)
10454 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10455 TREE_OPERAND (arg0, 0), arg1);
10457 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10458 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10459 mode which allows further optimizations. */
10460 c1 &= msk;
10461 c2 &= msk;
10462 wide_int c3 = c1.and_not (c2);
10463 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10465 wide_int mask = wi::mask (w, false,
10466 TYPE_PRECISION (type));
10467 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10469 c3 = mask;
10470 break;
10474 if (c3 != c1)
10475 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10476 fold_build2_loc (loc, BIT_AND_EXPR, type,
10477 TREE_OPERAND (arg0, 0),
10478 wide_int_to_tree (type,
10479 c3)),
10480 arg1);
10483 /* (X & ~Y) | (~X & Y) is X ^ Y */
10484 if (TREE_CODE (arg0) == BIT_AND_EXPR
10485 && TREE_CODE (arg1) == BIT_AND_EXPR)
10487 tree a0, a1, l0, l1, n0, n1;
10489 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10490 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10492 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10493 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10495 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
10496 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
10498 if ((operand_equal_p (n0, a0, 0)
10499 && operand_equal_p (n1, a1, 0))
10500 || (operand_equal_p (n0, a1, 0)
10501 && operand_equal_p (n1, a0, 0)))
10502 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
10505 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10506 if (t1 != NULL_TREE)
10507 return t1;
10509 /* See if this can be simplified into a rotate first. If that
10510 is unsuccessful continue in the association code. */
10511 goto bit_rotate;
10513 case BIT_XOR_EXPR:
10514 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10515 if (TREE_CODE (arg0) == BIT_AND_EXPR
10516 && INTEGRAL_TYPE_P (type)
10517 && integer_onep (TREE_OPERAND (arg0, 1))
10518 && integer_onep (arg1))
10519 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10520 build_zero_cst (TREE_TYPE (arg0)));
10522 /* See if this can be simplified into a rotate first. If that
10523 is unsuccessful continue in the association code. */
10524 goto bit_rotate;
10526 case BIT_AND_EXPR:
10527 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
10528 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
10529 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
10530 || (TREE_CODE (arg0) == EQ_EXPR
10531 && integer_zerop (TREE_OPERAND (arg0, 1))))
10532 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10533 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10535 /* X & ~X , X & (X == 0), and X & !X are always zero. */
10536 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
10537 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
10538 || (TREE_CODE (arg1) == EQ_EXPR
10539 && integer_zerop (TREE_OPERAND (arg1, 1))))
10540 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10541 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10543 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10544 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10545 && INTEGRAL_TYPE_P (type)
10546 && integer_onep (TREE_OPERAND (arg0, 1))
10547 && integer_onep (arg1))
10549 tree tem2;
10550 tem = TREE_OPERAND (arg0, 0);
10551 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10552 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10553 tem, tem2);
10554 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10555 build_zero_cst (TREE_TYPE (tem)));
10557 /* Fold ~X & 1 as (X & 1) == 0. */
10558 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10559 && INTEGRAL_TYPE_P (type)
10560 && integer_onep (arg1))
10562 tree tem2;
10563 tem = TREE_OPERAND (arg0, 0);
10564 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10565 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10566 tem, tem2);
10567 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10568 build_zero_cst (TREE_TYPE (tem)));
10570 /* Fold !X & 1 as X == 0. */
10571 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10572 && integer_onep (arg1))
10574 tem = TREE_OPERAND (arg0, 0);
10575 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10576 build_zero_cst (TREE_TYPE (tem)));
10579 /* Fold (X ^ Y) & Y as ~X & Y. */
10580 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10581 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10583 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10584 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10585 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10586 fold_convert_loc (loc, type, arg1));
10588 /* Fold (X ^ Y) & X as ~Y & X. */
10589 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10590 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10591 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10593 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10594 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10595 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10596 fold_convert_loc (loc, type, arg1));
10598 /* Fold X & (X ^ Y) as X & ~Y. */
10599 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10600 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10602 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10603 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10604 fold_convert_loc (loc, type, arg0),
10605 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10607 /* Fold X & (Y ^ X) as ~Y & X. */
10608 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10609 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10610 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10612 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10613 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10614 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10615 fold_convert_loc (loc, type, arg0));
10618 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10619 multiple of 1 << CST. */
10620 if (TREE_CODE (arg1) == INTEGER_CST)
10622 wide_int cst1 = arg1;
10623 wide_int ncst1 = -cst1;
10624 if ((cst1 & ncst1) == ncst1
10625 && multiple_of_p (type, arg0,
10626 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10627 return fold_convert_loc (loc, type, arg0);
10630 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10631 bits from CST2. */
10632 if (TREE_CODE (arg1) == INTEGER_CST
10633 && TREE_CODE (arg0) == MULT_EXPR
10634 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10636 wide_int warg1 = arg1;
10637 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10639 if (masked == 0)
10640 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10641 arg0, arg1);
10642 else if (masked != warg1)
10644 /* Avoid the transform if arg1 is a mask of some
10645 mode which allows further optimizations. */
10646 int pop = wi::popcount (warg1);
10647 if (!(pop >= BITS_PER_UNIT
10648 && exact_log2 (pop) != -1
10649 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10650 return fold_build2_loc (loc, code, type, op0,
10651 wide_int_to_tree (type, masked));
10655 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10656 ((A & N) + B) & M -> (A + B) & M
10657 Similarly if (N & M) == 0,
10658 ((A | N) + B) & M -> (A + B) & M
10659 and for - instead of + (or unary - instead of +)
10660 and/or ^ instead of |.
10661 If B is constant and (B & M) == 0, fold into A & M. */
10662 if (TREE_CODE (arg1) == INTEGER_CST)
10664 wide_int cst1 = arg1;
10665 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10666 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10667 && (TREE_CODE (arg0) == PLUS_EXPR
10668 || TREE_CODE (arg0) == MINUS_EXPR
10669 || TREE_CODE (arg0) == NEGATE_EXPR)
10670 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10671 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10673 tree pmop[2];
10674 int which = 0;
10675 wide_int cst0;
10677 /* Now we know that arg0 is (C + D) or (C - D) or
10678 -C and arg1 (M) is == (1LL << cst) - 1.
10679 Store C into PMOP[0] and D into PMOP[1]. */
10680 pmop[0] = TREE_OPERAND (arg0, 0);
10681 pmop[1] = NULL;
10682 if (TREE_CODE (arg0) != NEGATE_EXPR)
10684 pmop[1] = TREE_OPERAND (arg0, 1);
10685 which = 1;
10688 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10689 which = -1;
10691 for (; which >= 0; which--)
10692 switch (TREE_CODE (pmop[which]))
10694 case BIT_AND_EXPR:
10695 case BIT_IOR_EXPR:
10696 case BIT_XOR_EXPR:
10697 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10698 != INTEGER_CST)
10699 break;
10700 cst0 = TREE_OPERAND (pmop[which], 1);
10701 cst0 &= cst1;
10702 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10704 if (cst0 != cst1)
10705 break;
10707 else if (cst0 != 0)
10708 break;
10709 /* If C or D is of the form (A & N) where
10710 (N & M) == M, or of the form (A | N) or
10711 (A ^ N) where (N & M) == 0, replace it with A. */
10712 pmop[which] = TREE_OPERAND (pmop[which], 0);
10713 break;
10714 case INTEGER_CST:
10715 /* If C or D is a N where (N & M) == 0, it can be
10716 omitted (assumed 0). */
10717 if ((TREE_CODE (arg0) == PLUS_EXPR
10718 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10719 && (cst1 & pmop[which]) == 0)
10720 pmop[which] = NULL;
10721 break;
10722 default:
10723 break;
10726 /* Only build anything new if we optimized one or both arguments
10727 above. */
10728 if (pmop[0] != TREE_OPERAND (arg0, 0)
10729 || (TREE_CODE (arg0) != NEGATE_EXPR
10730 && pmop[1] != TREE_OPERAND (arg0, 1)))
10732 tree utype = TREE_TYPE (arg0);
10733 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10735 /* Perform the operations in a type that has defined
10736 overflow behavior. */
10737 utype = unsigned_type_for (TREE_TYPE (arg0));
10738 if (pmop[0] != NULL)
10739 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10740 if (pmop[1] != NULL)
10741 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10744 if (TREE_CODE (arg0) == NEGATE_EXPR)
10745 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10746 else if (TREE_CODE (arg0) == PLUS_EXPR)
10748 if (pmop[0] != NULL && pmop[1] != NULL)
10749 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10750 pmop[0], pmop[1]);
10751 else if (pmop[0] != NULL)
10752 tem = pmop[0];
10753 else if (pmop[1] != NULL)
10754 tem = pmop[1];
10755 else
10756 return build_int_cst (type, 0);
10758 else if (pmop[0] == NULL)
10759 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10760 else
10761 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10762 pmop[0], pmop[1]);
10763 /* TEM is now the new binary +, - or unary - replacement. */
10764 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10765 fold_convert_loc (loc, utype, arg1));
10766 return fold_convert_loc (loc, type, tem);
10771 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
10772 if (t1 != NULL_TREE)
10773 return t1;
10774 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10775 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10776 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10778 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10780 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10781 if (mask == -1)
10782 return
10783 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10786 goto associate;
10788 case RDIV_EXPR:
10789 /* Don't touch a floating-point divide by zero unless the mode
10790 of the constant can represent infinity. */
10791 if (TREE_CODE (arg1) == REAL_CST
10792 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10793 && real_zerop (arg1))
10794 return NULL_TREE;
10796 /* (-A) / (-B) -> A / B */
10797 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10798 return fold_build2_loc (loc, RDIV_EXPR, type,
10799 TREE_OPERAND (arg0, 0),
10800 negate_expr (arg1));
10801 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10802 return fold_build2_loc (loc, RDIV_EXPR, type,
10803 negate_expr (arg0),
10804 TREE_OPERAND (arg1, 0));
10806 /* Convert A/B/C to A/(B*C). */
10807 if (flag_reciprocal_math
10808 && TREE_CODE (arg0) == RDIV_EXPR)
10809 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10810 fold_build2_loc (loc, MULT_EXPR, type,
10811 TREE_OPERAND (arg0, 1), arg1));
10813 /* Convert A/(B/C) to (A/B)*C. */
10814 if (flag_reciprocal_math
10815 && TREE_CODE (arg1) == RDIV_EXPR)
10816 return fold_build2_loc (loc, MULT_EXPR, type,
10817 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
10818 TREE_OPERAND (arg1, 0)),
10819 TREE_OPERAND (arg1, 1));
10821 /* Convert C1/(X*C2) into (C1/C2)/X. */
10822 if (flag_reciprocal_math
10823 && TREE_CODE (arg1) == MULT_EXPR
10824 && TREE_CODE (arg0) == REAL_CST
10825 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10827 tree tem = const_binop (RDIV_EXPR, arg0,
10828 TREE_OPERAND (arg1, 1));
10829 if (tem)
10830 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10831 TREE_OPERAND (arg1, 0));
10834 if (flag_unsafe_math_optimizations)
10836 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10837 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10839 /* Optimize sin(x)/cos(x) as tan(x). */
10840 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10841 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10842 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10843 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10844 CALL_EXPR_ARG (arg1, 0), 0))
10846 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10848 if (tanfn != NULL_TREE)
10849 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10852 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10853 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10854 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10855 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10856 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10857 CALL_EXPR_ARG (arg1, 0), 0))
10859 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10861 if (tanfn != NULL_TREE)
10863 tree tmp = build_call_expr_loc (loc, tanfn, 1,
10864 CALL_EXPR_ARG (arg0, 0));
10865 return fold_build2_loc (loc, RDIV_EXPR, type,
10866 build_real (type, dconst1), tmp);
10870 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10871 NaNs or Infinities. */
10872 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10873 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10874 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10876 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10877 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10879 if (! HONOR_NANS (arg00)
10880 && ! HONOR_INFINITIES (element_mode (arg00))
10881 && operand_equal_p (arg00, arg01, 0))
10883 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10885 if (cosfn != NULL_TREE)
10886 return build_call_expr_loc (loc, cosfn, 1, arg00);
10890 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10891 NaNs or Infinities. */
10892 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10893 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10894 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10896 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10897 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10899 if (! HONOR_NANS (arg00)
10900 && ! HONOR_INFINITIES (element_mode (arg00))
10901 && operand_equal_p (arg00, arg01, 0))
10903 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10905 if (cosfn != NULL_TREE)
10907 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
10908 return fold_build2_loc (loc, RDIV_EXPR, type,
10909 build_real (type, dconst1),
10910 tmp);
10915 /* Optimize pow(x,c)/x as pow(x,c-1). */
10916 if (fcode0 == BUILT_IN_POW
10917 || fcode0 == BUILT_IN_POWF
10918 || fcode0 == BUILT_IN_POWL)
10920 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10921 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10922 if (TREE_CODE (arg01) == REAL_CST
10923 && !TREE_OVERFLOW (arg01)
10924 && operand_equal_p (arg1, arg00, 0))
10926 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10927 REAL_VALUE_TYPE c;
10928 tree arg;
10930 c = TREE_REAL_CST (arg01);
10931 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10932 arg = build_real (type, c);
10933 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10937 /* Optimize a/root(b/c) into a*root(c/b). */
10938 if (BUILTIN_ROOT_P (fcode1))
10940 tree rootarg = CALL_EXPR_ARG (arg1, 0);
10942 if (TREE_CODE (rootarg) == RDIV_EXPR)
10944 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10945 tree b = TREE_OPERAND (rootarg, 0);
10946 tree c = TREE_OPERAND (rootarg, 1);
10948 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
10950 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
10951 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
10955 /* Optimize x/expN(y) into x*expN(-y). */
10956 if (BUILTIN_EXPONENT_P (fcode1))
10958 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10959 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
10960 arg1 = build_call_expr_loc (loc,
10961 expfn, 1,
10962 fold_convert_loc (loc, type, arg));
10963 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
10966 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10967 if (fcode1 == BUILT_IN_POW
10968 || fcode1 == BUILT_IN_POWF
10969 || fcode1 == BUILT_IN_POWL)
10971 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10972 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10973 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10974 tree neg11 = fold_convert_loc (loc, type,
10975 negate_expr (arg11));
10976 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
10977 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
10980 return NULL_TREE;
10982 case TRUNC_DIV_EXPR:
10983 /* Optimize (X & (-A)) / A where A is a power of 2,
10984 to X >> log2(A) */
10985 if (TREE_CODE (arg0) == BIT_AND_EXPR
10986 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
10987 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
10989 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
10990 arg1, TREE_OPERAND (arg0, 1));
10991 if (sum && integer_zerop (sum)) {
10992 tree pow2 = build_int_cst (integer_type_node,
10993 wi::exact_log2 (arg1));
10994 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10995 TREE_OPERAND (arg0, 0), pow2);
10999 /* Fall through */
11001 case FLOOR_DIV_EXPR:
11002 /* Simplify A / (B << N) where A and B are positive and B is
11003 a power of 2, to A >> (N + log2(B)). */
11004 strict_overflow_p = false;
11005 if (TREE_CODE (arg1) == LSHIFT_EXPR
11006 && (TYPE_UNSIGNED (type)
11007 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11009 tree sval = TREE_OPERAND (arg1, 0);
11010 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11012 tree sh_cnt = TREE_OPERAND (arg1, 1);
11013 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11014 wi::exact_log2 (sval));
11016 if (strict_overflow_p)
11017 fold_overflow_warning (("assuming signed overflow does not "
11018 "occur when simplifying A / (B << N)"),
11019 WARN_STRICT_OVERFLOW_MISC);
11021 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11022 sh_cnt, pow2);
11023 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11024 fold_convert_loc (loc, type, arg0), sh_cnt);
11028 /* Fall through */
11030 case ROUND_DIV_EXPR:
11031 case CEIL_DIV_EXPR:
11032 case EXACT_DIV_EXPR:
11033 if (integer_zerop (arg1))
11034 return NULL_TREE;
11036 /* Convert -A / -B to A / B when the type is signed and overflow is
11037 undefined. */
11038 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11039 && TREE_CODE (arg0) == NEGATE_EXPR
11040 && negate_expr_p (arg1))
11042 if (INTEGRAL_TYPE_P (type))
11043 fold_overflow_warning (("assuming signed overflow does not occur "
11044 "when distributing negation across "
11045 "division"),
11046 WARN_STRICT_OVERFLOW_MISC);
11047 return fold_build2_loc (loc, code, type,
11048 fold_convert_loc (loc, type,
11049 TREE_OPERAND (arg0, 0)),
11050 fold_convert_loc (loc, type,
11051 negate_expr (arg1)));
11053 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11054 && TREE_CODE (arg1) == NEGATE_EXPR
11055 && negate_expr_p (arg0))
11057 if (INTEGRAL_TYPE_P (type))
11058 fold_overflow_warning (("assuming signed overflow does not occur "
11059 "when distributing negation across "
11060 "division"),
11061 WARN_STRICT_OVERFLOW_MISC);
11062 return fold_build2_loc (loc, code, type,
11063 fold_convert_loc (loc, type,
11064 negate_expr (arg0)),
11065 fold_convert_loc (loc, type,
11066 TREE_OPERAND (arg1, 0)));
11069 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11070 operation, EXACT_DIV_EXPR.
11072 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11073 At one time others generated faster code, it's not clear if they do
11074 after the last round to changes to the DIV code in expmed.c. */
11075 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11076 && multiple_of_p (type, arg0, arg1))
11077 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11079 strict_overflow_p = false;
11080 if (TREE_CODE (arg1) == INTEGER_CST
11081 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11082 &strict_overflow_p)))
11084 if (strict_overflow_p)
11085 fold_overflow_warning (("assuming signed overflow does not occur "
11086 "when simplifying division"),
11087 WARN_STRICT_OVERFLOW_MISC);
11088 return fold_convert_loc (loc, type, tem);
11091 return NULL_TREE;
11093 case CEIL_MOD_EXPR:
11094 case FLOOR_MOD_EXPR:
11095 case ROUND_MOD_EXPR:
11096 case TRUNC_MOD_EXPR:
11097 strict_overflow_p = false;
11098 if (TREE_CODE (arg1) == INTEGER_CST
11099 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11100 &strict_overflow_p)))
11102 if (strict_overflow_p)
11103 fold_overflow_warning (("assuming signed overflow does not occur "
11104 "when simplifying modulus"),
11105 WARN_STRICT_OVERFLOW_MISC);
11106 return fold_convert_loc (loc, type, tem);
11109 return NULL_TREE;
11111 case LROTATE_EXPR:
11112 case RROTATE_EXPR:
11113 case RSHIFT_EXPR:
11114 case LSHIFT_EXPR:
11115 /* Since negative shift count is not well-defined,
11116 don't try to compute it in the compiler. */
11117 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11118 return NULL_TREE;
11120 prec = element_precision (type);
11122 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11123 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
11124 && tree_to_uhwi (arg1) < prec
11125 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11126 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11128 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11129 + tree_to_uhwi (arg1));
11131 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11132 being well defined. */
11133 if (low >= prec)
11135 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11136 low = low % prec;
11137 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11138 return omit_one_operand_loc (loc, type, build_zero_cst (type),
11139 TREE_OPERAND (arg0, 0));
11140 else
11141 low = prec - 1;
11144 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11145 build_int_cst (TREE_TYPE (arg1), low));
11148 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11149 into x & ((unsigned)-1 >> c) for unsigned types. */
11150 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11151 || (TYPE_UNSIGNED (type)
11152 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11153 && tree_fits_uhwi_p (arg1)
11154 && tree_to_uhwi (arg1) < prec
11155 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11156 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11158 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11159 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
11160 tree lshift;
11161 tree arg00;
11163 if (low0 == low1)
11165 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11167 lshift = build_minus_one_cst (type);
11168 lshift = const_binop (code, lshift, arg1);
11170 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
11174 /* If we have a rotate of a bit operation with the rotate count and
11175 the second operand of the bit operation both constant,
11176 permute the two operations. */
11177 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11178 && (TREE_CODE (arg0) == BIT_AND_EXPR
11179 || TREE_CODE (arg0) == BIT_IOR_EXPR
11180 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11181 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11182 return fold_build2_loc (loc, TREE_CODE (arg0), type,
11183 fold_build2_loc (loc, code, type,
11184 TREE_OPERAND (arg0, 0), arg1),
11185 fold_build2_loc (loc, code, type,
11186 TREE_OPERAND (arg0, 1), arg1));
11188 /* Two consecutive rotates adding up to the some integer
11189 multiple of the precision of the type can be ignored. */
11190 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11191 && TREE_CODE (arg0) == RROTATE_EXPR
11192 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11193 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
11194 prec) == 0)
11195 return TREE_OPERAND (arg0, 0);
11197 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11198 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11199 if the latter can be further optimized. */
11200 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11201 && TREE_CODE (arg0) == BIT_AND_EXPR
11202 && TREE_CODE (arg1) == INTEGER_CST
11203 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11205 tree mask = fold_build2_loc (loc, code, type,
11206 fold_convert_loc (loc, type,
11207 TREE_OPERAND (arg0, 1)),
11208 arg1);
11209 tree shift = fold_build2_loc (loc, code, type,
11210 fold_convert_loc (loc, type,
11211 TREE_OPERAND (arg0, 0)),
11212 arg1);
11213 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
11214 if (tem)
11215 return tem;
11218 return NULL_TREE;
11220 case MIN_EXPR:
11221 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
11222 if (tem)
11223 return tem;
11224 goto associate;
11226 case MAX_EXPR:
11227 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
11228 if (tem)
11229 return tem;
11230 goto associate;
11232 case TRUTH_ANDIF_EXPR:
11233 /* Note that the operands of this must be ints
11234 and their values must be 0 or 1.
11235 ("true" is a fixed value perhaps depending on the language.) */
11236 /* If first arg is constant zero, return it. */
11237 if (integer_zerop (arg0))
11238 return fold_convert_loc (loc, type, arg0);
11239 case TRUTH_AND_EXPR:
11240 /* If either arg is constant true, drop it. */
11241 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11242 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11243 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11244 /* Preserve sequence points. */
11245 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11246 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11247 /* If second arg is constant zero, result is zero, but first arg
11248 must be evaluated. */
11249 if (integer_zerop (arg1))
11250 return omit_one_operand_loc (loc, type, arg1, arg0);
11251 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11252 case will be handled here. */
11253 if (integer_zerop (arg0))
11254 return omit_one_operand_loc (loc, type, arg0, arg1);
11256 /* !X && X is always false. */
11257 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11258 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11259 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11260 /* X && !X is always false. */
11261 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11262 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11263 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11265 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11266 means A >= Y && A != MAX, but in this case we know that
11267 A < X <= MAX. */
11269 if (!TREE_SIDE_EFFECTS (arg0)
11270 && !TREE_SIDE_EFFECTS (arg1))
11272 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11273 if (tem && !operand_equal_p (tem, arg0, 0))
11274 return fold_build2_loc (loc, code, type, tem, arg1);
11276 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11277 if (tem && !operand_equal_p (tem, arg1, 0))
11278 return fold_build2_loc (loc, code, type, arg0, tem);
11281 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11282 != NULL_TREE)
11283 return tem;
11285 return NULL_TREE;
11287 case TRUTH_ORIF_EXPR:
11288 /* Note that the operands of this must be ints
11289 and their values must be 0 or true.
11290 ("true" is a fixed value perhaps depending on the language.) */
11291 /* If first arg is constant true, return it. */
11292 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11293 return fold_convert_loc (loc, type, arg0);
11294 case TRUTH_OR_EXPR:
11295 /* If either arg is constant zero, drop it. */
11296 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11297 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11298 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11299 /* Preserve sequence points. */
11300 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11301 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11302 /* If second arg is constant true, result is true, but we must
11303 evaluate first arg. */
11304 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11305 return omit_one_operand_loc (loc, type, arg1, arg0);
11306 /* Likewise for first arg, but note this only occurs here for
11307 TRUTH_OR_EXPR. */
11308 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11309 return omit_one_operand_loc (loc, type, arg0, arg1);
11311 /* !X || X is always true. */
11312 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11313 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11314 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11315 /* X || !X is always true. */
11316 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11317 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11318 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11320 /* (X && !Y) || (!X && Y) is X ^ Y */
11321 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
11322 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
11324 tree a0, a1, l0, l1, n0, n1;
11326 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11327 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11329 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11330 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11332 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
11333 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
11335 if ((operand_equal_p (n0, a0, 0)
11336 && operand_equal_p (n1, a1, 0))
11337 || (operand_equal_p (n0, a1, 0)
11338 && operand_equal_p (n1, a0, 0)))
11339 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
11342 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11343 != NULL_TREE)
11344 return tem;
11346 return NULL_TREE;
11348 case TRUTH_XOR_EXPR:
11349 /* If the second arg is constant zero, drop it. */
11350 if (integer_zerop (arg1))
11351 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11352 /* If the second arg is constant true, this is a logical inversion. */
11353 if (integer_onep (arg1))
11355 tem = invert_truthvalue_loc (loc, arg0);
11356 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
11358 /* Identical arguments cancel to zero. */
11359 if (operand_equal_p (arg0, arg1, 0))
11360 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11362 /* !X ^ X is always true. */
11363 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11364 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11365 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11367 /* X ^ !X is always true. */
11368 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11369 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11370 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11372 return NULL_TREE;
11374 case EQ_EXPR:
11375 case NE_EXPR:
11376 STRIP_NOPS (arg0);
11377 STRIP_NOPS (arg1);
11379 tem = fold_comparison (loc, code, type, op0, op1);
11380 if (tem != NULL_TREE)
11381 return tem;
11383 /* bool_var != 0 becomes bool_var. */
11384 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11385 && code == NE_EXPR)
11386 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11388 /* bool_var == 1 becomes bool_var. */
11389 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11390 && code == EQ_EXPR)
11391 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11393 /* bool_var != 1 becomes !bool_var. */
11394 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11395 && code == NE_EXPR)
11396 return fold_convert_loc (loc, type,
11397 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11398 TREE_TYPE (arg0), arg0));
11400 /* bool_var == 0 becomes !bool_var. */
11401 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11402 && code == EQ_EXPR)
11403 return fold_convert_loc (loc, type,
11404 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11405 TREE_TYPE (arg0), arg0));
11407 /* !exp != 0 becomes !exp */
11408 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
11409 && code == NE_EXPR)
11410 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11412 /* If this is an equality comparison of the address of two non-weak,
11413 unaliased symbols neither of which are extern (since we do not
11414 have access to attributes for externs), then we know the result. */
11415 if (TREE_CODE (arg0) == ADDR_EXPR
11416 && DECL_P (TREE_OPERAND (arg0, 0))
11417 && TREE_CODE (arg1) == ADDR_EXPR
11418 && DECL_P (TREE_OPERAND (arg1, 0)))
11420 int equal;
11422 if (decl_in_symtab_p (TREE_OPERAND (arg0, 0))
11423 && decl_in_symtab_p (TREE_OPERAND (arg1, 0)))
11424 equal = symtab_node::get_create (TREE_OPERAND (arg0, 0))
11425 ->equal_address_to (symtab_node::get_create
11426 (TREE_OPERAND (arg1, 0)));
11427 else
11428 equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11429 if (equal != 2)
11430 return constant_boolean_node (equal
11431 ? code == EQ_EXPR : code != EQ_EXPR,
11432 type);
11435 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11436 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11437 && TREE_CODE (arg1) == INTEGER_CST
11438 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11439 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11440 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
11441 fold_convert_loc (loc,
11442 TREE_TYPE (arg0),
11443 arg1),
11444 TREE_OPERAND (arg0, 1)));
11446 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
11447 if ((TREE_CODE (arg0) == PLUS_EXPR
11448 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
11449 || TREE_CODE (arg0) == MINUS_EXPR)
11450 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
11451 0)),
11452 arg1, 0)
11453 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11454 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11456 tree val = TREE_OPERAND (arg0, 1);
11457 return omit_two_operands_loc (loc, type,
11458 fold_build2_loc (loc, code, type,
11459 val,
11460 build_int_cst (TREE_TYPE (val),
11461 0)),
11462 TREE_OPERAND (arg0, 0), arg1);
11465 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
11466 if (TREE_CODE (arg0) == MINUS_EXPR
11467 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
11468 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
11469 1)),
11470 arg1, 0)
11471 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
11473 return omit_two_operands_loc (loc, type,
11474 code == NE_EXPR
11475 ? boolean_true_node : boolean_false_node,
11476 TREE_OPERAND (arg0, 1), arg1);
11479 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11480 if (TREE_CODE (arg0) == ABS_EXPR
11481 && (integer_zerop (arg1) || real_zerop (arg1)))
11482 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
11484 /* If this is an EQ or NE comparison with zero and ARG0 is
11485 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11486 two operations, but the latter can be done in one less insn
11487 on machines that have only two-operand insns or on which a
11488 constant cannot be the first operand. */
11489 if (TREE_CODE (arg0) == BIT_AND_EXPR
11490 && integer_zerop (arg1))
11492 tree arg00 = TREE_OPERAND (arg0, 0);
11493 tree arg01 = TREE_OPERAND (arg0, 1);
11494 if (TREE_CODE (arg00) == LSHIFT_EXPR
11495 && integer_onep (TREE_OPERAND (arg00, 0)))
11497 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
11498 arg01, TREE_OPERAND (arg00, 1));
11499 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11500 build_int_cst (TREE_TYPE (arg0), 1));
11501 return fold_build2_loc (loc, code, type,
11502 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11503 arg1);
11505 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11506 && integer_onep (TREE_OPERAND (arg01, 0)))
11508 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
11509 arg00, TREE_OPERAND (arg01, 1));
11510 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11511 build_int_cst (TREE_TYPE (arg0), 1));
11512 return fold_build2_loc (loc, code, type,
11513 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11514 arg1);
11518 /* If this is an NE or EQ comparison of zero against the result of a
11519 signed MOD operation whose second operand is a power of 2, make
11520 the MOD operation unsigned since it is simpler and equivalent. */
11521 if (integer_zerop (arg1)
11522 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11523 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11524 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11525 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11526 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11527 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11529 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
11530 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
11531 fold_convert_loc (loc, newtype,
11532 TREE_OPERAND (arg0, 0)),
11533 fold_convert_loc (loc, newtype,
11534 TREE_OPERAND (arg0, 1)));
11536 return fold_build2_loc (loc, code, type, newmod,
11537 fold_convert_loc (loc, newtype, arg1));
11540 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11541 C1 is a valid shift constant, and C2 is a power of two, i.e.
11542 a single bit. */
11543 if (TREE_CODE (arg0) == BIT_AND_EXPR
11544 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11545 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11546 == INTEGER_CST
11547 && integer_pow2p (TREE_OPERAND (arg0, 1))
11548 && integer_zerop (arg1))
11550 tree itype = TREE_TYPE (arg0);
11551 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11552 prec = TYPE_PRECISION (itype);
11554 /* Check for a valid shift count. */
11555 if (wi::ltu_p (arg001, prec))
11557 tree arg01 = TREE_OPERAND (arg0, 1);
11558 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11559 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11560 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11561 can be rewritten as (X & (C2 << C1)) != 0. */
11562 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11564 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
11565 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
11566 return fold_build2_loc (loc, code, type, tem,
11567 fold_convert_loc (loc, itype, arg1));
11569 /* Otherwise, for signed (arithmetic) shifts,
11570 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11571 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11572 else if (!TYPE_UNSIGNED (itype))
11573 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11574 arg000, build_int_cst (itype, 0));
11575 /* Otherwise, of unsigned (logical) shifts,
11576 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11577 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11578 else
11579 return omit_one_operand_loc (loc, type,
11580 code == EQ_EXPR ? integer_one_node
11581 : integer_zero_node,
11582 arg000);
11586 /* If we have (A & C) == C where C is a power of 2, convert this into
11587 (A & C) != 0. Similarly for NE_EXPR. */
11588 if (TREE_CODE (arg0) == BIT_AND_EXPR
11589 && integer_pow2p (TREE_OPERAND (arg0, 1))
11590 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11591 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11592 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
11593 integer_zero_node));
11595 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11596 bit, then fold the expression into A < 0 or A >= 0. */
11597 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
11598 if (tem)
11599 return tem;
11601 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11602 Similarly for NE_EXPR. */
11603 if (TREE_CODE (arg0) == BIT_AND_EXPR
11604 && TREE_CODE (arg1) == INTEGER_CST
11605 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11607 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
11608 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11609 TREE_OPERAND (arg0, 1));
11610 tree dandnotc
11611 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
11612 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
11613 notc);
11614 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11615 if (integer_nonzerop (dandnotc))
11616 return omit_one_operand_loc (loc, type, rslt, arg0);
11619 /* If this is a comparison of a field, we may be able to simplify it. */
11620 if ((TREE_CODE (arg0) == COMPONENT_REF
11621 || TREE_CODE (arg0) == BIT_FIELD_REF)
11622 /* Handle the constant case even without -O
11623 to make sure the warnings are given. */
11624 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11626 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
11627 if (t1)
11628 return t1;
11631 /* Optimize comparisons of strlen vs zero to a compare of the
11632 first character of the string vs zero. To wit,
11633 strlen(ptr) == 0 => *ptr == 0
11634 strlen(ptr) != 0 => *ptr != 0
11635 Other cases should reduce to one of these two (or a constant)
11636 due to the return value of strlen being unsigned. */
11637 if (TREE_CODE (arg0) == CALL_EXPR
11638 && integer_zerop (arg1))
11640 tree fndecl = get_callee_fndecl (arg0);
11642 if (fndecl
11643 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
11644 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
11645 && call_expr_nargs (arg0) == 1
11646 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
11648 tree iref = build_fold_indirect_ref_loc (loc,
11649 CALL_EXPR_ARG (arg0, 0));
11650 return fold_build2_loc (loc, code, type, iref,
11651 build_int_cst (TREE_TYPE (iref), 0));
11655 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11656 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11657 if (TREE_CODE (arg0) == RSHIFT_EXPR
11658 && integer_zerop (arg1)
11659 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11661 tree arg00 = TREE_OPERAND (arg0, 0);
11662 tree arg01 = TREE_OPERAND (arg0, 1);
11663 tree itype = TREE_TYPE (arg00);
11664 if (wi::eq_p (arg01, element_precision (itype) - 1))
11666 if (TYPE_UNSIGNED (itype))
11668 itype = signed_type_for (itype);
11669 arg00 = fold_convert_loc (loc, itype, arg00);
11671 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11672 type, arg00, build_zero_cst (itype));
11676 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11677 (X & C) == 0 when C is a single bit. */
11678 if (TREE_CODE (arg0) == BIT_AND_EXPR
11679 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11680 && integer_zerop (arg1)
11681 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11683 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
11684 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11685 TREE_OPERAND (arg0, 1));
11686 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11687 type, tem,
11688 fold_convert_loc (loc, TREE_TYPE (arg0),
11689 arg1));
11692 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11693 constant C is a power of two, i.e. a single bit. */
11694 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11695 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11696 && integer_zerop (arg1)
11697 && integer_pow2p (TREE_OPERAND (arg0, 1))
11698 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11699 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11701 tree arg00 = TREE_OPERAND (arg0, 0);
11702 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11703 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11706 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11707 when is C is a power of two, i.e. a single bit. */
11708 if (TREE_CODE (arg0) == BIT_AND_EXPR
11709 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11710 && integer_zerop (arg1)
11711 && integer_pow2p (TREE_OPERAND (arg0, 1))
11712 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11713 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11715 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11716 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
11717 arg000, TREE_OPERAND (arg0, 1));
11718 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11719 tem, build_int_cst (TREE_TYPE (tem), 0));
11722 if (integer_zerop (arg1)
11723 && tree_expr_nonzero_p (arg0))
11725 tree res = constant_boolean_node (code==NE_EXPR, type);
11726 return omit_one_operand_loc (loc, type, res, arg0);
11729 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
11730 if (TREE_CODE (arg0) == BIT_AND_EXPR
11731 && TREE_CODE (arg1) == BIT_AND_EXPR)
11733 tree arg00 = TREE_OPERAND (arg0, 0);
11734 tree arg01 = TREE_OPERAND (arg0, 1);
11735 tree arg10 = TREE_OPERAND (arg1, 0);
11736 tree arg11 = TREE_OPERAND (arg1, 1);
11737 tree itype = TREE_TYPE (arg0);
11739 if (operand_equal_p (arg01, arg11, 0))
11740 return fold_build2_loc (loc, code, type,
11741 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11742 fold_build2_loc (loc,
11743 BIT_XOR_EXPR, itype,
11744 arg00, arg10),
11745 arg01),
11746 build_zero_cst (itype));
11748 if (operand_equal_p (arg01, arg10, 0))
11749 return fold_build2_loc (loc, code, type,
11750 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11751 fold_build2_loc (loc,
11752 BIT_XOR_EXPR, itype,
11753 arg00, arg11),
11754 arg01),
11755 build_zero_cst (itype));
11757 if (operand_equal_p (arg00, arg11, 0))
11758 return fold_build2_loc (loc, code, type,
11759 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11760 fold_build2_loc (loc,
11761 BIT_XOR_EXPR, itype,
11762 arg01, arg10),
11763 arg00),
11764 build_zero_cst (itype));
11766 if (operand_equal_p (arg00, arg10, 0))
11767 return fold_build2_loc (loc, code, type,
11768 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11769 fold_build2_loc (loc,
11770 BIT_XOR_EXPR, itype,
11771 arg01, arg11),
11772 arg00),
11773 build_zero_cst (itype));
11776 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11777 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11779 tree arg00 = TREE_OPERAND (arg0, 0);
11780 tree arg01 = TREE_OPERAND (arg0, 1);
11781 tree arg10 = TREE_OPERAND (arg1, 0);
11782 tree arg11 = TREE_OPERAND (arg1, 1);
11783 tree itype = TREE_TYPE (arg0);
11785 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11786 operand_equal_p guarantees no side-effects so we don't need
11787 to use omit_one_operand on Z. */
11788 if (operand_equal_p (arg01, arg11, 0))
11789 return fold_build2_loc (loc, code, type, arg00,
11790 fold_convert_loc (loc, TREE_TYPE (arg00),
11791 arg10));
11792 if (operand_equal_p (arg01, arg10, 0))
11793 return fold_build2_loc (loc, code, type, arg00,
11794 fold_convert_loc (loc, TREE_TYPE (arg00),
11795 arg11));
11796 if (operand_equal_p (arg00, arg11, 0))
11797 return fold_build2_loc (loc, code, type, arg01,
11798 fold_convert_loc (loc, TREE_TYPE (arg01),
11799 arg10));
11800 if (operand_equal_p (arg00, arg10, 0))
11801 return fold_build2_loc (loc, code, type, arg01,
11802 fold_convert_loc (loc, TREE_TYPE (arg01),
11803 arg11));
11805 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11806 if (TREE_CODE (arg01) == INTEGER_CST
11807 && TREE_CODE (arg11) == INTEGER_CST)
11809 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11810 fold_convert_loc (loc, itype, arg11));
11811 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11812 return fold_build2_loc (loc, code, type, tem,
11813 fold_convert_loc (loc, itype, arg10));
11817 /* Attempt to simplify equality/inequality comparisons of complex
11818 values. Only lower the comparison if the result is known or
11819 can be simplified to a single scalar comparison. */
11820 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11821 || TREE_CODE (arg0) == COMPLEX_CST)
11822 && (TREE_CODE (arg1) == COMPLEX_EXPR
11823 || TREE_CODE (arg1) == COMPLEX_CST))
11825 tree real0, imag0, real1, imag1;
11826 tree rcond, icond;
11828 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11830 real0 = TREE_OPERAND (arg0, 0);
11831 imag0 = TREE_OPERAND (arg0, 1);
11833 else
11835 real0 = TREE_REALPART (arg0);
11836 imag0 = TREE_IMAGPART (arg0);
11839 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11841 real1 = TREE_OPERAND (arg1, 0);
11842 imag1 = TREE_OPERAND (arg1, 1);
11844 else
11846 real1 = TREE_REALPART (arg1);
11847 imag1 = TREE_IMAGPART (arg1);
11850 rcond = fold_binary_loc (loc, code, type, real0, real1);
11851 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11853 if (integer_zerop (rcond))
11855 if (code == EQ_EXPR)
11856 return omit_two_operands_loc (loc, type, boolean_false_node,
11857 imag0, imag1);
11858 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11860 else
11862 if (code == NE_EXPR)
11863 return omit_two_operands_loc (loc, type, boolean_true_node,
11864 imag0, imag1);
11865 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11869 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11870 if (icond && TREE_CODE (icond) == INTEGER_CST)
11872 if (integer_zerop (icond))
11874 if (code == EQ_EXPR)
11875 return omit_two_operands_loc (loc, type, boolean_false_node,
11876 real0, real1);
11877 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11879 else
11881 if (code == NE_EXPR)
11882 return omit_two_operands_loc (loc, type, boolean_true_node,
11883 real0, real1);
11884 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11889 return NULL_TREE;
11891 case LT_EXPR:
11892 case GT_EXPR:
11893 case LE_EXPR:
11894 case GE_EXPR:
11895 tem = fold_comparison (loc, code, type, op0, op1);
11896 if (tem != NULL_TREE)
11897 return tem;
11899 /* Transform comparisons of the form X +- C CMP X. */
11900 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11901 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11902 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11903 && !HONOR_SNANS (arg0))
11904 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11905 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11907 tree arg01 = TREE_OPERAND (arg0, 1);
11908 enum tree_code code0 = TREE_CODE (arg0);
11909 int is_positive;
11911 if (TREE_CODE (arg01) == REAL_CST)
11912 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11913 else
11914 is_positive = tree_int_cst_sgn (arg01);
11916 /* (X - c) > X becomes false. */
11917 if (code == GT_EXPR
11918 && ((code0 == MINUS_EXPR && is_positive >= 0)
11919 || (code0 == PLUS_EXPR && is_positive <= 0)))
11921 if (TREE_CODE (arg01) == INTEGER_CST
11922 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11923 fold_overflow_warning (("assuming signed overflow does not "
11924 "occur when assuming that (X - c) > X "
11925 "is always false"),
11926 WARN_STRICT_OVERFLOW_ALL);
11927 return constant_boolean_node (0, type);
11930 /* Likewise (X + c) < X becomes false. */
11931 if (code == LT_EXPR
11932 && ((code0 == PLUS_EXPR && is_positive >= 0)
11933 || (code0 == MINUS_EXPR && is_positive <= 0)))
11935 if (TREE_CODE (arg01) == INTEGER_CST
11936 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11937 fold_overflow_warning (("assuming signed overflow does not "
11938 "occur when assuming that "
11939 "(X + c) < X is always false"),
11940 WARN_STRICT_OVERFLOW_ALL);
11941 return constant_boolean_node (0, type);
11944 /* Convert (X - c) <= X to true. */
11945 if (!HONOR_NANS (arg1)
11946 && code == LE_EXPR
11947 && ((code0 == MINUS_EXPR && is_positive >= 0)
11948 || (code0 == PLUS_EXPR && is_positive <= 0)))
11950 if (TREE_CODE (arg01) == INTEGER_CST
11951 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11952 fold_overflow_warning (("assuming signed overflow does not "
11953 "occur when assuming that "
11954 "(X - c) <= X is always true"),
11955 WARN_STRICT_OVERFLOW_ALL);
11956 return constant_boolean_node (1, type);
11959 /* Convert (X + c) >= X to true. */
11960 if (!HONOR_NANS (arg1)
11961 && code == GE_EXPR
11962 && ((code0 == PLUS_EXPR && is_positive >= 0)
11963 || (code0 == MINUS_EXPR && is_positive <= 0)))
11965 if (TREE_CODE (arg01) == INTEGER_CST
11966 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11967 fold_overflow_warning (("assuming signed overflow does not "
11968 "occur when assuming that "
11969 "(X + c) >= X is always true"),
11970 WARN_STRICT_OVERFLOW_ALL);
11971 return constant_boolean_node (1, type);
11974 if (TREE_CODE (arg01) == INTEGER_CST)
11976 /* Convert X + c > X and X - c < X to true for integers. */
11977 if (code == GT_EXPR
11978 && ((code0 == PLUS_EXPR && is_positive > 0)
11979 || (code0 == MINUS_EXPR && is_positive < 0)))
11981 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11982 fold_overflow_warning (("assuming signed overflow does "
11983 "not occur when assuming that "
11984 "(X + c) > X is always true"),
11985 WARN_STRICT_OVERFLOW_ALL);
11986 return constant_boolean_node (1, type);
11989 if (code == LT_EXPR
11990 && ((code0 == MINUS_EXPR && is_positive > 0)
11991 || (code0 == PLUS_EXPR && is_positive < 0)))
11993 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11994 fold_overflow_warning (("assuming signed overflow does "
11995 "not occur when assuming that "
11996 "(X - c) < X is always true"),
11997 WARN_STRICT_OVERFLOW_ALL);
11998 return constant_boolean_node (1, type);
12001 /* Convert X + c <= X and X - c >= X to false for integers. */
12002 if (code == LE_EXPR
12003 && ((code0 == PLUS_EXPR && is_positive > 0)
12004 || (code0 == MINUS_EXPR && is_positive < 0)))
12006 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12007 fold_overflow_warning (("assuming signed overflow does "
12008 "not occur when assuming that "
12009 "(X + c) <= X is always false"),
12010 WARN_STRICT_OVERFLOW_ALL);
12011 return constant_boolean_node (0, type);
12014 if (code == GE_EXPR
12015 && ((code0 == MINUS_EXPR && is_positive > 0)
12016 || (code0 == PLUS_EXPR && is_positive < 0)))
12018 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12019 fold_overflow_warning (("assuming signed overflow does "
12020 "not occur when assuming that "
12021 "(X - c) >= X is always false"),
12022 WARN_STRICT_OVERFLOW_ALL);
12023 return constant_boolean_node (0, type);
12028 /* Comparisons with the highest or lowest possible integer of
12029 the specified precision will have known values. */
12031 tree arg1_type = TREE_TYPE (arg1);
12032 unsigned int prec = TYPE_PRECISION (arg1_type);
12034 if (TREE_CODE (arg1) == INTEGER_CST
12035 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12037 wide_int max = wi::max_value (arg1_type);
12038 wide_int signed_max = wi::max_value (prec, SIGNED);
12039 wide_int min = wi::min_value (arg1_type);
12041 if (wi::eq_p (arg1, max))
12042 switch (code)
12044 case GT_EXPR:
12045 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12047 case GE_EXPR:
12048 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12050 case LE_EXPR:
12051 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12053 case LT_EXPR:
12054 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12056 /* The GE_EXPR and LT_EXPR cases above are not normally
12057 reached because of previous transformations. */
12059 default:
12060 break;
12062 else if (wi::eq_p (arg1, max - 1))
12063 switch (code)
12065 case GT_EXPR:
12066 arg1 = const_binop (PLUS_EXPR, arg1,
12067 build_int_cst (TREE_TYPE (arg1), 1));
12068 return fold_build2_loc (loc, EQ_EXPR, type,
12069 fold_convert_loc (loc,
12070 TREE_TYPE (arg1), arg0),
12071 arg1);
12072 case LE_EXPR:
12073 arg1 = const_binop (PLUS_EXPR, arg1,
12074 build_int_cst (TREE_TYPE (arg1), 1));
12075 return fold_build2_loc (loc, NE_EXPR, type,
12076 fold_convert_loc (loc, TREE_TYPE (arg1),
12077 arg0),
12078 arg1);
12079 default:
12080 break;
12082 else if (wi::eq_p (arg1, min))
12083 switch (code)
12085 case LT_EXPR:
12086 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12088 case LE_EXPR:
12089 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12091 case GE_EXPR:
12092 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12094 case GT_EXPR:
12095 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12097 default:
12098 break;
12100 else if (wi::eq_p (arg1, min + 1))
12101 switch (code)
12103 case GE_EXPR:
12104 arg1 = const_binop (MINUS_EXPR, arg1,
12105 build_int_cst (TREE_TYPE (arg1), 1));
12106 return fold_build2_loc (loc, NE_EXPR, type,
12107 fold_convert_loc (loc,
12108 TREE_TYPE (arg1), arg0),
12109 arg1);
12110 case LT_EXPR:
12111 arg1 = const_binop (MINUS_EXPR, arg1,
12112 build_int_cst (TREE_TYPE (arg1), 1));
12113 return fold_build2_loc (loc, EQ_EXPR, type,
12114 fold_convert_loc (loc, TREE_TYPE (arg1),
12115 arg0),
12116 arg1);
12117 default:
12118 break;
12121 else if (wi::eq_p (arg1, signed_max)
12122 && TYPE_UNSIGNED (arg1_type)
12123 /* We will flip the signedness of the comparison operator
12124 associated with the mode of arg1, so the sign bit is
12125 specified by this mode. Check that arg1 is the signed
12126 max associated with this sign bit. */
12127 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
12128 /* signed_type does not work on pointer types. */
12129 && INTEGRAL_TYPE_P (arg1_type))
12131 /* The following case also applies to X < signed_max+1
12132 and X >= signed_max+1 because previous transformations. */
12133 if (code == LE_EXPR || code == GT_EXPR)
12135 tree st = signed_type_for (arg1_type);
12136 return fold_build2_loc (loc,
12137 code == LE_EXPR ? GE_EXPR : LT_EXPR,
12138 type, fold_convert_loc (loc, st, arg0),
12139 build_int_cst (st, 0));
12145 /* If we are comparing an ABS_EXPR with a constant, we can
12146 convert all the cases into explicit comparisons, but they may
12147 well not be faster than doing the ABS and one comparison.
12148 But ABS (X) <= C is a range comparison, which becomes a subtraction
12149 and a comparison, and is probably faster. */
12150 if (code == LE_EXPR
12151 && TREE_CODE (arg1) == INTEGER_CST
12152 && TREE_CODE (arg0) == ABS_EXPR
12153 && ! TREE_SIDE_EFFECTS (arg0)
12154 && (0 != (tem = negate_expr (arg1)))
12155 && TREE_CODE (tem) == INTEGER_CST
12156 && !TREE_OVERFLOW (tem))
12157 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12158 build2 (GE_EXPR, type,
12159 TREE_OPERAND (arg0, 0), tem),
12160 build2 (LE_EXPR, type,
12161 TREE_OPERAND (arg0, 0), arg1));
12163 /* Convert ABS_EXPR<x> >= 0 to true. */
12164 strict_overflow_p = false;
12165 if (code == GE_EXPR
12166 && (integer_zerop (arg1)
12167 || (! HONOR_NANS (arg0)
12168 && real_zerop (arg1)))
12169 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12171 if (strict_overflow_p)
12172 fold_overflow_warning (("assuming signed overflow does not occur "
12173 "when simplifying comparison of "
12174 "absolute value and zero"),
12175 WARN_STRICT_OVERFLOW_CONDITIONAL);
12176 return omit_one_operand_loc (loc, type,
12177 constant_boolean_node (true, type),
12178 arg0);
12181 /* Convert ABS_EXPR<x> < 0 to false. */
12182 strict_overflow_p = false;
12183 if (code == LT_EXPR
12184 && (integer_zerop (arg1) || real_zerop (arg1))
12185 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12187 if (strict_overflow_p)
12188 fold_overflow_warning (("assuming signed overflow does not occur "
12189 "when simplifying comparison of "
12190 "absolute value and zero"),
12191 WARN_STRICT_OVERFLOW_CONDITIONAL);
12192 return omit_one_operand_loc (loc, type,
12193 constant_boolean_node (false, type),
12194 arg0);
12197 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12198 and similarly for >= into !=. */
12199 if ((code == LT_EXPR || code == GE_EXPR)
12200 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12201 && TREE_CODE (arg1) == LSHIFT_EXPR
12202 && integer_onep (TREE_OPERAND (arg1, 0)))
12203 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12204 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12205 TREE_OPERAND (arg1, 1)),
12206 build_zero_cst (TREE_TYPE (arg0)));
12208 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
12209 otherwise Y might be >= # of bits in X's type and thus e.g.
12210 (unsigned char) (1 << Y) for Y 15 might be 0.
12211 If the cast is widening, then 1 << Y should have unsigned type,
12212 otherwise if Y is number of bits in the signed shift type minus 1,
12213 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
12214 31 might be 0xffffffff80000000. */
12215 if ((code == LT_EXPR || code == GE_EXPR)
12216 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12217 && CONVERT_EXPR_P (arg1)
12218 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12219 && (element_precision (TREE_TYPE (arg1))
12220 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12221 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12222 || (element_precision (TREE_TYPE (arg1))
12223 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12224 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12226 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12227 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12228 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12229 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12230 build_zero_cst (TREE_TYPE (arg0)));
12233 return NULL_TREE;
12235 case UNORDERED_EXPR:
12236 case ORDERED_EXPR:
12237 case UNLT_EXPR:
12238 case UNLE_EXPR:
12239 case UNGT_EXPR:
12240 case UNGE_EXPR:
12241 case UNEQ_EXPR:
12242 case LTGT_EXPR:
12243 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12245 t1 = fold_relational_const (code, type, arg0, arg1);
12246 if (t1 != NULL_TREE)
12247 return t1;
12250 /* If the first operand is NaN, the result is constant. */
12251 if (TREE_CODE (arg0) == REAL_CST
12252 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12253 && (code != LTGT_EXPR || ! flag_trapping_math))
12255 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12256 ? integer_zero_node
12257 : integer_one_node;
12258 return omit_one_operand_loc (loc, type, t1, arg1);
12261 /* If the second operand is NaN, the result is constant. */
12262 if (TREE_CODE (arg1) == REAL_CST
12263 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12264 && (code != LTGT_EXPR || ! flag_trapping_math))
12266 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12267 ? integer_zero_node
12268 : integer_one_node;
12269 return omit_one_operand_loc (loc, type, t1, arg0);
12272 /* Simplify unordered comparison of something with itself. */
12273 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12274 && operand_equal_p (arg0, arg1, 0))
12275 return constant_boolean_node (1, type);
12277 if (code == LTGT_EXPR
12278 && !flag_trapping_math
12279 && operand_equal_p (arg0, arg1, 0))
12280 return constant_boolean_node (0, type);
12282 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12284 tree targ0 = strip_float_extensions (arg0);
12285 tree targ1 = strip_float_extensions (arg1);
12286 tree newtype = TREE_TYPE (targ0);
12288 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12289 newtype = TREE_TYPE (targ1);
12291 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12292 return fold_build2_loc (loc, code, type,
12293 fold_convert_loc (loc, newtype, targ0),
12294 fold_convert_loc (loc, newtype, targ1));
12297 return NULL_TREE;
12299 case COMPOUND_EXPR:
12300 /* When pedantic, a compound expression can be neither an lvalue
12301 nor an integer constant expression. */
12302 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12303 return NULL_TREE;
12304 /* Don't let (0, 0) be null pointer constant. */
12305 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12306 : fold_convert_loc (loc, type, arg1);
12307 return pedantic_non_lvalue_loc (loc, tem);
12309 case ASSERT_EXPR:
12310 /* An ASSERT_EXPR should never be passed to fold_binary. */
12311 gcc_unreachable ();
12313 default:
12314 return NULL_TREE;
12315 } /* switch (code) */
12318 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
12319 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
12320 of GOTO_EXPR. */
12322 static tree
12323 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
12325 switch (TREE_CODE (*tp))
12327 case LABEL_EXPR:
12328 return *tp;
12330 case GOTO_EXPR:
12331 *walk_subtrees = 0;
12333 /* ... fall through ... */
12335 default:
12336 return NULL_TREE;
12340 /* Return whether the sub-tree ST contains a label which is accessible from
12341 outside the sub-tree. */
12343 static bool
12344 contains_label_p (tree st)
12346 return
12347 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
12350 /* Fold a ternary expression of code CODE and type TYPE with operands
12351 OP0, OP1, and OP2. Return the folded expression if folding is
12352 successful. Otherwise, return NULL_TREE. */
12354 tree
12355 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12356 tree op0, tree op1, tree op2)
12358 tree tem;
12359 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12360 enum tree_code_class kind = TREE_CODE_CLASS (code);
12362 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12363 && TREE_CODE_LENGTH (code) == 3);
12365 /* If this is a commutative operation, and OP0 is a constant, move it
12366 to OP1 to reduce the number of tests below. */
12367 if (commutative_ternary_tree_code (code)
12368 && tree_swap_operands_p (op0, op1, true))
12369 return fold_build3_loc (loc, code, type, op1, op0, op2);
12371 tem = generic_simplify (loc, code, type, op0, op1, op2);
12372 if (tem)
12373 return tem;
12375 /* Strip any conversions that don't change the mode. This is safe
12376 for every expression, except for a comparison expression because
12377 its signedness is derived from its operands. So, in the latter
12378 case, only strip conversions that don't change the signedness.
12380 Note that this is done as an internal manipulation within the
12381 constant folder, in order to find the simplest representation of
12382 the arguments so that their form can be studied. In any cases,
12383 the appropriate type conversions should be put back in the tree
12384 that will get out of the constant folder. */
12385 if (op0)
12387 arg0 = op0;
12388 STRIP_NOPS (arg0);
12391 if (op1)
12393 arg1 = op1;
12394 STRIP_NOPS (arg1);
12397 if (op2)
12399 arg2 = op2;
12400 STRIP_NOPS (arg2);
12403 switch (code)
12405 case COMPONENT_REF:
12406 if (TREE_CODE (arg0) == CONSTRUCTOR
12407 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12409 unsigned HOST_WIDE_INT idx;
12410 tree field, value;
12411 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12412 if (field == arg1)
12413 return value;
12415 return NULL_TREE;
12417 case COND_EXPR:
12418 case VEC_COND_EXPR:
12419 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12420 so all simple results must be passed through pedantic_non_lvalue. */
12421 if (TREE_CODE (arg0) == INTEGER_CST)
12423 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12424 tem = integer_zerop (arg0) ? op2 : op1;
12425 /* Only optimize constant conditions when the selected branch
12426 has the same type as the COND_EXPR. This avoids optimizing
12427 away "c ? x : throw", where the throw has a void type.
12428 Avoid throwing away that operand which contains label. */
12429 if ((!TREE_SIDE_EFFECTS (unused_op)
12430 || !contains_label_p (unused_op))
12431 && (! VOID_TYPE_P (TREE_TYPE (tem))
12432 || VOID_TYPE_P (type)))
12433 return pedantic_non_lvalue_loc (loc, tem);
12434 return NULL_TREE;
12436 else if (TREE_CODE (arg0) == VECTOR_CST)
12438 if ((TREE_CODE (arg1) == VECTOR_CST
12439 || TREE_CODE (arg1) == CONSTRUCTOR)
12440 && (TREE_CODE (arg2) == VECTOR_CST
12441 || TREE_CODE (arg2) == CONSTRUCTOR))
12443 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
12444 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
12445 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
12446 for (i = 0; i < nelts; i++)
12448 tree val = VECTOR_CST_ELT (arg0, i);
12449 if (integer_all_onesp (val))
12450 sel[i] = i;
12451 else if (integer_zerop (val))
12452 sel[i] = nelts + i;
12453 else /* Currently unreachable. */
12454 return NULL_TREE;
12456 tree t = fold_vec_perm (type, arg1, arg2, sel);
12457 if (t != NULL_TREE)
12458 return t;
12462 /* If we have A op B ? A : C, we may be able to convert this to a
12463 simpler expression, depending on the operation and the values
12464 of B and C. Signed zeros prevent all of these transformations,
12465 for reasons given above each one.
12467 Also try swapping the arguments and inverting the conditional. */
12468 if (COMPARISON_CLASS_P (arg0)
12469 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12470 arg1, TREE_OPERAND (arg0, 1))
12471 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
12473 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
12474 if (tem)
12475 return tem;
12478 if (COMPARISON_CLASS_P (arg0)
12479 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12480 op2,
12481 TREE_OPERAND (arg0, 1))
12482 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
12484 location_t loc0 = expr_location_or (arg0, loc);
12485 tem = fold_invert_truthvalue (loc0, arg0);
12486 if (tem && COMPARISON_CLASS_P (tem))
12488 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
12489 if (tem)
12490 return tem;
12494 /* If the second operand is simpler than the third, swap them
12495 since that produces better jump optimization results. */
12496 if (truth_value_p (TREE_CODE (arg0))
12497 && tree_swap_operands_p (op1, op2, false))
12499 location_t loc0 = expr_location_or (arg0, loc);
12500 /* See if this can be inverted. If it can't, possibly because
12501 it was a floating-point inequality comparison, don't do
12502 anything. */
12503 tem = fold_invert_truthvalue (loc0, arg0);
12504 if (tem)
12505 return fold_build3_loc (loc, code, type, tem, op2, op1);
12508 /* Convert A ? 1 : 0 to simply A. */
12509 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12510 : (integer_onep (op1)
12511 && !VECTOR_TYPE_P (type)))
12512 && integer_zerop (op2)
12513 /* If we try to convert OP0 to our type, the
12514 call to fold will try to move the conversion inside
12515 a COND, which will recurse. In that case, the COND_EXPR
12516 is probably the best choice, so leave it alone. */
12517 && type == TREE_TYPE (arg0))
12518 return pedantic_non_lvalue_loc (loc, arg0);
12520 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12521 over COND_EXPR in cases such as floating point comparisons. */
12522 if (integer_zerop (op1)
12523 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
12524 : (integer_onep (op2)
12525 && !VECTOR_TYPE_P (type)))
12526 && truth_value_p (TREE_CODE (arg0)))
12527 return pedantic_non_lvalue_loc (loc,
12528 fold_convert_loc (loc, type,
12529 invert_truthvalue_loc (loc,
12530 arg0)));
12532 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12533 if (TREE_CODE (arg0) == LT_EXPR
12534 && integer_zerop (TREE_OPERAND (arg0, 1))
12535 && integer_zerop (op2)
12536 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12538 /* sign_bit_p looks through both zero and sign extensions,
12539 but for this optimization only sign extensions are
12540 usable. */
12541 tree tem2 = TREE_OPERAND (arg0, 0);
12542 while (tem != tem2)
12544 if (TREE_CODE (tem2) != NOP_EXPR
12545 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12547 tem = NULL_TREE;
12548 break;
12550 tem2 = TREE_OPERAND (tem2, 0);
12552 /* sign_bit_p only checks ARG1 bits within A's precision.
12553 If <sign bit of A> has wider type than A, bits outside
12554 of A's precision in <sign bit of A> need to be checked.
12555 If they are all 0, this optimization needs to be done
12556 in unsigned A's type, if they are all 1 in signed A's type,
12557 otherwise this can't be done. */
12558 if (tem
12559 && TYPE_PRECISION (TREE_TYPE (tem))
12560 < TYPE_PRECISION (TREE_TYPE (arg1))
12561 && TYPE_PRECISION (TREE_TYPE (tem))
12562 < TYPE_PRECISION (type))
12564 int inner_width, outer_width;
12565 tree tem_type;
12567 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12568 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12569 if (outer_width > TYPE_PRECISION (type))
12570 outer_width = TYPE_PRECISION (type);
12572 wide_int mask = wi::shifted_mask
12573 (inner_width, outer_width - inner_width, false,
12574 TYPE_PRECISION (TREE_TYPE (arg1)));
12576 wide_int common = mask & arg1;
12577 if (common == mask)
12579 tem_type = signed_type_for (TREE_TYPE (tem));
12580 tem = fold_convert_loc (loc, tem_type, tem);
12582 else if (common == 0)
12584 tem_type = unsigned_type_for (TREE_TYPE (tem));
12585 tem = fold_convert_loc (loc, tem_type, tem);
12587 else
12588 tem = NULL;
12591 if (tem)
12592 return
12593 fold_convert_loc (loc, type,
12594 fold_build2_loc (loc, BIT_AND_EXPR,
12595 TREE_TYPE (tem), tem,
12596 fold_convert_loc (loc,
12597 TREE_TYPE (tem),
12598 arg1)));
12601 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12602 already handled above. */
12603 if (TREE_CODE (arg0) == BIT_AND_EXPR
12604 && integer_onep (TREE_OPERAND (arg0, 1))
12605 && integer_zerop (op2)
12606 && integer_pow2p (arg1))
12608 tree tem = TREE_OPERAND (arg0, 0);
12609 STRIP_NOPS (tem);
12610 if (TREE_CODE (tem) == RSHIFT_EXPR
12611 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
12612 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
12613 tree_to_uhwi (TREE_OPERAND (tem, 1)))
12614 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12615 TREE_OPERAND (tem, 0), arg1);
12618 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12619 is probably obsolete because the first operand should be a
12620 truth value (that's why we have the two cases above), but let's
12621 leave it in until we can confirm this for all front-ends. */
12622 if (integer_zerop (op2)
12623 && TREE_CODE (arg0) == NE_EXPR
12624 && integer_zerop (TREE_OPERAND (arg0, 1))
12625 && integer_pow2p (arg1)
12626 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12627 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12628 arg1, OEP_ONLY_CONST))
12629 return pedantic_non_lvalue_loc (loc,
12630 fold_convert_loc (loc, type,
12631 TREE_OPERAND (arg0, 0)));
12633 /* Disable the transformations below for vectors, since
12634 fold_binary_op_with_conditional_arg may undo them immediately,
12635 yielding an infinite loop. */
12636 if (code == VEC_COND_EXPR)
12637 return NULL_TREE;
12639 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12640 if (integer_zerop (op2)
12641 && truth_value_p (TREE_CODE (arg0))
12642 && truth_value_p (TREE_CODE (arg1))
12643 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12644 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
12645 : TRUTH_ANDIF_EXPR,
12646 type, fold_convert_loc (loc, type, arg0), arg1);
12648 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12649 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
12650 && truth_value_p (TREE_CODE (arg0))
12651 && truth_value_p (TREE_CODE (arg1))
12652 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12654 location_t loc0 = expr_location_or (arg0, loc);
12655 /* Only perform transformation if ARG0 is easily inverted. */
12656 tem = fold_invert_truthvalue (loc0, arg0);
12657 if (tem)
12658 return fold_build2_loc (loc, code == VEC_COND_EXPR
12659 ? BIT_IOR_EXPR
12660 : TRUTH_ORIF_EXPR,
12661 type, fold_convert_loc (loc, type, tem),
12662 arg1);
12665 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12666 if (integer_zerop (arg1)
12667 && truth_value_p (TREE_CODE (arg0))
12668 && truth_value_p (TREE_CODE (op2))
12669 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12671 location_t loc0 = expr_location_or (arg0, loc);
12672 /* Only perform transformation if ARG0 is easily inverted. */
12673 tem = fold_invert_truthvalue (loc0, arg0);
12674 if (tem)
12675 return fold_build2_loc (loc, code == VEC_COND_EXPR
12676 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
12677 type, fold_convert_loc (loc, type, tem),
12678 op2);
12681 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12682 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
12683 && truth_value_p (TREE_CODE (arg0))
12684 && truth_value_p (TREE_CODE (op2))
12685 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12686 return fold_build2_loc (loc, code == VEC_COND_EXPR
12687 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
12688 type, fold_convert_loc (loc, type, arg0), op2);
12690 return NULL_TREE;
12692 case CALL_EXPR:
12693 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12694 of fold_ternary on them. */
12695 gcc_unreachable ();
12697 case BIT_FIELD_REF:
12698 if ((TREE_CODE (arg0) == VECTOR_CST
12699 || (TREE_CODE (arg0) == CONSTRUCTOR
12700 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
12701 && (type == TREE_TYPE (TREE_TYPE (arg0))
12702 || (TREE_CODE (type) == VECTOR_TYPE
12703 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
12705 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
12706 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
12707 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
12708 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
12710 if (n != 0
12711 && (idx % width) == 0
12712 && (n % width) == 0
12713 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
12715 idx = idx / width;
12716 n = n / width;
12718 if (TREE_CODE (arg0) == VECTOR_CST)
12720 if (n == 1)
12721 return VECTOR_CST_ELT (arg0, idx);
12723 tree *vals = XALLOCAVEC (tree, n);
12724 for (unsigned i = 0; i < n; ++i)
12725 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
12726 return build_vector (type, vals);
12729 /* Constructor elements can be subvectors. */
12730 unsigned HOST_WIDE_INT k = 1;
12731 if (CONSTRUCTOR_NELTS (arg0) != 0)
12733 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
12734 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
12735 k = TYPE_VECTOR_SUBPARTS (cons_elem);
12738 /* We keep an exact subset of the constructor elements. */
12739 if ((idx % k) == 0 && (n % k) == 0)
12741 if (CONSTRUCTOR_NELTS (arg0) == 0)
12742 return build_constructor (type, NULL);
12743 idx /= k;
12744 n /= k;
12745 if (n == 1)
12747 if (idx < CONSTRUCTOR_NELTS (arg0))
12748 return CONSTRUCTOR_ELT (arg0, idx)->value;
12749 return build_zero_cst (type);
12752 vec<constructor_elt, va_gc> *vals;
12753 vec_alloc (vals, n);
12754 for (unsigned i = 0;
12755 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
12756 ++i)
12757 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
12758 CONSTRUCTOR_ELT
12759 (arg0, idx + i)->value);
12760 return build_constructor (type, vals);
12762 /* The bitfield references a single constructor element. */
12763 else if (idx + n <= (idx / k + 1) * k)
12765 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
12766 return build_zero_cst (type);
12767 else if (n == k)
12768 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
12769 else
12770 return fold_build3_loc (loc, code, type,
12771 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
12772 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
12777 /* A bit-field-ref that referenced the full argument can be stripped. */
12778 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12779 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
12780 && integer_zerop (op2))
12781 return fold_convert_loc (loc, type, arg0);
12783 /* On constants we can use native encode/interpret to constant
12784 fold (nearly) all BIT_FIELD_REFs. */
12785 if (CONSTANT_CLASS_P (arg0)
12786 && can_native_interpret_type_p (type)
12787 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
12788 /* This limitation should not be necessary, we just need to
12789 round this up to mode size. */
12790 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
12791 /* Need bit-shifting of the buffer to relax the following. */
12792 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
12794 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12795 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
12796 unsigned HOST_WIDE_INT clen;
12797 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
12798 /* ??? We cannot tell native_encode_expr to start at
12799 some random byte only. So limit us to a reasonable amount
12800 of work. */
12801 if (clen <= 4096)
12803 unsigned char *b = XALLOCAVEC (unsigned char, clen);
12804 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
12805 if (len > 0
12806 && len * BITS_PER_UNIT >= bitpos + bitsize)
12808 tree v = native_interpret_expr (type,
12809 b + bitpos / BITS_PER_UNIT,
12810 bitsize / BITS_PER_UNIT);
12811 if (v)
12812 return v;
12817 return NULL_TREE;
12819 case FMA_EXPR:
12820 /* For integers we can decompose the FMA if possible. */
12821 if (TREE_CODE (arg0) == INTEGER_CST
12822 && TREE_CODE (arg1) == INTEGER_CST)
12823 return fold_build2_loc (loc, PLUS_EXPR, type,
12824 const_binop (MULT_EXPR, arg0, arg1), arg2);
12825 if (integer_zerop (arg2))
12826 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12828 return fold_fma (loc, type, arg0, arg1, arg2);
12830 case VEC_PERM_EXPR:
12831 if (TREE_CODE (arg2) == VECTOR_CST)
12833 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
12834 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
12835 unsigned char *sel2 = sel + nelts;
12836 bool need_mask_canon = false;
12837 bool need_mask_canon2 = false;
12838 bool all_in_vec0 = true;
12839 bool all_in_vec1 = true;
12840 bool maybe_identity = true;
12841 bool single_arg = (op0 == op1);
12842 bool changed = false;
12844 mask2 = 2 * nelts - 1;
12845 mask = single_arg ? (nelts - 1) : mask2;
12846 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
12847 for (i = 0; i < nelts; i++)
12849 tree val = VECTOR_CST_ELT (arg2, i);
12850 if (TREE_CODE (val) != INTEGER_CST)
12851 return NULL_TREE;
12853 /* Make sure that the perm value is in an acceptable
12854 range. */
12855 wide_int t = val;
12856 need_mask_canon |= wi::gtu_p (t, mask);
12857 need_mask_canon2 |= wi::gtu_p (t, mask2);
12858 sel[i] = t.to_uhwi () & mask;
12859 sel2[i] = t.to_uhwi () & mask2;
12861 if (sel[i] < nelts)
12862 all_in_vec1 = false;
12863 else
12864 all_in_vec0 = false;
12866 if ((sel[i] & (nelts-1)) != i)
12867 maybe_identity = false;
12870 if (maybe_identity)
12872 if (all_in_vec0)
12873 return op0;
12874 if (all_in_vec1)
12875 return op1;
12878 if (all_in_vec0)
12879 op1 = op0;
12880 else if (all_in_vec1)
12882 op0 = op1;
12883 for (i = 0; i < nelts; i++)
12884 sel[i] -= nelts;
12885 need_mask_canon = true;
12888 if ((TREE_CODE (op0) == VECTOR_CST
12889 || TREE_CODE (op0) == CONSTRUCTOR)
12890 && (TREE_CODE (op1) == VECTOR_CST
12891 || TREE_CODE (op1) == CONSTRUCTOR))
12893 tree t = fold_vec_perm (type, op0, op1, sel);
12894 if (t != NULL_TREE)
12895 return t;
12898 if (op0 == op1 && !single_arg)
12899 changed = true;
12901 /* Some targets are deficient and fail to expand a single
12902 argument permutation while still allowing an equivalent
12903 2-argument version. */
12904 if (need_mask_canon && arg2 == op2
12905 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
12906 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
12908 need_mask_canon = need_mask_canon2;
12909 sel = sel2;
12912 if (need_mask_canon && arg2 == op2)
12914 tree *tsel = XALLOCAVEC (tree, nelts);
12915 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
12916 for (i = 0; i < nelts; i++)
12917 tsel[i] = build_int_cst (eltype, sel[i]);
12918 op2 = build_vector (TREE_TYPE (arg2), tsel);
12919 changed = true;
12922 if (changed)
12923 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
12925 return NULL_TREE;
12927 default:
12928 return NULL_TREE;
12929 } /* switch (code) */
12932 /* Perform constant folding and related simplification of EXPR.
12933 The related simplifications include x*1 => x, x*0 => 0, etc.,
12934 and application of the associative law.
12935 NOP_EXPR conversions may be removed freely (as long as we
12936 are careful not to change the type of the overall expression).
12937 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12938 but we can constant-fold them if they have constant operands. */
12940 #ifdef ENABLE_FOLD_CHECKING
12941 # define fold(x) fold_1 (x)
12942 static tree fold_1 (tree);
12943 static
12944 #endif
12945 tree
12946 fold (tree expr)
12948 const tree t = expr;
12949 enum tree_code code = TREE_CODE (t);
12950 enum tree_code_class kind = TREE_CODE_CLASS (code);
12951 tree tem;
12952 location_t loc = EXPR_LOCATION (expr);
12954 /* Return right away if a constant. */
12955 if (kind == tcc_constant)
12956 return t;
12958 /* CALL_EXPR-like objects with variable numbers of operands are
12959 treated specially. */
12960 if (kind == tcc_vl_exp)
12962 if (code == CALL_EXPR)
12964 tem = fold_call_expr (loc, expr, false);
12965 return tem ? tem : expr;
12967 return expr;
12970 if (IS_EXPR_CODE_CLASS (kind))
12972 tree type = TREE_TYPE (t);
12973 tree op0, op1, op2;
12975 switch (TREE_CODE_LENGTH (code))
12977 case 1:
12978 op0 = TREE_OPERAND (t, 0);
12979 tem = fold_unary_loc (loc, code, type, op0);
12980 return tem ? tem : expr;
12981 case 2:
12982 op0 = TREE_OPERAND (t, 0);
12983 op1 = TREE_OPERAND (t, 1);
12984 tem = fold_binary_loc (loc, code, type, op0, op1);
12985 return tem ? tem : expr;
12986 case 3:
12987 op0 = TREE_OPERAND (t, 0);
12988 op1 = TREE_OPERAND (t, 1);
12989 op2 = TREE_OPERAND (t, 2);
12990 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12991 return tem ? tem : expr;
12992 default:
12993 break;
12997 switch (code)
12999 case ARRAY_REF:
13001 tree op0 = TREE_OPERAND (t, 0);
13002 tree op1 = TREE_OPERAND (t, 1);
13004 if (TREE_CODE (op1) == INTEGER_CST
13005 && TREE_CODE (op0) == CONSTRUCTOR
13006 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13008 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
13009 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
13010 unsigned HOST_WIDE_INT begin = 0;
13012 /* Find a matching index by means of a binary search. */
13013 while (begin != end)
13015 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13016 tree index = (*elts)[middle].index;
13018 if (TREE_CODE (index) == INTEGER_CST
13019 && tree_int_cst_lt (index, op1))
13020 begin = middle + 1;
13021 else if (TREE_CODE (index) == INTEGER_CST
13022 && tree_int_cst_lt (op1, index))
13023 end = middle;
13024 else if (TREE_CODE (index) == RANGE_EXPR
13025 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13026 begin = middle + 1;
13027 else if (TREE_CODE (index) == RANGE_EXPR
13028 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13029 end = middle;
13030 else
13031 return (*elts)[middle].value;
13035 return t;
13038 /* Return a VECTOR_CST if possible. */
13039 case CONSTRUCTOR:
13041 tree type = TREE_TYPE (t);
13042 if (TREE_CODE (type) != VECTOR_TYPE)
13043 return t;
13045 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
13046 unsigned HOST_WIDE_INT idx, pos = 0;
13047 tree value;
13049 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
13051 if (!CONSTANT_CLASS_P (value))
13052 return t;
13053 if (TREE_CODE (value) == VECTOR_CST)
13055 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
13056 vec[pos++] = VECTOR_CST_ELT (value, i);
13058 else
13059 vec[pos++] = value;
13061 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
13062 vec[pos] = build_zero_cst (TREE_TYPE (type));
13064 return build_vector (type, vec);
13067 case CONST_DECL:
13068 return fold (DECL_INITIAL (t));
13070 default:
13071 return t;
13072 } /* switch (code) */
13075 #ifdef ENABLE_FOLD_CHECKING
13076 #undef fold
13078 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13079 hash_table<nofree_ptr_hash<const tree_node> > *);
13080 static void fold_check_failed (const_tree, const_tree);
13081 void print_fold_checksum (const_tree);
13083 /* When --enable-checking=fold, compute a digest of expr before
13084 and after actual fold call to see if fold did not accidentally
13085 change original expr. */
13087 tree
13088 fold (tree expr)
13090 tree ret;
13091 struct md5_ctx ctx;
13092 unsigned char checksum_before[16], checksum_after[16];
13093 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13095 md5_init_ctx (&ctx);
13096 fold_checksum_tree (expr, &ctx, &ht);
13097 md5_finish_ctx (&ctx, checksum_before);
13098 ht.empty ();
13100 ret = fold_1 (expr);
13102 md5_init_ctx (&ctx);
13103 fold_checksum_tree (expr, &ctx, &ht);
13104 md5_finish_ctx (&ctx, checksum_after);
13106 if (memcmp (checksum_before, checksum_after, 16))
13107 fold_check_failed (expr, ret);
13109 return ret;
13112 void
13113 print_fold_checksum (const_tree expr)
13115 struct md5_ctx ctx;
13116 unsigned char checksum[16], cnt;
13117 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13119 md5_init_ctx (&ctx);
13120 fold_checksum_tree (expr, &ctx, &ht);
13121 md5_finish_ctx (&ctx, checksum);
13122 for (cnt = 0; cnt < 16; ++cnt)
13123 fprintf (stderr, "%02x", checksum[cnt]);
13124 putc ('\n', stderr);
13127 static void
13128 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13130 internal_error ("fold check: original tree changed by fold");
13133 static void
13134 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13135 hash_table<nofree_ptr_hash <const tree_node> > *ht)
13137 const tree_node **slot;
13138 enum tree_code code;
13139 union tree_node buf;
13140 int i, len;
13142 recursive_label:
13143 if (expr == NULL)
13144 return;
13145 slot = ht->find_slot (expr, INSERT);
13146 if (*slot != NULL)
13147 return;
13148 *slot = expr;
13149 code = TREE_CODE (expr);
13150 if (TREE_CODE_CLASS (code) == tcc_declaration
13151 && HAS_DECL_ASSEMBLER_NAME_P (expr))
13153 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
13154 memcpy ((char *) &buf, expr, tree_size (expr));
13155 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13156 buf.decl_with_vis.symtab_node = NULL;
13157 expr = (tree) &buf;
13159 else if (TREE_CODE_CLASS (code) == tcc_type
13160 && (TYPE_POINTER_TO (expr)
13161 || TYPE_REFERENCE_TO (expr)
13162 || TYPE_CACHED_VALUES_P (expr)
13163 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13164 || TYPE_NEXT_VARIANT (expr)))
13166 /* Allow these fields to be modified. */
13167 tree tmp;
13168 memcpy ((char *) &buf, expr, tree_size (expr));
13169 expr = tmp = (tree) &buf;
13170 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13171 TYPE_POINTER_TO (tmp) = NULL;
13172 TYPE_REFERENCE_TO (tmp) = NULL;
13173 TYPE_NEXT_VARIANT (tmp) = NULL;
13174 if (TYPE_CACHED_VALUES_P (tmp))
13176 TYPE_CACHED_VALUES_P (tmp) = 0;
13177 TYPE_CACHED_VALUES (tmp) = NULL;
13180 md5_process_bytes (expr, tree_size (expr), ctx);
13181 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
13182 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13183 if (TREE_CODE_CLASS (code) != tcc_type
13184 && TREE_CODE_CLASS (code) != tcc_declaration
13185 && code != TREE_LIST
13186 && code != SSA_NAME
13187 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
13188 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13189 switch (TREE_CODE_CLASS (code))
13191 case tcc_constant:
13192 switch (code)
13194 case STRING_CST:
13195 md5_process_bytes (TREE_STRING_POINTER (expr),
13196 TREE_STRING_LENGTH (expr), ctx);
13197 break;
13198 case COMPLEX_CST:
13199 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13200 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13201 break;
13202 case VECTOR_CST:
13203 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
13204 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
13205 break;
13206 default:
13207 break;
13209 break;
13210 case tcc_exceptional:
13211 switch (code)
13213 case TREE_LIST:
13214 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13215 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13216 expr = TREE_CHAIN (expr);
13217 goto recursive_label;
13218 break;
13219 case TREE_VEC:
13220 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13221 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13222 break;
13223 default:
13224 break;
13226 break;
13227 case tcc_expression:
13228 case tcc_reference:
13229 case tcc_comparison:
13230 case tcc_unary:
13231 case tcc_binary:
13232 case tcc_statement:
13233 case tcc_vl_exp:
13234 len = TREE_OPERAND_LENGTH (expr);
13235 for (i = 0; i < len; ++i)
13236 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13237 break;
13238 case tcc_declaration:
13239 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13240 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13241 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13243 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13244 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13245 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13246 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13247 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13250 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13252 if (TREE_CODE (expr) == FUNCTION_DECL)
13254 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13255 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13257 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13259 break;
13260 case tcc_type:
13261 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13262 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13263 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13264 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13265 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13266 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13267 if (INTEGRAL_TYPE_P (expr)
13268 || SCALAR_FLOAT_TYPE_P (expr))
13270 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13271 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13273 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13274 if (TREE_CODE (expr) == RECORD_TYPE
13275 || TREE_CODE (expr) == UNION_TYPE
13276 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13277 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13278 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13279 break;
13280 default:
13281 break;
13285 /* Helper function for outputting the checksum of a tree T. When
13286 debugging with gdb, you can "define mynext" to be "next" followed
13287 by "call debug_fold_checksum (op0)", then just trace down till the
13288 outputs differ. */
13290 DEBUG_FUNCTION void
13291 debug_fold_checksum (const_tree t)
13293 int i;
13294 unsigned char checksum[16];
13295 struct md5_ctx ctx;
13296 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13298 md5_init_ctx (&ctx);
13299 fold_checksum_tree (t, &ctx, &ht);
13300 md5_finish_ctx (&ctx, checksum);
13301 ht.empty ();
13303 for (i = 0; i < 16; i++)
13304 fprintf (stderr, "%d ", checksum[i]);
13306 fprintf (stderr, "\n");
13309 #endif
13311 /* Fold a unary tree expression with code CODE of type TYPE with an
13312 operand OP0. LOC is the location of the resulting expression.
13313 Return a folded expression if successful. Otherwise, return a tree
13314 expression with code CODE of type TYPE with an operand OP0. */
13316 tree
13317 fold_build1_stat_loc (location_t loc,
13318 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13320 tree tem;
13321 #ifdef ENABLE_FOLD_CHECKING
13322 unsigned char checksum_before[16], checksum_after[16];
13323 struct md5_ctx ctx;
13324 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13326 md5_init_ctx (&ctx);
13327 fold_checksum_tree (op0, &ctx, &ht);
13328 md5_finish_ctx (&ctx, checksum_before);
13329 ht.empty ();
13330 #endif
13332 tem = fold_unary_loc (loc, code, type, op0);
13333 if (!tem)
13334 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
13336 #ifdef ENABLE_FOLD_CHECKING
13337 md5_init_ctx (&ctx);
13338 fold_checksum_tree (op0, &ctx, &ht);
13339 md5_finish_ctx (&ctx, checksum_after);
13341 if (memcmp (checksum_before, checksum_after, 16))
13342 fold_check_failed (op0, tem);
13343 #endif
13344 return tem;
13347 /* Fold a binary tree expression with code CODE of type TYPE with
13348 operands OP0 and OP1. LOC is the location of the resulting
13349 expression. Return a folded expression if successful. Otherwise,
13350 return a tree expression with code CODE of type TYPE with operands
13351 OP0 and OP1. */
13353 tree
13354 fold_build2_stat_loc (location_t loc,
13355 enum tree_code code, tree type, tree op0, tree op1
13356 MEM_STAT_DECL)
13358 tree tem;
13359 #ifdef ENABLE_FOLD_CHECKING
13360 unsigned char checksum_before_op0[16],
13361 checksum_before_op1[16],
13362 checksum_after_op0[16],
13363 checksum_after_op1[16];
13364 struct md5_ctx ctx;
13365 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13367 md5_init_ctx (&ctx);
13368 fold_checksum_tree (op0, &ctx, &ht);
13369 md5_finish_ctx (&ctx, checksum_before_op0);
13370 ht.empty ();
13372 md5_init_ctx (&ctx);
13373 fold_checksum_tree (op1, &ctx, &ht);
13374 md5_finish_ctx (&ctx, checksum_before_op1);
13375 ht.empty ();
13376 #endif
13378 tem = fold_binary_loc (loc, code, type, op0, op1);
13379 if (!tem)
13380 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13382 #ifdef ENABLE_FOLD_CHECKING
13383 md5_init_ctx (&ctx);
13384 fold_checksum_tree (op0, &ctx, &ht);
13385 md5_finish_ctx (&ctx, checksum_after_op0);
13386 ht.empty ();
13388 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13389 fold_check_failed (op0, tem);
13391 md5_init_ctx (&ctx);
13392 fold_checksum_tree (op1, &ctx, &ht);
13393 md5_finish_ctx (&ctx, checksum_after_op1);
13395 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13396 fold_check_failed (op1, tem);
13397 #endif
13398 return tem;
13401 /* Fold a ternary tree expression with code CODE of type TYPE with
13402 operands OP0, OP1, and OP2. Return a folded expression if
13403 successful. Otherwise, return a tree expression with code CODE of
13404 type TYPE with operands OP0, OP1, and OP2. */
13406 tree
13407 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
13408 tree op0, tree op1, tree op2 MEM_STAT_DECL)
13410 tree tem;
13411 #ifdef ENABLE_FOLD_CHECKING
13412 unsigned char checksum_before_op0[16],
13413 checksum_before_op1[16],
13414 checksum_before_op2[16],
13415 checksum_after_op0[16],
13416 checksum_after_op1[16],
13417 checksum_after_op2[16];
13418 struct md5_ctx ctx;
13419 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13421 md5_init_ctx (&ctx);
13422 fold_checksum_tree (op0, &ctx, &ht);
13423 md5_finish_ctx (&ctx, checksum_before_op0);
13424 ht.empty ();
13426 md5_init_ctx (&ctx);
13427 fold_checksum_tree (op1, &ctx, &ht);
13428 md5_finish_ctx (&ctx, checksum_before_op1);
13429 ht.empty ();
13431 md5_init_ctx (&ctx);
13432 fold_checksum_tree (op2, &ctx, &ht);
13433 md5_finish_ctx (&ctx, checksum_before_op2);
13434 ht.empty ();
13435 #endif
13437 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13438 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13439 if (!tem)
13440 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13442 #ifdef ENABLE_FOLD_CHECKING
13443 md5_init_ctx (&ctx);
13444 fold_checksum_tree (op0, &ctx, &ht);
13445 md5_finish_ctx (&ctx, checksum_after_op0);
13446 ht.empty ();
13448 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13449 fold_check_failed (op0, tem);
13451 md5_init_ctx (&ctx);
13452 fold_checksum_tree (op1, &ctx, &ht);
13453 md5_finish_ctx (&ctx, checksum_after_op1);
13454 ht.empty ();
13456 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13457 fold_check_failed (op1, tem);
13459 md5_init_ctx (&ctx);
13460 fold_checksum_tree (op2, &ctx, &ht);
13461 md5_finish_ctx (&ctx, checksum_after_op2);
13463 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13464 fold_check_failed (op2, tem);
13465 #endif
13466 return tem;
13469 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13470 arguments in ARGARRAY, and a null static chain.
13471 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13472 of type TYPE from the given operands as constructed by build_call_array. */
13474 tree
13475 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13476 int nargs, tree *argarray)
13478 tree tem;
13479 #ifdef ENABLE_FOLD_CHECKING
13480 unsigned char checksum_before_fn[16],
13481 checksum_before_arglist[16],
13482 checksum_after_fn[16],
13483 checksum_after_arglist[16];
13484 struct md5_ctx ctx;
13485 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13486 int i;
13488 md5_init_ctx (&ctx);
13489 fold_checksum_tree (fn, &ctx, &ht);
13490 md5_finish_ctx (&ctx, checksum_before_fn);
13491 ht.empty ();
13493 md5_init_ctx (&ctx);
13494 for (i = 0; i < nargs; i++)
13495 fold_checksum_tree (argarray[i], &ctx, &ht);
13496 md5_finish_ctx (&ctx, checksum_before_arglist);
13497 ht.empty ();
13498 #endif
13500 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13501 if (!tem)
13502 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13504 #ifdef ENABLE_FOLD_CHECKING
13505 md5_init_ctx (&ctx);
13506 fold_checksum_tree (fn, &ctx, &ht);
13507 md5_finish_ctx (&ctx, checksum_after_fn);
13508 ht.empty ();
13510 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13511 fold_check_failed (fn, tem);
13513 md5_init_ctx (&ctx);
13514 for (i = 0; i < nargs; i++)
13515 fold_checksum_tree (argarray[i], &ctx, &ht);
13516 md5_finish_ctx (&ctx, checksum_after_arglist);
13518 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13519 fold_check_failed (NULL_TREE, tem);
13520 #endif
13521 return tem;
13524 /* Perform constant folding and related simplification of initializer
13525 expression EXPR. These behave identically to "fold_buildN" but ignore
13526 potential run-time traps and exceptions that fold must preserve. */
13528 #define START_FOLD_INIT \
13529 int saved_signaling_nans = flag_signaling_nans;\
13530 int saved_trapping_math = flag_trapping_math;\
13531 int saved_rounding_math = flag_rounding_math;\
13532 int saved_trapv = flag_trapv;\
13533 int saved_folding_initializer = folding_initializer;\
13534 flag_signaling_nans = 0;\
13535 flag_trapping_math = 0;\
13536 flag_rounding_math = 0;\
13537 flag_trapv = 0;\
13538 folding_initializer = 1;
13540 #define END_FOLD_INIT \
13541 flag_signaling_nans = saved_signaling_nans;\
13542 flag_trapping_math = saved_trapping_math;\
13543 flag_rounding_math = saved_rounding_math;\
13544 flag_trapv = saved_trapv;\
13545 folding_initializer = saved_folding_initializer;
13547 tree
13548 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13549 tree type, tree op)
13551 tree result;
13552 START_FOLD_INIT;
13554 result = fold_build1_loc (loc, code, type, op);
13556 END_FOLD_INIT;
13557 return result;
13560 tree
13561 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13562 tree type, tree op0, tree op1)
13564 tree result;
13565 START_FOLD_INIT;
13567 result = fold_build2_loc (loc, code, type, op0, op1);
13569 END_FOLD_INIT;
13570 return result;
13573 tree
13574 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
13575 int nargs, tree *argarray)
13577 tree result;
13578 START_FOLD_INIT;
13580 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
13582 END_FOLD_INIT;
13583 return result;
13586 #undef START_FOLD_INIT
13587 #undef END_FOLD_INIT
13589 /* Determine if first argument is a multiple of second argument. Return 0 if
13590 it is not, or we cannot easily determined it to be.
13592 An example of the sort of thing we care about (at this point; this routine
13593 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13594 fold cases do now) is discovering that
13596 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13598 is a multiple of
13600 SAVE_EXPR (J * 8)
13602 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13604 This code also handles discovering that
13606 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13608 is a multiple of 8 so we don't have to worry about dealing with a
13609 possible remainder.
13611 Note that we *look* inside a SAVE_EXPR only to determine how it was
13612 calculated; it is not safe for fold to do much of anything else with the
13613 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13614 at run time. For example, the latter example above *cannot* be implemented
13615 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13616 evaluation time of the original SAVE_EXPR is not necessarily the same at
13617 the time the new expression is evaluated. The only optimization of this
13618 sort that would be valid is changing
13620 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13622 divided by 8 to
13624 SAVE_EXPR (I) * SAVE_EXPR (J)
13626 (where the same SAVE_EXPR (J) is used in the original and the
13627 transformed version). */
13630 multiple_of_p (tree type, const_tree top, const_tree bottom)
13632 if (operand_equal_p (top, bottom, 0))
13633 return 1;
13635 if (TREE_CODE (type) != INTEGER_TYPE)
13636 return 0;
13638 switch (TREE_CODE (top))
13640 case BIT_AND_EXPR:
13641 /* Bitwise and provides a power of two multiple. If the mask is
13642 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13643 if (!integer_pow2p (bottom))
13644 return 0;
13645 /* FALLTHRU */
13647 case MULT_EXPR:
13648 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13649 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13651 case PLUS_EXPR:
13652 case MINUS_EXPR:
13653 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13654 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13656 case LSHIFT_EXPR:
13657 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13659 tree op1, t1;
13661 op1 = TREE_OPERAND (top, 1);
13662 /* const_binop may not detect overflow correctly,
13663 so check for it explicitly here. */
13664 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
13665 && 0 != (t1 = fold_convert (type,
13666 const_binop (LSHIFT_EXPR,
13667 size_one_node,
13668 op1)))
13669 && !TREE_OVERFLOW (t1))
13670 return multiple_of_p (type, t1, bottom);
13672 return 0;
13674 case NOP_EXPR:
13675 /* Can't handle conversions from non-integral or wider integral type. */
13676 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13677 || (TYPE_PRECISION (type)
13678 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13679 return 0;
13681 /* .. fall through ... */
13683 case SAVE_EXPR:
13684 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13686 case COND_EXPR:
13687 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13688 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
13690 case INTEGER_CST:
13691 if (TREE_CODE (bottom) != INTEGER_CST
13692 || integer_zerop (bottom)
13693 || (TYPE_UNSIGNED (type)
13694 && (tree_int_cst_sgn (top) < 0
13695 || tree_int_cst_sgn (bottom) < 0)))
13696 return 0;
13697 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
13698 SIGNED);
13700 default:
13701 return 0;
13705 /* Return true if CODE or TYPE is known to be non-negative. */
13707 static bool
13708 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13710 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13711 && truth_value_p (code))
13712 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13713 have a signed:1 type (where the value is -1 and 0). */
13714 return true;
13715 return false;
13718 /* Return true if (CODE OP0) is known to be non-negative. If the return
13719 value is based on the assumption that signed overflow is undefined,
13720 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13721 *STRICT_OVERFLOW_P. */
13723 bool
13724 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13725 bool *strict_overflow_p)
13727 if (TYPE_UNSIGNED (type))
13728 return true;
13730 switch (code)
13732 case ABS_EXPR:
13733 /* We can't return 1 if flag_wrapv is set because
13734 ABS_EXPR<INT_MIN> = INT_MIN. */
13735 if (!ANY_INTEGRAL_TYPE_P (type))
13736 return true;
13737 if (TYPE_OVERFLOW_UNDEFINED (type))
13739 *strict_overflow_p = true;
13740 return true;
13742 break;
13744 case NON_LVALUE_EXPR:
13745 case FLOAT_EXPR:
13746 case FIX_TRUNC_EXPR:
13747 return tree_expr_nonnegative_warnv_p (op0,
13748 strict_overflow_p);
13750 CASE_CONVERT:
13752 tree inner_type = TREE_TYPE (op0);
13753 tree outer_type = type;
13755 if (TREE_CODE (outer_type) == REAL_TYPE)
13757 if (TREE_CODE (inner_type) == REAL_TYPE)
13758 return tree_expr_nonnegative_warnv_p (op0,
13759 strict_overflow_p);
13760 if (INTEGRAL_TYPE_P (inner_type))
13762 if (TYPE_UNSIGNED (inner_type))
13763 return true;
13764 return tree_expr_nonnegative_warnv_p (op0,
13765 strict_overflow_p);
13768 else if (INTEGRAL_TYPE_P (outer_type))
13770 if (TREE_CODE (inner_type) == REAL_TYPE)
13771 return tree_expr_nonnegative_warnv_p (op0,
13772 strict_overflow_p);
13773 if (INTEGRAL_TYPE_P (inner_type))
13774 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13775 && TYPE_UNSIGNED (inner_type);
13778 break;
13780 default:
13781 return tree_simple_nonnegative_warnv_p (code, type);
13784 /* We don't know sign of `t', so be conservative and return false. */
13785 return false;
13788 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13789 value is based on the assumption that signed overflow is undefined,
13790 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13791 *STRICT_OVERFLOW_P. */
13793 bool
13794 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13795 tree op1, bool *strict_overflow_p)
13797 if (TYPE_UNSIGNED (type))
13798 return true;
13800 switch (code)
13802 case POINTER_PLUS_EXPR:
13803 case PLUS_EXPR:
13804 if (FLOAT_TYPE_P (type))
13805 return (tree_expr_nonnegative_warnv_p (op0,
13806 strict_overflow_p)
13807 && tree_expr_nonnegative_warnv_p (op1,
13808 strict_overflow_p));
13810 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13811 both unsigned and at least 2 bits shorter than the result. */
13812 if (TREE_CODE (type) == INTEGER_TYPE
13813 && TREE_CODE (op0) == NOP_EXPR
13814 && TREE_CODE (op1) == NOP_EXPR)
13816 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13817 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13818 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13819 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13821 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13822 TYPE_PRECISION (inner2)) + 1;
13823 return prec < TYPE_PRECISION (type);
13826 break;
13828 case MULT_EXPR:
13829 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
13831 /* x * x is always non-negative for floating point x
13832 or without overflow. */
13833 if (operand_equal_p (op0, op1, 0)
13834 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
13835 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
13837 if (ANY_INTEGRAL_TYPE_P (type)
13838 && TYPE_OVERFLOW_UNDEFINED (type))
13839 *strict_overflow_p = true;
13840 return true;
13844 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13845 both unsigned and their total bits is shorter than the result. */
13846 if (TREE_CODE (type) == INTEGER_TYPE
13847 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
13848 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
13850 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
13851 ? TREE_TYPE (TREE_OPERAND (op0, 0))
13852 : TREE_TYPE (op0);
13853 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
13854 ? TREE_TYPE (TREE_OPERAND (op1, 0))
13855 : TREE_TYPE (op1);
13857 bool unsigned0 = TYPE_UNSIGNED (inner0);
13858 bool unsigned1 = TYPE_UNSIGNED (inner1);
13860 if (TREE_CODE (op0) == INTEGER_CST)
13861 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
13863 if (TREE_CODE (op1) == INTEGER_CST)
13864 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
13866 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
13867 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
13869 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
13870 ? tree_int_cst_min_precision (op0, UNSIGNED)
13871 : TYPE_PRECISION (inner0);
13873 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
13874 ? tree_int_cst_min_precision (op1, UNSIGNED)
13875 : TYPE_PRECISION (inner1);
13877 return precision0 + precision1 < TYPE_PRECISION (type);
13880 return false;
13882 case BIT_AND_EXPR:
13883 case MAX_EXPR:
13884 return (tree_expr_nonnegative_warnv_p (op0,
13885 strict_overflow_p)
13886 || tree_expr_nonnegative_warnv_p (op1,
13887 strict_overflow_p));
13889 case BIT_IOR_EXPR:
13890 case BIT_XOR_EXPR:
13891 case MIN_EXPR:
13892 case RDIV_EXPR:
13893 case TRUNC_DIV_EXPR:
13894 case CEIL_DIV_EXPR:
13895 case FLOOR_DIV_EXPR:
13896 case ROUND_DIV_EXPR:
13897 return (tree_expr_nonnegative_warnv_p (op0,
13898 strict_overflow_p)
13899 && tree_expr_nonnegative_warnv_p (op1,
13900 strict_overflow_p));
13902 case TRUNC_MOD_EXPR:
13903 case CEIL_MOD_EXPR:
13904 case FLOOR_MOD_EXPR:
13905 case ROUND_MOD_EXPR:
13906 return tree_expr_nonnegative_warnv_p (op0,
13907 strict_overflow_p);
13908 default:
13909 return tree_simple_nonnegative_warnv_p (code, type);
13912 /* We don't know sign of `t', so be conservative and return false. */
13913 return false;
13916 /* Return true if T is known to be non-negative. If the return
13917 value is based on the assumption that signed overflow is undefined,
13918 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13919 *STRICT_OVERFLOW_P. */
13921 bool
13922 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13924 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13925 return true;
13927 switch (TREE_CODE (t))
13929 case INTEGER_CST:
13930 return tree_int_cst_sgn (t) >= 0;
13932 case REAL_CST:
13933 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13935 case FIXED_CST:
13936 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13938 case COND_EXPR:
13939 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13940 strict_overflow_p)
13941 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
13942 strict_overflow_p));
13943 default:
13944 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
13945 TREE_TYPE (t));
13947 /* We don't know sign of `t', so be conservative and return false. */
13948 return false;
13951 /* Return true if T is known to be non-negative. If the return
13952 value is based on the assumption that signed overflow is undefined,
13953 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13954 *STRICT_OVERFLOW_P. */
13956 bool
13957 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
13958 tree arg0, tree arg1, bool *strict_overflow_p)
13960 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13961 switch (DECL_FUNCTION_CODE (fndecl))
13963 CASE_FLT_FN (BUILT_IN_ACOS):
13964 CASE_FLT_FN (BUILT_IN_ACOSH):
13965 CASE_FLT_FN (BUILT_IN_CABS):
13966 CASE_FLT_FN (BUILT_IN_COSH):
13967 CASE_FLT_FN (BUILT_IN_ERFC):
13968 CASE_FLT_FN (BUILT_IN_EXP):
13969 CASE_FLT_FN (BUILT_IN_EXP10):
13970 CASE_FLT_FN (BUILT_IN_EXP2):
13971 CASE_FLT_FN (BUILT_IN_FABS):
13972 CASE_FLT_FN (BUILT_IN_FDIM):
13973 CASE_FLT_FN (BUILT_IN_HYPOT):
13974 CASE_FLT_FN (BUILT_IN_POW10):
13975 CASE_INT_FN (BUILT_IN_FFS):
13976 CASE_INT_FN (BUILT_IN_PARITY):
13977 CASE_INT_FN (BUILT_IN_POPCOUNT):
13978 CASE_INT_FN (BUILT_IN_CLZ):
13979 CASE_INT_FN (BUILT_IN_CLRSB):
13980 case BUILT_IN_BSWAP32:
13981 case BUILT_IN_BSWAP64:
13982 /* Always true. */
13983 return true;
13985 CASE_FLT_FN (BUILT_IN_SQRT):
13986 /* sqrt(-0.0) is -0.0. */
13987 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13988 return true;
13989 return tree_expr_nonnegative_warnv_p (arg0,
13990 strict_overflow_p);
13992 CASE_FLT_FN (BUILT_IN_ASINH):
13993 CASE_FLT_FN (BUILT_IN_ATAN):
13994 CASE_FLT_FN (BUILT_IN_ATANH):
13995 CASE_FLT_FN (BUILT_IN_CBRT):
13996 CASE_FLT_FN (BUILT_IN_CEIL):
13997 CASE_FLT_FN (BUILT_IN_ERF):
13998 CASE_FLT_FN (BUILT_IN_EXPM1):
13999 CASE_FLT_FN (BUILT_IN_FLOOR):
14000 CASE_FLT_FN (BUILT_IN_FMOD):
14001 CASE_FLT_FN (BUILT_IN_FREXP):
14002 CASE_FLT_FN (BUILT_IN_ICEIL):
14003 CASE_FLT_FN (BUILT_IN_IFLOOR):
14004 CASE_FLT_FN (BUILT_IN_IRINT):
14005 CASE_FLT_FN (BUILT_IN_IROUND):
14006 CASE_FLT_FN (BUILT_IN_LCEIL):
14007 CASE_FLT_FN (BUILT_IN_LDEXP):
14008 CASE_FLT_FN (BUILT_IN_LFLOOR):
14009 CASE_FLT_FN (BUILT_IN_LLCEIL):
14010 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14011 CASE_FLT_FN (BUILT_IN_LLRINT):
14012 CASE_FLT_FN (BUILT_IN_LLROUND):
14013 CASE_FLT_FN (BUILT_IN_LRINT):
14014 CASE_FLT_FN (BUILT_IN_LROUND):
14015 CASE_FLT_FN (BUILT_IN_MODF):
14016 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14017 CASE_FLT_FN (BUILT_IN_RINT):
14018 CASE_FLT_FN (BUILT_IN_ROUND):
14019 CASE_FLT_FN (BUILT_IN_SCALB):
14020 CASE_FLT_FN (BUILT_IN_SCALBLN):
14021 CASE_FLT_FN (BUILT_IN_SCALBN):
14022 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14023 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14024 CASE_FLT_FN (BUILT_IN_SINH):
14025 CASE_FLT_FN (BUILT_IN_TANH):
14026 CASE_FLT_FN (BUILT_IN_TRUNC):
14027 /* True if the 1st argument is nonnegative. */
14028 return tree_expr_nonnegative_warnv_p (arg0,
14029 strict_overflow_p);
14031 CASE_FLT_FN (BUILT_IN_FMAX):
14032 /* True if the 1st OR 2nd arguments are nonnegative. */
14033 return (tree_expr_nonnegative_warnv_p (arg0,
14034 strict_overflow_p)
14035 || (tree_expr_nonnegative_warnv_p (arg1,
14036 strict_overflow_p)));
14038 CASE_FLT_FN (BUILT_IN_FMIN):
14039 /* True if the 1st AND 2nd arguments are nonnegative. */
14040 return (tree_expr_nonnegative_warnv_p (arg0,
14041 strict_overflow_p)
14042 && (tree_expr_nonnegative_warnv_p (arg1,
14043 strict_overflow_p)));
14045 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14046 /* True if the 2nd argument is nonnegative. */
14047 return tree_expr_nonnegative_warnv_p (arg1,
14048 strict_overflow_p);
14050 CASE_FLT_FN (BUILT_IN_POWI):
14051 /* True if the 1st argument is nonnegative or the second
14052 argument is an even integer. */
14053 if (TREE_CODE (arg1) == INTEGER_CST
14054 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14055 return true;
14056 return tree_expr_nonnegative_warnv_p (arg0,
14057 strict_overflow_p);
14059 CASE_FLT_FN (BUILT_IN_POW):
14060 /* True if the 1st argument is nonnegative or the second
14061 argument is an even integer valued real. */
14062 if (TREE_CODE (arg1) == REAL_CST)
14064 REAL_VALUE_TYPE c;
14065 HOST_WIDE_INT n;
14067 c = TREE_REAL_CST (arg1);
14068 n = real_to_integer (&c);
14069 if ((n & 1) == 0)
14071 REAL_VALUE_TYPE cint;
14072 real_from_integer (&cint, VOIDmode, n, SIGNED);
14073 if (real_identical (&c, &cint))
14074 return true;
14077 return tree_expr_nonnegative_warnv_p (arg0,
14078 strict_overflow_p);
14080 default:
14081 break;
14083 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14084 type);
14087 /* Return true if T is known to be non-negative. If the return
14088 value is based on the assumption that signed overflow is undefined,
14089 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14090 *STRICT_OVERFLOW_P. */
14092 static bool
14093 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14095 enum tree_code code = TREE_CODE (t);
14096 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14097 return true;
14099 switch (code)
14101 case TARGET_EXPR:
14103 tree temp = TARGET_EXPR_SLOT (t);
14104 t = TARGET_EXPR_INITIAL (t);
14106 /* If the initializer is non-void, then it's a normal expression
14107 that will be assigned to the slot. */
14108 if (!VOID_TYPE_P (t))
14109 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14111 /* Otherwise, the initializer sets the slot in some way. One common
14112 way is an assignment statement at the end of the initializer. */
14113 while (1)
14115 if (TREE_CODE (t) == BIND_EXPR)
14116 t = expr_last (BIND_EXPR_BODY (t));
14117 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14118 || TREE_CODE (t) == TRY_CATCH_EXPR)
14119 t = expr_last (TREE_OPERAND (t, 0));
14120 else if (TREE_CODE (t) == STATEMENT_LIST)
14121 t = expr_last (t);
14122 else
14123 break;
14125 if (TREE_CODE (t) == MODIFY_EXPR
14126 && TREE_OPERAND (t, 0) == temp)
14127 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14128 strict_overflow_p);
14130 return false;
14133 case CALL_EXPR:
14135 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14136 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14138 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14139 get_callee_fndecl (t),
14140 arg0,
14141 arg1,
14142 strict_overflow_p);
14144 case COMPOUND_EXPR:
14145 case MODIFY_EXPR:
14146 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14147 strict_overflow_p);
14148 case BIND_EXPR:
14149 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14150 strict_overflow_p);
14151 case SAVE_EXPR:
14152 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14153 strict_overflow_p);
14155 default:
14156 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14157 TREE_TYPE (t));
14160 /* We don't know sign of `t', so be conservative and return false. */
14161 return false;
14164 /* Return true if T is known to be non-negative. If the return
14165 value is based on the assumption that signed overflow is undefined,
14166 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14167 *STRICT_OVERFLOW_P. */
14169 bool
14170 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14172 enum tree_code code;
14173 if (t == error_mark_node)
14174 return false;
14176 code = TREE_CODE (t);
14177 switch (TREE_CODE_CLASS (code))
14179 case tcc_binary:
14180 case tcc_comparison:
14181 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14182 TREE_TYPE (t),
14183 TREE_OPERAND (t, 0),
14184 TREE_OPERAND (t, 1),
14185 strict_overflow_p);
14187 case tcc_unary:
14188 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14189 TREE_TYPE (t),
14190 TREE_OPERAND (t, 0),
14191 strict_overflow_p);
14193 case tcc_constant:
14194 case tcc_declaration:
14195 case tcc_reference:
14196 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14198 default:
14199 break;
14202 switch (code)
14204 case TRUTH_AND_EXPR:
14205 case TRUTH_OR_EXPR:
14206 case TRUTH_XOR_EXPR:
14207 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14208 TREE_TYPE (t),
14209 TREE_OPERAND (t, 0),
14210 TREE_OPERAND (t, 1),
14211 strict_overflow_p);
14212 case TRUTH_NOT_EXPR:
14213 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14214 TREE_TYPE (t),
14215 TREE_OPERAND (t, 0),
14216 strict_overflow_p);
14218 case COND_EXPR:
14219 case CONSTRUCTOR:
14220 case OBJ_TYPE_REF:
14221 case ASSERT_EXPR:
14222 case ADDR_EXPR:
14223 case WITH_SIZE_EXPR:
14224 case SSA_NAME:
14225 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14227 default:
14228 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14232 /* Return true if `t' is known to be non-negative. Handle warnings
14233 about undefined signed overflow. */
14235 bool
14236 tree_expr_nonnegative_p (tree t)
14238 bool ret, strict_overflow_p;
14240 strict_overflow_p = false;
14241 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14242 if (strict_overflow_p)
14243 fold_overflow_warning (("assuming signed overflow does not occur when "
14244 "determining that expression is always "
14245 "non-negative"),
14246 WARN_STRICT_OVERFLOW_MISC);
14247 return ret;
14251 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14252 For floating point we further ensure that T is not denormal.
14253 Similar logic is present in nonzero_address in rtlanal.h.
14255 If the return value is based on the assumption that signed overflow
14256 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14257 change *STRICT_OVERFLOW_P. */
14259 bool
14260 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14261 bool *strict_overflow_p)
14263 switch (code)
14265 case ABS_EXPR:
14266 return tree_expr_nonzero_warnv_p (op0,
14267 strict_overflow_p);
14269 case NOP_EXPR:
14271 tree inner_type = TREE_TYPE (op0);
14272 tree outer_type = type;
14274 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14275 && tree_expr_nonzero_warnv_p (op0,
14276 strict_overflow_p));
14278 break;
14280 case NON_LVALUE_EXPR:
14281 return tree_expr_nonzero_warnv_p (op0,
14282 strict_overflow_p);
14284 default:
14285 break;
14288 return false;
14291 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14292 For floating point we further ensure that T is not denormal.
14293 Similar logic is present in nonzero_address in rtlanal.h.
14295 If the return value is based on the assumption that signed overflow
14296 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14297 change *STRICT_OVERFLOW_P. */
14299 bool
14300 tree_binary_nonzero_warnv_p (enum tree_code code,
14301 tree type,
14302 tree op0,
14303 tree op1, bool *strict_overflow_p)
14305 bool sub_strict_overflow_p;
14306 switch (code)
14308 case POINTER_PLUS_EXPR:
14309 case PLUS_EXPR:
14310 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
14312 /* With the presence of negative values it is hard
14313 to say something. */
14314 sub_strict_overflow_p = false;
14315 if (!tree_expr_nonnegative_warnv_p (op0,
14316 &sub_strict_overflow_p)
14317 || !tree_expr_nonnegative_warnv_p (op1,
14318 &sub_strict_overflow_p))
14319 return false;
14320 /* One of operands must be positive and the other non-negative. */
14321 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14322 overflows, on a twos-complement machine the sum of two
14323 nonnegative numbers can never be zero. */
14324 return (tree_expr_nonzero_warnv_p (op0,
14325 strict_overflow_p)
14326 || tree_expr_nonzero_warnv_p (op1,
14327 strict_overflow_p));
14329 break;
14331 case MULT_EXPR:
14332 if (TYPE_OVERFLOW_UNDEFINED (type))
14334 if (tree_expr_nonzero_warnv_p (op0,
14335 strict_overflow_p)
14336 && tree_expr_nonzero_warnv_p (op1,
14337 strict_overflow_p))
14339 *strict_overflow_p = true;
14340 return true;
14343 break;
14345 case MIN_EXPR:
14346 sub_strict_overflow_p = false;
14347 if (tree_expr_nonzero_warnv_p (op0,
14348 &sub_strict_overflow_p)
14349 && tree_expr_nonzero_warnv_p (op1,
14350 &sub_strict_overflow_p))
14352 if (sub_strict_overflow_p)
14353 *strict_overflow_p = true;
14355 break;
14357 case MAX_EXPR:
14358 sub_strict_overflow_p = false;
14359 if (tree_expr_nonzero_warnv_p (op0,
14360 &sub_strict_overflow_p))
14362 if (sub_strict_overflow_p)
14363 *strict_overflow_p = true;
14365 /* When both operands are nonzero, then MAX must be too. */
14366 if (tree_expr_nonzero_warnv_p (op1,
14367 strict_overflow_p))
14368 return true;
14370 /* MAX where operand 0 is positive is positive. */
14371 return tree_expr_nonnegative_warnv_p (op0,
14372 strict_overflow_p);
14374 /* MAX where operand 1 is positive is positive. */
14375 else if (tree_expr_nonzero_warnv_p (op1,
14376 &sub_strict_overflow_p)
14377 && tree_expr_nonnegative_warnv_p (op1,
14378 &sub_strict_overflow_p))
14380 if (sub_strict_overflow_p)
14381 *strict_overflow_p = true;
14382 return true;
14384 break;
14386 case BIT_IOR_EXPR:
14387 return (tree_expr_nonzero_warnv_p (op1,
14388 strict_overflow_p)
14389 || tree_expr_nonzero_warnv_p (op0,
14390 strict_overflow_p));
14392 default:
14393 break;
14396 return false;
14399 /* Return true when T is an address and is known to be nonzero.
14400 For floating point we further ensure that T is not denormal.
14401 Similar logic is present in nonzero_address in rtlanal.h.
14403 If the return value is based on the assumption that signed overflow
14404 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14405 change *STRICT_OVERFLOW_P. */
14407 bool
14408 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14410 bool sub_strict_overflow_p;
14411 switch (TREE_CODE (t))
14413 case INTEGER_CST:
14414 return !integer_zerop (t);
14416 case ADDR_EXPR:
14418 tree base = TREE_OPERAND (t, 0);
14420 if (!DECL_P (base))
14421 base = get_base_address (base);
14423 if (!base)
14424 return false;
14426 /* For objects in symbol table check if we know they are non-zero.
14427 Don't do anything for variables and functions before symtab is built;
14428 it is quite possible that they will be declared weak later. */
14429 if (DECL_P (base) && decl_in_symtab_p (base))
14431 struct symtab_node *symbol;
14433 symbol = symtab_node::get_create (base);
14434 if (symbol)
14435 return symbol->nonzero_address ();
14436 else
14437 return false;
14440 /* Function local objects are never NULL. */
14441 if (DECL_P (base)
14442 && (DECL_CONTEXT (base)
14443 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
14444 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
14445 return true;
14447 /* Constants are never weak. */
14448 if (CONSTANT_CLASS_P (base))
14449 return true;
14451 return false;
14454 case COND_EXPR:
14455 sub_strict_overflow_p = false;
14456 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14457 &sub_strict_overflow_p)
14458 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14459 &sub_strict_overflow_p))
14461 if (sub_strict_overflow_p)
14462 *strict_overflow_p = true;
14463 return true;
14465 break;
14467 default:
14468 break;
14470 return false;
14473 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14474 attempt to fold the expression to a constant without modifying TYPE,
14475 OP0 or OP1.
14477 If the expression could be simplified to a constant, then return
14478 the constant. If the expression would not be simplified to a
14479 constant, then return NULL_TREE. */
14481 tree
14482 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14484 tree tem = fold_binary (code, type, op0, op1);
14485 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14488 /* Given the components of a unary expression CODE, TYPE and OP0,
14489 attempt to fold the expression to a constant without modifying
14490 TYPE or OP0.
14492 If the expression could be simplified to a constant, then return
14493 the constant. If the expression would not be simplified to a
14494 constant, then return NULL_TREE. */
14496 tree
14497 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14499 tree tem = fold_unary (code, type, op0);
14500 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14503 /* If EXP represents referencing an element in a constant string
14504 (either via pointer arithmetic or array indexing), return the
14505 tree representing the value accessed, otherwise return NULL. */
14507 tree
14508 fold_read_from_constant_string (tree exp)
14510 if ((TREE_CODE (exp) == INDIRECT_REF
14511 || TREE_CODE (exp) == ARRAY_REF)
14512 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14514 tree exp1 = TREE_OPERAND (exp, 0);
14515 tree index;
14516 tree string;
14517 location_t loc = EXPR_LOCATION (exp);
14519 if (TREE_CODE (exp) == INDIRECT_REF)
14520 string = string_constant (exp1, &index);
14521 else
14523 tree low_bound = array_ref_low_bound (exp);
14524 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
14526 /* Optimize the special-case of a zero lower bound.
14528 We convert the low_bound to sizetype to avoid some problems
14529 with constant folding. (E.g. suppose the lower bound is 1,
14530 and its mode is QI. Without the conversion,l (ARRAY
14531 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14532 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14533 if (! integer_zerop (low_bound))
14534 index = size_diffop_loc (loc, index,
14535 fold_convert_loc (loc, sizetype, low_bound));
14537 string = exp1;
14540 if (string
14541 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14542 && TREE_CODE (string) == STRING_CST
14543 && TREE_CODE (index) == INTEGER_CST
14544 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14545 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
14546 == MODE_INT)
14547 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
14548 return build_int_cst_type (TREE_TYPE (exp),
14549 (TREE_STRING_POINTER (string)
14550 [TREE_INT_CST_LOW (index)]));
14552 return NULL;
14555 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14556 an integer constant, real, or fixed-point constant.
14558 TYPE is the type of the result. */
14560 static tree
14561 fold_negate_const (tree arg0, tree type)
14563 tree t = NULL_TREE;
14565 switch (TREE_CODE (arg0))
14567 case INTEGER_CST:
14569 bool overflow;
14570 wide_int val = wi::neg (arg0, &overflow);
14571 t = force_fit_type (type, val, 1,
14572 (overflow | TREE_OVERFLOW (arg0))
14573 && !TYPE_UNSIGNED (type));
14574 break;
14577 case REAL_CST:
14578 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14579 break;
14581 case FIXED_CST:
14583 FIXED_VALUE_TYPE f;
14584 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14585 &(TREE_FIXED_CST (arg0)), NULL,
14586 TYPE_SATURATING (type));
14587 t = build_fixed (type, f);
14588 /* Propagate overflow flags. */
14589 if (overflow_p | TREE_OVERFLOW (arg0))
14590 TREE_OVERFLOW (t) = 1;
14591 break;
14594 default:
14595 gcc_unreachable ();
14598 return t;
14601 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14602 an integer constant or real constant.
14604 TYPE is the type of the result. */
14606 tree
14607 fold_abs_const (tree arg0, tree type)
14609 tree t = NULL_TREE;
14611 switch (TREE_CODE (arg0))
14613 case INTEGER_CST:
14615 /* If the value is unsigned or non-negative, then the absolute value
14616 is the same as the ordinary value. */
14617 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
14618 t = arg0;
14620 /* If the value is negative, then the absolute value is
14621 its negation. */
14622 else
14624 bool overflow;
14625 wide_int val = wi::neg (arg0, &overflow);
14626 t = force_fit_type (type, val, -1,
14627 overflow | TREE_OVERFLOW (arg0));
14630 break;
14632 case REAL_CST:
14633 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14634 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14635 else
14636 t = arg0;
14637 break;
14639 default:
14640 gcc_unreachable ();
14643 return t;
14646 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14647 constant. TYPE is the type of the result. */
14649 static tree
14650 fold_not_const (const_tree arg0, tree type)
14652 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14654 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
14657 /* Given CODE, a relational operator, the target type, TYPE and two
14658 constant operands OP0 and OP1, return the result of the
14659 relational operation. If the result is not a compile time
14660 constant, then return NULL_TREE. */
14662 static tree
14663 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14665 int result, invert;
14667 /* From here on, the only cases we handle are when the result is
14668 known to be a constant. */
14670 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14672 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14673 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14675 /* Handle the cases where either operand is a NaN. */
14676 if (real_isnan (c0) || real_isnan (c1))
14678 switch (code)
14680 case EQ_EXPR:
14681 case ORDERED_EXPR:
14682 result = 0;
14683 break;
14685 case NE_EXPR:
14686 case UNORDERED_EXPR:
14687 case UNLT_EXPR:
14688 case UNLE_EXPR:
14689 case UNGT_EXPR:
14690 case UNGE_EXPR:
14691 case UNEQ_EXPR:
14692 result = 1;
14693 break;
14695 case LT_EXPR:
14696 case LE_EXPR:
14697 case GT_EXPR:
14698 case GE_EXPR:
14699 case LTGT_EXPR:
14700 if (flag_trapping_math)
14701 return NULL_TREE;
14702 result = 0;
14703 break;
14705 default:
14706 gcc_unreachable ();
14709 return constant_boolean_node (result, type);
14712 return constant_boolean_node (real_compare (code, c0, c1), type);
14715 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14717 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14718 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14719 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14722 /* Handle equality/inequality of complex constants. */
14723 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14725 tree rcond = fold_relational_const (code, type,
14726 TREE_REALPART (op0),
14727 TREE_REALPART (op1));
14728 tree icond = fold_relational_const (code, type,
14729 TREE_IMAGPART (op0),
14730 TREE_IMAGPART (op1));
14731 if (code == EQ_EXPR)
14732 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14733 else if (code == NE_EXPR)
14734 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14735 else
14736 return NULL_TREE;
14739 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14741 unsigned count = VECTOR_CST_NELTS (op0);
14742 tree *elts = XALLOCAVEC (tree, count);
14743 gcc_assert (VECTOR_CST_NELTS (op1) == count
14744 && TYPE_VECTOR_SUBPARTS (type) == count);
14746 for (unsigned i = 0; i < count; i++)
14748 tree elem_type = TREE_TYPE (type);
14749 tree elem0 = VECTOR_CST_ELT (op0, i);
14750 tree elem1 = VECTOR_CST_ELT (op1, i);
14752 tree tem = fold_relational_const (code, elem_type,
14753 elem0, elem1);
14755 if (tem == NULL_TREE)
14756 return NULL_TREE;
14758 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
14761 return build_vector (type, elts);
14764 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14766 To compute GT, swap the arguments and do LT.
14767 To compute GE, do LT and invert the result.
14768 To compute LE, swap the arguments, do LT and invert the result.
14769 To compute NE, do EQ and invert the result.
14771 Therefore, the code below must handle only EQ and LT. */
14773 if (code == LE_EXPR || code == GT_EXPR)
14775 std::swap (op0, op1);
14776 code = swap_tree_comparison (code);
14779 /* Note that it is safe to invert for real values here because we
14780 have already handled the one case that it matters. */
14782 invert = 0;
14783 if (code == NE_EXPR || code == GE_EXPR)
14785 invert = 1;
14786 code = invert_tree_comparison (code, false);
14789 /* Compute a result for LT or EQ if args permit;
14790 Otherwise return T. */
14791 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14793 if (code == EQ_EXPR)
14794 result = tree_int_cst_equal (op0, op1);
14795 else
14796 result = tree_int_cst_lt (op0, op1);
14798 else
14799 return NULL_TREE;
14801 if (invert)
14802 result ^= 1;
14803 return constant_boolean_node (result, type);
14806 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14807 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14808 itself. */
14810 tree
14811 fold_build_cleanup_point_expr (tree type, tree expr)
14813 /* If the expression does not have side effects then we don't have to wrap
14814 it with a cleanup point expression. */
14815 if (!TREE_SIDE_EFFECTS (expr))
14816 return expr;
14818 /* If the expression is a return, check to see if the expression inside the
14819 return has no side effects or the right hand side of the modify expression
14820 inside the return. If either don't have side effects set we don't need to
14821 wrap the expression in a cleanup point expression. Note we don't check the
14822 left hand side of the modify because it should always be a return decl. */
14823 if (TREE_CODE (expr) == RETURN_EXPR)
14825 tree op = TREE_OPERAND (expr, 0);
14826 if (!op || !TREE_SIDE_EFFECTS (op))
14827 return expr;
14828 op = TREE_OPERAND (op, 1);
14829 if (!TREE_SIDE_EFFECTS (op))
14830 return expr;
14833 return build1 (CLEANUP_POINT_EXPR, type, expr);
14836 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14837 of an indirection through OP0, or NULL_TREE if no simplification is
14838 possible. */
14840 tree
14841 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14843 tree sub = op0;
14844 tree subtype;
14846 STRIP_NOPS (sub);
14847 subtype = TREE_TYPE (sub);
14848 if (!POINTER_TYPE_P (subtype))
14849 return NULL_TREE;
14851 if (TREE_CODE (sub) == ADDR_EXPR)
14853 tree op = TREE_OPERAND (sub, 0);
14854 tree optype = TREE_TYPE (op);
14855 /* *&CONST_DECL -> to the value of the const decl. */
14856 if (TREE_CODE (op) == CONST_DECL)
14857 return DECL_INITIAL (op);
14858 /* *&p => p; make sure to handle *&"str"[cst] here. */
14859 if (type == optype)
14861 tree fop = fold_read_from_constant_string (op);
14862 if (fop)
14863 return fop;
14864 else
14865 return op;
14867 /* *(foo *)&fooarray => fooarray[0] */
14868 else if (TREE_CODE (optype) == ARRAY_TYPE
14869 && type == TREE_TYPE (optype)
14870 && (!in_gimple_form
14871 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14873 tree type_domain = TYPE_DOMAIN (optype);
14874 tree min_val = size_zero_node;
14875 if (type_domain && TYPE_MIN_VALUE (type_domain))
14876 min_val = TYPE_MIN_VALUE (type_domain);
14877 if (in_gimple_form
14878 && TREE_CODE (min_val) != INTEGER_CST)
14879 return NULL_TREE;
14880 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14881 NULL_TREE, NULL_TREE);
14883 /* *(foo *)&complexfoo => __real__ complexfoo */
14884 else if (TREE_CODE (optype) == COMPLEX_TYPE
14885 && type == TREE_TYPE (optype))
14886 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14887 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14888 else if (TREE_CODE (optype) == VECTOR_TYPE
14889 && type == TREE_TYPE (optype))
14891 tree part_width = TYPE_SIZE (type);
14892 tree index = bitsize_int (0);
14893 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14897 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14898 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14900 tree op00 = TREE_OPERAND (sub, 0);
14901 tree op01 = TREE_OPERAND (sub, 1);
14903 STRIP_NOPS (op00);
14904 if (TREE_CODE (op00) == ADDR_EXPR)
14906 tree op00type;
14907 op00 = TREE_OPERAND (op00, 0);
14908 op00type = TREE_TYPE (op00);
14910 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14911 if (TREE_CODE (op00type) == VECTOR_TYPE
14912 && type == TREE_TYPE (op00type))
14914 HOST_WIDE_INT offset = tree_to_shwi (op01);
14915 tree part_width = TYPE_SIZE (type);
14916 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
14917 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14918 tree index = bitsize_int (indexi);
14920 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
14921 return fold_build3_loc (loc,
14922 BIT_FIELD_REF, type, op00,
14923 part_width, index);
14926 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14927 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14928 && type == TREE_TYPE (op00type))
14930 tree size = TYPE_SIZE_UNIT (type);
14931 if (tree_int_cst_equal (size, op01))
14932 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14934 /* ((foo *)&fooarray)[1] => fooarray[1] */
14935 else if (TREE_CODE (op00type) == ARRAY_TYPE
14936 && type == TREE_TYPE (op00type))
14938 tree type_domain = TYPE_DOMAIN (op00type);
14939 tree min_val = size_zero_node;
14940 if (type_domain && TYPE_MIN_VALUE (type_domain))
14941 min_val = TYPE_MIN_VALUE (type_domain);
14942 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14943 TYPE_SIZE_UNIT (type));
14944 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14945 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14946 NULL_TREE, NULL_TREE);
14951 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14952 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14953 && type == TREE_TYPE (TREE_TYPE (subtype))
14954 && (!in_gimple_form
14955 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14957 tree type_domain;
14958 tree min_val = size_zero_node;
14959 sub = build_fold_indirect_ref_loc (loc, sub);
14960 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14961 if (type_domain && TYPE_MIN_VALUE (type_domain))
14962 min_val = TYPE_MIN_VALUE (type_domain);
14963 if (in_gimple_form
14964 && TREE_CODE (min_val) != INTEGER_CST)
14965 return NULL_TREE;
14966 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14967 NULL_TREE);
14970 return NULL_TREE;
14973 /* Builds an expression for an indirection through T, simplifying some
14974 cases. */
14976 tree
14977 build_fold_indirect_ref_loc (location_t loc, tree t)
14979 tree type = TREE_TYPE (TREE_TYPE (t));
14980 tree sub = fold_indirect_ref_1 (loc, type, t);
14982 if (sub)
14983 return sub;
14985 return build1_loc (loc, INDIRECT_REF, type, t);
14988 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14990 tree
14991 fold_indirect_ref_loc (location_t loc, tree t)
14993 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14995 if (sub)
14996 return sub;
14997 else
14998 return t;
15001 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15002 whose result is ignored. The type of the returned tree need not be
15003 the same as the original expression. */
15005 tree
15006 fold_ignored_result (tree t)
15008 if (!TREE_SIDE_EFFECTS (t))
15009 return integer_zero_node;
15011 for (;;)
15012 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15014 case tcc_unary:
15015 t = TREE_OPERAND (t, 0);
15016 break;
15018 case tcc_binary:
15019 case tcc_comparison:
15020 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15021 t = TREE_OPERAND (t, 0);
15022 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15023 t = TREE_OPERAND (t, 1);
15024 else
15025 return t;
15026 break;
15028 case tcc_expression:
15029 switch (TREE_CODE (t))
15031 case COMPOUND_EXPR:
15032 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15033 return t;
15034 t = TREE_OPERAND (t, 0);
15035 break;
15037 case COND_EXPR:
15038 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15039 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15040 return t;
15041 t = TREE_OPERAND (t, 0);
15042 break;
15044 default:
15045 return t;
15047 break;
15049 default:
15050 return t;
15054 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15056 tree
15057 round_up_loc (location_t loc, tree value, unsigned int divisor)
15059 tree div = NULL_TREE;
15061 if (divisor == 1)
15062 return value;
15064 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15065 have to do anything. Only do this when we are not given a const,
15066 because in that case, this check is more expensive than just
15067 doing it. */
15068 if (TREE_CODE (value) != INTEGER_CST)
15070 div = build_int_cst (TREE_TYPE (value), divisor);
15072 if (multiple_of_p (TREE_TYPE (value), value, div))
15073 return value;
15076 /* If divisor is a power of two, simplify this to bit manipulation. */
15077 if (divisor == (divisor & -divisor))
15079 if (TREE_CODE (value) == INTEGER_CST)
15081 wide_int val = value;
15082 bool overflow_p;
15084 if ((val & (divisor - 1)) == 0)
15085 return value;
15087 overflow_p = TREE_OVERFLOW (value);
15088 val += divisor - 1;
15089 val &= - (int) divisor;
15090 if (val == 0)
15091 overflow_p = true;
15093 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
15095 else
15097 tree t;
15099 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15100 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15101 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
15102 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15105 else
15107 if (!div)
15108 div = build_int_cst (TREE_TYPE (value), divisor);
15109 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
15110 value = size_binop_loc (loc, MULT_EXPR, value, div);
15113 return value;
15116 /* Likewise, but round down. */
15118 tree
15119 round_down_loc (location_t loc, tree value, int divisor)
15121 tree div = NULL_TREE;
15123 gcc_assert (divisor > 0);
15124 if (divisor == 1)
15125 return value;
15127 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15128 have to do anything. Only do this when we are not given a const,
15129 because in that case, this check is more expensive than just
15130 doing it. */
15131 if (TREE_CODE (value) != INTEGER_CST)
15133 div = build_int_cst (TREE_TYPE (value), divisor);
15135 if (multiple_of_p (TREE_TYPE (value), value, div))
15136 return value;
15139 /* If divisor is a power of two, simplify this to bit manipulation. */
15140 if (divisor == (divisor & -divisor))
15142 tree t;
15144 t = build_int_cst (TREE_TYPE (value), -divisor);
15145 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15147 else
15149 if (!div)
15150 div = build_int_cst (TREE_TYPE (value), divisor);
15151 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
15152 value = size_binop_loc (loc, MULT_EXPR, value, div);
15155 return value;
15158 /* Returns the pointer to the base of the object addressed by EXP and
15159 extracts the information about the offset of the access, storing it
15160 to PBITPOS and POFFSET. */
15162 static tree
15163 split_address_to_core_and_offset (tree exp,
15164 HOST_WIDE_INT *pbitpos, tree *poffset)
15166 tree core;
15167 machine_mode mode;
15168 int unsignedp, volatilep;
15169 HOST_WIDE_INT bitsize;
15170 location_t loc = EXPR_LOCATION (exp);
15172 if (TREE_CODE (exp) == ADDR_EXPR)
15174 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15175 poffset, &mode, &unsignedp, &volatilep,
15176 false);
15177 core = build_fold_addr_expr_loc (loc, core);
15179 else
15181 core = exp;
15182 *pbitpos = 0;
15183 *poffset = NULL_TREE;
15186 return core;
15189 /* Returns true if addresses of E1 and E2 differ by a constant, false
15190 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15192 bool
15193 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15195 tree core1, core2;
15196 HOST_WIDE_INT bitpos1, bitpos2;
15197 tree toffset1, toffset2, tdiff, type;
15199 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15200 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15202 if (bitpos1 % BITS_PER_UNIT != 0
15203 || bitpos2 % BITS_PER_UNIT != 0
15204 || !operand_equal_p (core1, core2, 0))
15205 return false;
15207 if (toffset1 && toffset2)
15209 type = TREE_TYPE (toffset1);
15210 if (type != TREE_TYPE (toffset2))
15211 toffset2 = fold_convert (type, toffset2);
15213 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15214 if (!cst_and_fits_in_hwi (tdiff))
15215 return false;
15217 *diff = int_cst_value (tdiff);
15219 else if (toffset1 || toffset2)
15221 /* If only one of the offsets is non-constant, the difference cannot
15222 be a constant. */
15223 return false;
15225 else
15226 *diff = 0;
15228 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15229 return true;
15232 /* Simplify the floating point expression EXP when the sign of the
15233 result is not significant. Return NULL_TREE if no simplification
15234 is possible. */
15236 tree
15237 fold_strip_sign_ops (tree exp)
15239 tree arg0, arg1;
15240 location_t loc = EXPR_LOCATION (exp);
15242 switch (TREE_CODE (exp))
15244 case ABS_EXPR:
15245 case NEGATE_EXPR:
15246 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15247 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15249 case MULT_EXPR:
15250 case RDIV_EXPR:
15251 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
15252 return NULL_TREE;
15253 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15254 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15255 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15256 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
15257 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15258 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15259 break;
15261 case COMPOUND_EXPR:
15262 arg0 = TREE_OPERAND (exp, 0);
15263 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15264 if (arg1)
15265 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15266 break;
15268 case COND_EXPR:
15269 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15270 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15271 if (arg0 || arg1)
15272 return fold_build3_loc (loc,
15273 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15274 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15275 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15276 break;
15278 case CALL_EXPR:
15280 const enum built_in_function fcode = builtin_mathfn_code (exp);
15281 switch (fcode)
15283 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15284 /* Strip copysign function call, return the 1st argument. */
15285 arg0 = CALL_EXPR_ARG (exp, 0);
15286 arg1 = CALL_EXPR_ARG (exp, 1);
15287 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
15289 default:
15290 /* Strip sign ops from the argument of "odd" math functions. */
15291 if (negate_mathfn_p (fcode))
15293 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15294 if (arg0)
15295 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
15297 break;
15300 break;
15302 default:
15303 break;
15305 return NULL_TREE;
15308 /* Return OFF converted to a pointer offset type suitable as offset for
15309 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
15310 tree
15311 convert_to_ptrofftype_loc (location_t loc, tree off)
15313 return fold_convert_loc (loc, sizetype, off);
15316 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15317 tree
15318 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
15320 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15321 ptr, convert_to_ptrofftype_loc (loc, off));
15324 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
15325 tree
15326 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
15328 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
15329 ptr, size_int (off));